hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7b25ca9a1adfc4d4015fbd4d61ee6bed2204d0e6
| 152
|
py
|
Python
|
JellyBot/api/ar/__init__.py
|
RaenonX/Jelly-Bot-API
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 5
|
2020-08-26T20:12:00.000Z
|
2020-12-11T16:39:22.000Z
|
JellyBot/api/ar/__init__.py
|
RaenonX/Jelly-Bot
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 234
|
2019-12-14T03:45:19.000Z
|
2020-08-26T18:55:19.000Z
|
JellyBot/api/ar/__init__.py
|
RaenonX/Jelly-Bot-API
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 2
|
2019-10-23T15:21:15.000Z
|
2020-05-22T09:35:55.000Z
|
from .add import AutoReplyAddView, AutoReplyAddExecodeView
from .validate import ContentValidationView
from .tag import AutoReplyTagPopularityQueryView
| 38
| 58
| 0.888158
| 13
| 152
| 10.384615
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085526
| 152
| 3
| 59
| 50.666667
| 0.971223
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9e96e800876c0797dad67820e02be1e2407d9738
| 20
|
py
|
Python
|
mocks/usocket.py
|
stefanhoelzl/alarm-clock
|
efba84e71fcade26bef020dc7eaa10181ea9f96c
|
[
"MIT"
] | 1
|
2019-07-31T12:39:53.000Z
|
2019-07-31T12:39:53.000Z
|
mocks/usocket.py
|
stefanhoelzl/alarm-clock
|
efba84e71fcade26bef020dc7eaa10181ea9f96c
|
[
"MIT"
] | null | null | null |
mocks/usocket.py
|
stefanhoelzl/alarm-clock
|
efba84e71fcade26bef020dc7eaa10181ea9f96c
|
[
"MIT"
] | 1
|
2019-10-04T04:32:20.000Z
|
2019-10-04T04:32:20.000Z
|
from socket import *
| 20
| 20
| 0.8
| 3
| 20
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7ba61f570a19e2b2a48434ccf55b71a5af2ea6a2
| 79
|
py
|
Python
|
plugins/tasks_plugin/__init__.py
|
shivammmmm/querybook
|
71263eb7db79e56235ea752f2cf3339ca9b3a092
|
[
"Apache-2.0"
] | 1,144
|
2021-03-30T05:06:16.000Z
|
2022-03-31T10:40:31.000Z
|
plugins/tasks_plugin/__init__.py
|
shivammmmm/querybook
|
71263eb7db79e56235ea752f2cf3339ca9b3a092
|
[
"Apache-2.0"
] | 593
|
2021-07-01T10:34:25.000Z
|
2022-03-31T23:24:40.000Z
|
plugins/tasks_plugin/__init__.py
|
shivammmmm/querybook
|
71263eb7db79e56235ea752f2cf3339ca9b3a092
|
[
"Apache-2.0"
] | 113
|
2021-03-30T00:07:20.000Z
|
2022-03-31T07:18:43.000Z
|
# from tasks.delete_mysql_cache import delete_mysql_cache
# delete_mysql_cache
| 26.333333
| 57
| 0.873418
| 12
| 79
| 5.25
| 0.5
| 0.52381
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 58
| 39.5
| 0.875
| 0.936709
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c86fdb89d80e85a246a77374cc13332e568bf4ef
| 28
|
py
|
Python
|
models/__init__.py
|
danielism97/ST-MFNet
|
fb7ea12db0aee0793d7858f4fe86847fe81d6bf7
|
[
"MIT"
] | 5
|
2021-12-14T11:51:56.000Z
|
2022-03-31T05:13:39.000Z
|
models/__init__.py
|
danielism97/ST-MFNet
|
fb7ea12db0aee0793d7858f4fe86847fe81d6bf7
|
[
"MIT"
] | 1
|
2022-02-09T11:19:44.000Z
|
2022-02-16T01:48:41.000Z
|
models/__init__.py
|
danielism97/ST-MFNet
|
fb7ea12db0aee0793d7858f4fe86847fe81d6bf7
|
[
"MIT"
] | null | null | null |
from .stmfnet import STMFNet
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c875a037aa4647ea7c73b8a26743b0b975c0855a
| 5,695
|
py
|
Python
|
test/test_cam_v1.py
|
dondemonz/RestApi
|
0459d2b8079b9f2abc50bf5e206625427c4a2dcf
|
[
"Apache-2.0"
] | null | null | null |
test/test_cam_v1.py
|
dondemonz/RestApi
|
0459d2b8079b9f2abc50bf5e206625427c4a2dcf
|
[
"Apache-2.0"
] | null | null | null |
test/test_cam_v1.py
|
dondemonz/RestApi
|
0459d2b8079b9f2abc50bf5e206625427c4a2dcf
|
[
"Apache-2.0"
] | null | null | null |
import requests
from model.json_check import *
from model.input_data import *
# Запрос на получение настроек всех объектов CAM
def test_GetV1AllCamerasCode200():
data = "success"
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/", auth=auth)
user_resp_code = "200"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["status"]
assert data == n
def test_GetV1AllCamerasStatus401():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/", auth=("", ""))
user_resp_code = "401"
assert str(response.status_code) == user_resp_code
# Запрос на получение настроек объекта CAM
def test_GetV1CamerasByIdCode200():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId, auth=auth)
user_resp_code = "200"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["data"]["id"]
assert camId == n
def test_GetV1CamerasByIdCode401():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId, auth=("", ""))
user_resp_code = "401"
assert str(response.status_code) == user_resp_code
def test_GetV1CamerasByIdCode404():
data = "Unknown CAM id:0"
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/0", auth=auth)
user_resp_code = "404"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["message"]
assert data == n
#Запрос на получение поля status объекта CAM
def test_GetV1CameraStatusCode200():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/status", auth=auth)
user_resp_code = "200"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["data"]["id"]
assert camId == n
def test_GetV1CameraStatusCode401():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/status", auth=("", ""))
user_resp_code = "401"
assert str(response.status_code) == user_resp_code
def test_GetV1CameraStatusCode404():
data = "Unknown CAM id:0"
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/0/status", auth=auth)
user_resp_code = "404"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["message"]
assert data == n
# Запрос на получение поля rtsp объекта CAM
def test_GetV1CameraRtspCode200():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/rtsp", auth=auth)
user_resp_code = "200"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["data"]["id"]
assert camId == n
def test_GetV1CameraRtspCode401():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/rtsp", auth=("", ""))
user_resp_code = "401"
assert str(response.status_code) == user_resp_code
def test_GetV1CameraRtspCode404():
data = "Unknown CAM id:0"
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/0/rtsp", auth=auth)
user_resp_code = "404"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["message"]
assert data == n
# Запрос на получение поля rtsp/live объекта CAM
def test_GetV1CameraRtspLiveCode200():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/rtsp/live", auth=auth)
user_resp_code = "200"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["data"]["id"]
assert camId == n
def test_GetV1CameraRtspLiveCode401():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/rtsp/live", auth=("", ""))
user_resp_code = "401"
assert str(response.status_code) == user_resp_code
def test_GetV1CameraRtspLiveCode404():
data = "Unknown CAM id:0"
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/0/rtsp/live", auth=auth)
user_resp_code = "404"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["message"]
assert data == n
# Запрос на получение поля rtsp/archive объекта CAM
def test_GetV1CameraRtspArchiveCode200():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/rtsp/archive", auth=auth)
user_resp_code = "200"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["data"]["id"]
assert camId == n
def test_GetV1CameraRtspArchiveCode401():
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/"+camId+"/rtsp/archive", auth=("", ""))
user_resp_code = "401"
assert str(response.status_code) == user_resp_code
def test_GetV1CameraRtspArchiveCode404():
data = "Unknown CAM id:0"
response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v1/cameras/0/rtsp/archive", auth=auth)
user_resp_code = "404"
assert str(response.status_code) == user_resp_code
body = json.dumps(response.json())
data1 = json.loads(body)
n = data1["message"]
assert data == n
| 37.222222
| 120
| 0.676558
| 755
| 5,695
| 4.943046
| 0.092715
| 0.072883
| 0.109325
| 0.100214
| 0.81538
| 0.81538
| 0.813773
| 0.810825
| 0.810825
| 0.810825
| 0
| 0.034913
| 0.155048
| 5,695
| 152
| 121
| 37.467105
| 0.740648
| 0.047234
| 0
| 0.677686
| 0
| 0
| 0.134158
| 0.019007
| 0
| 0
| 0
| 0
| 0.231405
| 1
| 0.140496
| false
| 0
| 0.024793
| 0
| 0.165289
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c878f2a50a90c4b5c7d260136098425d7f3c8b94
| 7,363
|
py
|
Python
|
python/tests/test_model.py
|
alexkreidler/oxigraph
|
73d6138ede7e71caeb5030c6844b11dbd2adeb93
|
[
"Apache-2.0",
"MIT"
] | 403
|
2020-05-01T22:12:54.000Z
|
2022-03-31T11:16:38.000Z
|
python/tests/test_model.py
|
alexkreidler/oxigraph
|
73d6138ede7e71caeb5030c6844b11dbd2adeb93
|
[
"Apache-2.0",
"MIT"
] | 126
|
2020-05-04T08:44:21.000Z
|
2022-03-31T13:50:23.000Z
|
python/tests/test_model.py
|
alexkreidler/oxigraph
|
73d6138ede7e71caeb5030c6844b11dbd2adeb93
|
[
"Apache-2.0",
"MIT"
] | 27
|
2020-05-02T01:05:07.000Z
|
2022-03-23T02:48:30.000Z
|
import unittest
from pyoxigraph import *
XSD_STRING = NamedNode("http://www.w3.org/2001/XMLSchema#string")
XSD_INTEGER = NamedNode("http://www.w3.org/2001/XMLSchema#integer")
RDF_LANG_STRING = NamedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString")
class TestNamedNode(unittest.TestCase):
def test_constructor(self):
self.assertEqual(NamedNode("http://foo").value, "http://foo")
def test_string(self):
self.assertEqual(str(NamedNode("http://foo")), "<http://foo>")
def test_equal(self):
self.assertEqual(NamedNode("http://foo"), NamedNode("http://foo"))
self.assertNotEqual(NamedNode("http://foo"), NamedNode("http://bar"))
class TestBlankNode(unittest.TestCase):
def test_constructor(self):
self.assertEqual(BlankNode("foo").value, "foo")
self.assertNotEqual(BlankNode(), BlankNode())
def test_string(self):
self.assertEqual(str(BlankNode("foo")), "_:foo")
def test_equal(self):
self.assertEqual(BlankNode("foo"), BlankNode("foo"))
self.assertNotEqual(BlankNode("foo"), BlankNode("bar"))
self.assertNotEqual(BlankNode('foo'), NamedNode('http://foo'))
self.assertNotEqual(NamedNode('http://foo'), BlankNode('foo'))
class TestLiteral(unittest.TestCase):
def test_constructor(self):
self.assertEqual(Literal("foo").value, "foo")
self.assertEqual(Literal("foo").datatype, XSD_STRING)
self.assertEqual(Literal("foo", language="en").value, "foo")
self.assertEqual(Literal("foo", language="en").language, "en")
self.assertEqual(Literal("foo", language="en").datatype, RDF_LANG_STRING)
self.assertEqual(Literal("foo", datatype=XSD_INTEGER).value, "foo")
self.assertEqual(Literal("foo", datatype=XSD_INTEGER).datatype, XSD_INTEGER)
def test_string(self):
self.assertEqual(str(Literal("foo")), '"foo"')
self.assertEqual(str(Literal("foo", language="en")), '"foo"@en')
self.assertEqual(
str(Literal("foo", datatype=XSD_INTEGER)),
'"foo"^^<http://www.w3.org/2001/XMLSchema#integer>',
)
def test_equals(self):
self.assertEqual(Literal("foo", datatype=XSD_STRING), Literal("foo"))
self.assertEqual(
Literal("foo", language="en", datatype=RDF_LANG_STRING),
Literal("foo", language="en"),
)
self.assertNotEqual(NamedNode('http://foo'), Literal('foo'))
self.assertNotEqual(Literal('foo'), NamedNode('http://foo'))
self.assertNotEqual(BlankNode('foo'), Literal('foo'))
self.assertNotEqual(Literal('foo'), BlankNode('foo'))
class TestTriple(unittest.TestCase):
def test_constructor(self):
t = Triple(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
)
self.assertEqual(t.subject, NamedNode("http://example.com/s"))
self.assertEqual(t.predicate, NamedNode("http://example.com/p"))
self.assertEqual(t.object, NamedNode("http://example.com/o"))
def test_mapping(self):
t = Triple(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
)
self.assertEqual(t[0], NamedNode("http://example.com/s"))
self.assertEqual(t[1], NamedNode("http://example.com/p"))
self.assertEqual(t[2], NamedNode("http://example.com/o"))
def test_destruct(self):
(s, p, o) = Triple(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
)
self.assertEqual(s, NamedNode("http://example.com/s"))
self.assertEqual(p, NamedNode("http://example.com/p"))
self.assertEqual(o, NamedNode("http://example.com/o"))
def test_string(self):
self.assertEqual(
str(
Triple(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
)
),
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .",
)
class TestQuad(unittest.TestCase):
def test_constructor(self):
t = Quad(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
NamedNode("http://example.com/g"),
)
self.assertEqual(t.subject, NamedNode("http://example.com/s"))
self.assertEqual(t.predicate, NamedNode("http://example.com/p"))
self.assertEqual(t.object, NamedNode("http://example.com/o"))
self.assertEqual(t.graph_name, NamedNode("http://example.com/g"))
self.assertEqual(
t.triple,
Triple(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
),
)
self.assertEqual(
Quad(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
),
Quad(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
DefaultGraph(),
),
)
def test_mapping(self):
t = Quad(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
NamedNode("http://example.com/g"),
)
self.assertEqual(t[0], NamedNode("http://example.com/s"))
self.assertEqual(t[1], NamedNode("http://example.com/p"))
self.assertEqual(t[2], NamedNode("http://example.com/o"))
self.assertEqual(t[3], NamedNode("http://example.com/g"))
def test_destruct(self):
(s, p, o, g) = Quad(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
NamedNode("http://example.com/g"),
)
self.assertEqual(s, NamedNode("http://example.com/s"))
self.assertEqual(p, NamedNode("http://example.com/p"))
self.assertEqual(o, NamedNode("http://example.com/o"))
self.assertEqual(g, NamedNode("http://example.com/g"))
def test_string(self):
self.assertEqual(
str(
Triple(
NamedNode("http://example.com/s"),
NamedNode("http://example.com/p"),
NamedNode("http://example.com/o"),
)
),
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .",
)
class TestVariable(unittest.TestCase):
def test_constructor(self):
self.assertEqual(Variable("foo").value, "foo")
def test_string(self):
self.assertEqual(str(Variable("foo")), "?foo")
def test_equal(self):
self.assertEqual(Variable("foo"), Variable("foo"))
self.assertNotEqual(Variable("foo"), Variable("bar"))
if __name__ == "__main__":
unittest.main()
| 37.375635
| 85
| 0.572864
| 807
| 7,363
| 5.172243
| 0.08798
| 0.218016
| 0.211308
| 0.314087
| 0.871107
| 0.810973
| 0.77839
| 0.631289
| 0.506229
| 0.480834
| 0
| 0.005568
| 0.243787
| 7,363
| 196
| 86
| 37.566327
| 0.744073
| 0
| 0
| 0.587879
| 0
| 0.018182
| 0.23686
| 0
| 0
| 0
| 0
| 0
| 0.339394
| 1
| 0.121212
| false
| 0
| 0.012121
| 0
| 0.169697
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c87e7e42fd6e3c9f487d2ed6e331a109c55a126e
| 231
|
py
|
Python
|
scripts/generate_gantt_chart.py
|
spisakt/PUMI
|
bea29696aa90e5581f08919e1a2cd9f569284984
|
[
"BSD-3-Clause"
] | 5
|
2018-06-12T08:17:13.000Z
|
2022-02-25T20:07:00.000Z
|
scripts/generate_gantt_chart.py
|
spisakt/PUMI
|
bea29696aa90e5581f08919e1a2cd9f569284984
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/generate_gantt_chart.py
|
spisakt/PUMI
|
bea29696aa90e5581f08919e1a2cd9f569284984
|
[
"BSD-3-Clause"
] | 2
|
2020-10-19T15:27:28.000Z
|
2021-06-04T17:02:27.000Z
|
#!/usr/bin/env python
# start it like: scripts/generate_gantt_chart.py
# from the project folder
import PUMI.utils.resource_profiler as rp
rp.generate_gantt_chart('/Users/tspisak/Dropbox/comp/PAINTeR/szeged/run_stats.log', cores=8)
| 46.2
| 92
| 0.809524
| 38
| 231
| 4.763158
| 0.894737
| 0.143646
| 0.198895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004673
| 0.073593
| 231
| 5
| 92
| 46.2
| 0.841122
| 0.393939
| 0
| 0
| 1
| 0
| 0.405797
| 0.405797
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
c88c4147482d329da8dcc88d504798141aa006f3
| 554
|
py
|
Python
|
django_workflow_system/utils/response_schema_handlers/__init__.py
|
eikonomega/django-workflow-system
|
dc0e8807263266713d3d7fa46e240e8d72db28d1
|
[
"MIT"
] | 2
|
2022-01-28T12:35:42.000Z
|
2022-03-23T16:06:05.000Z
|
django_workflow_system/utils/response_schema_handlers/__init__.py
|
eikonomega/django-workflow-system
|
dc0e8807263266713d3d7fa46e240e8d72db28d1
|
[
"MIT"
] | 10
|
2021-04-27T20:26:32.000Z
|
2021-07-21T15:34:31.000Z
|
django_workflow_system/utils/response_schema_handlers/__init__.py
|
eikonomega/django-workflow-system
|
dc0e8807263266713d3d7fa46e240e8d72db28d1
|
[
"MIT"
] | 1
|
2021-11-13T14:30:34.000Z
|
2021-11-13T14:30:34.000Z
|
"""Convenience imports."""
from .date_range_question import get_response_schema as date_range_question_schema
from .free_form_question import get_response_schema as free_form_question_schema
from .multiple_choice_question import (
get_response_schema as multiple_choice_question_schema,
)
from .numeric_range_question import get_response_schema as numeric_range_question_schema
from .single_choice_question import get_response_schema as single_choice_question_schema
from .true_false_question import get_response_schema as true_false_question_schema
| 55.4
| 88
| 0.891697
| 80
| 554
| 5.65
| 0.2375
| 0.185841
| 0.225664
| 0.331858
| 0.486726
| 0.486726
| 0.340708
| 0
| 0
| 0
| 0
| 0
| 0.079422
| 554
| 9
| 89
| 61.555556
| 0.886275
| 0.036101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c89f883d4b09f4fe5b1091ffacfe1b93f72c7bd6
| 207
|
py
|
Python
|
tests/test_calculations.py
|
godzilla-but-nicer/cellularautomata
|
16c1d31403a26131f1e18f5d72b96a316082e596
|
[
"MIT"
] | null | null | null |
tests/test_calculations.py
|
godzilla-but-nicer/cellularautomata
|
16c1d31403a26131f1e18f5d72b96a316082e596
|
[
"MIT"
] | null | null | null |
tests/test_calculations.py
|
godzilla-but-nicer/cellularautomata
|
16c1d31403a26131f1e18f5d72b96a316082e596
|
[
"MIT"
] | null | null | null |
import numpy as np
from casim.calculations import word_entropy
def test_word_entropy():
test_arr = np.array([1, 0, 0, 1, 1, 0, 1, 0])
assert np.round(word_entropy(test_arr, 3), decimals=1) == 2.5
| 23
| 65
| 0.68599
| 38
| 207
| 3.578947
| 0.552632
| 0.242647
| 0.220588
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070588
| 0.178744
| 207
| 8
| 66
| 25.875
| 0.729412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c8dba0226e9d63fddee9a5f90630124ba90d6604
| 161
|
py
|
Python
|
mongo_db_from_config/__init__.py
|
cisagov/mongo-db-from-config
|
021bb2ceae01b918f863592269899185f17ac2f4
|
[
"CC0-1.0"
] | null | null | null |
mongo_db_from_config/__init__.py
|
cisagov/mongo-db-from-config
|
021bb2ceae01b918f863592269899185f17ac2f4
|
[
"CC0-1.0"
] | null | null | null |
mongo_db_from_config/__init__.py
|
cisagov/mongo-db-from-config
|
021bb2ceae01b918f863592269899185f17ac2f4
|
[
"CC0-1.0"
] | null | null | null |
"""This package contains the mongo_db_from_config code."""
from .mongo_db_from_config import db_from_config
__version__ = "0.0.1"
__all__ = ["db_from_config"]
| 23
| 58
| 0.776398
| 26
| 161
| 4.115385
| 0.538462
| 0.224299
| 0.448598
| 0.317757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020979
| 0.111801
| 161
| 6
| 59
| 26.833333
| 0.727273
| 0.322981
| 0
| 0
| 0
| 0
| 0.184466
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
cd9e290d1db0e882b80d6b86c14d96d620b1c2eb
| 156
|
py
|
Python
|
Numpy/Zeros and Ones.py
|
Code-With-Aagam/python-hackerrank
|
270c75cf2ca30916183c7fe5ca130a64c7a8ed6d
|
[
"MIT"
] | 3
|
2022-03-05T15:38:26.000Z
|
2022-03-09T13:39:30.000Z
|
Numpy/Zeros and Ones.py
|
Code-With-Aagam/python-hackerrank
|
270c75cf2ca30916183c7fe5ca130a64c7a8ed6d
|
[
"MIT"
] | null | null | null |
Numpy/Zeros and Ones.py
|
Code-With-Aagam/python-hackerrank
|
270c75cf2ca30916183c7fe5ca130a64c7a8ed6d
|
[
"MIT"
] | null | null | null |
import numpy
numbers = tuple(map(int, input().split()))
print(numpy.zeros(numbers, dtype = numpy.int), numpy.ones(numbers, dtype = numpy.int), sep='\n')
| 39
| 97
| 0.685897
| 23
| 156
| 4.652174
| 0.608696
| 0.224299
| 0.317757
| 0.373832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 156
| 3
| 98
| 52
| 0.775362
| 0
| 0
| 0
| 0
| 0
| 0.013072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
cdfcb7135cb12a5b623eb4ec15e364706908105e
| 136
|
py
|
Python
|
quanttrader/event/__init__.py
|
qalpha/quanttrader
|
e5c407f76c9d0beeccaa8735052a7e7717f0bed6
|
[
"Apache-2.0"
] | 135
|
2020-09-07T01:07:18.000Z
|
2022-03-31T23:04:40.000Z
|
quanttrader/event/__init__.py
|
qalpha/quanttrader
|
e5c407f76c9d0beeccaa8735052a7e7717f0bed6
|
[
"Apache-2.0"
] | 4
|
2021-04-09T22:13:48.000Z
|
2021-12-23T02:10:37.000Z
|
quanttrader/event/__init__.py
|
qalpha/quanttrader
|
e5c407f76c9d0beeccaa8735052a7e7717f0bed6
|
[
"Apache-2.0"
] | 51
|
2020-09-08T00:18:45.000Z
|
2022-03-28T19:42:44.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .event import *
from .backtest_event_engine import *
from .live_event_engine import *
| 27.2
| 36
| 0.720588
| 20
| 136
| 4.7
| 0.65
| 0.212766
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008475
| 0.132353
| 136
| 5
| 37
| 27.2
| 0.788136
| 0.308824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b566e5eeb4fb4e4ad38acb2696332b814b5a6148
| 5,941
|
py
|
Python
|
tests/test_nodes_types.py
|
WaylonWalker/find-kedro
|
c985b173795afed551d3b2db057b8d6eac7b6067
|
[
"MIT"
] | 17
|
2020-04-13T11:34:43.000Z
|
2021-12-19T11:53:03.000Z
|
tests/test_nodes_types.py
|
WaylonWalker/find-kedro
|
c985b173795afed551d3b2db057b8d6eac7b6067
|
[
"MIT"
] | 10
|
2020-04-15T00:28:01.000Z
|
2020-10-12T17:01:00.000Z
|
tests/test_nodes_types.py
|
WaylonWalker/find-kedro
|
c985b173795afed551d3b2db057b8d6eac7b6067
|
[
"MIT"
] | 4
|
2020-04-14T14:10:32.000Z
|
2021-03-17T04:40:36.000Z
|
"""
This module tests the creation of pipeline nodes from various different types
and combinations of types.
"""
import textwrap
import pytest
from find_kedro import find_kedro
contents = [
(
"single_nodes",
2,
"""\
from kedro.pipeline import node
node_a_b = node(lambda x: x, "a", "b", name="a_b")
node_b_c = node(lambda x: x, "b", "c", name="b_c")
""",
),
(
"list_nodes",
2,
"""\
from kedro.pipeline import node
nodes = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
]
""",
),
(
"set_nodes",
2,
"""\
from kedro.pipeline import node
nodes = {
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
}
""",
),
(
"tuple_nodes",
2,
"""\
from kedro.pipeline import node
nodes = (
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
)
""",
),
(
"pipeline_nodes",
2,
"""\
from kedro.pipeline import node, Pipeline
nodes = Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
])
""",
),
(
"pipeline_list_nodes",
4,
"""\
from kedro.pipeline import node, Pipeline
nodes_pipeline = Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
])
nodes_list = [
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
]
""",
),
(
"pipeline_nodes_nodes",
4,
"""\
from kedro.pipeline import node, Pipeline
nodes_pipeline = Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
])
node_a2 = node(lambda x: x, "a2", "b2", name="a_b2")
node_b2 = node(lambda x: x, "b2", "c2", name="b_c2")
""",
),
(
"list_nodes_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
]
node_a2 = node(lambda x: x, "a2", "b2", name="a_b2")
node_b2 = node(lambda x: x, "b2", "c2", name="b_c2")
""",
),
(
"dynamic_list_nodes",
100,
"""\
from kedro.pipeline import node
nodes_pipeline = [ node(lambda x: x, f"a{n}", f"a{n+1}", name=f"a{n}_a{n+1}") for n in range(100)]
""",
),
(
"dynamic_pipeline_nodes",
100,
"""\
from kedro.pipeline import node, Pipeline
nodes_pipeline = Pipeline([ node(lambda x: x, f"a{n}", f"a{n+1}", name=f"a{n}_a{n+1}") for n in range(100)])
""",
),
(
"nested_list_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
[
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
]
]
""",
),
(
"nested_tuple_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = (
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
(
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
)
)
""",
),
(
"nested_set_nodes",
4,
"""\
from kedro.pipeline import node
nodes_pipeline = {
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
(
node(lambda x: x, "a2", "b2", name="a_b2"),
node(lambda x: x, "b2", "c2", name="b_c2"),
)
}
""",
),
(
"function_nodes",
2,
"""\
from kedro.pipeline import Pipeline, node
def create_pipeline():
return Pipeline([
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c"),
]
)
""",
),
(
"function_single_nodes",
4,
"""\
from kedro.pipeline import Pipeline, node
node_a_b = node(lambda x: x, "a", "b", name="a_b")
node_b_c = node(lambda x: x, "b", "c", name="b_c")
def create_pipeline():
return Pipeline([
node(lambda x: x, "fa", "fb", name="fa_fb"),
node(lambda x: x, "fb", "fc", name="fb_fc"),
]
)
""",
),
(
"function_list_nodes",
4,
"""\
from kedro.pipeline import Pipeline, node
nodes = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
]
def create_pipeline():
return Pipeline([
node(lambda x: x, "fa", "fb", name="fa_fb"),
node(lambda x: x, "fb", "fc", name="fb_fc"),
]
)
""",
),
(
"list_create_pipeline",
2,
"""\
from kedro.pipeline import Pipeline, node
creaste_pipeline = [
node(lambda x: x, "a", "b", name="a_b"),
node(lambda x: x, "b", "c", name="b_c")
]
""",
),
]
@pytest.mark.parametrize("name, num_nodes, content", contents)
def test_create_file(tmpdir, name, num_nodes, content):
    """Write ``content`` to nodes/<name>.py and verify find_kedro collects it.

    Checks both the discovered pipeline names and that the ``__default__``
    pipeline contains the expected number of nodes.
    """
    module_path = tmpdir.mkdir("nodes").join(f"{name}.py")
    module_path.write(textwrap.dedent(content))
    discovered = find_kedro(directory=tmpdir, verbose=True)
    assert list(discovered.keys()) == [f"nodes.{name}", "__default__"]
    collected = len(discovered["__default__"].nodes)
    assert collected == num_nodes, f"did not collect all nodes from {name}.py"
    # the nodes/ directory should be the only entry created under tmpdir
    assert len(tmpdir.listdir()) == 1
| 23.298039
| 112
| 0.46541
| 772
| 5,941
| 3.430052
| 0.102332
| 0.181269
| 0.199396
| 0.217523
| 0.771148
| 0.771148
| 0.76926
| 0.721299
| 0.673338
| 0.664653
| 0
| 0.018433
| 0.342535
| 5,941
| 254
| 113
| 23.389764
| 0.659498
| 0.017505
| 0
| 0.34
| 0
| 0
| 0.239223
| 0.026108
| 0
| 0
| 0
| 0
| 0.03
| 1
| 0.01
| false
| 0
| 0.03
| 0
| 0.04
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a90950ed1600fc999adb1ceef6efd388d590ad2b
| 17,787
|
py
|
Python
|
ondewo/survey/survey_pb2_grpc.py
|
ondewo/ondewo-survey-client-python
|
de95b8480051780cc1fdae21158f9626399efebe
|
[
"Apache-2.0"
] | null | null | null |
ondewo/survey/survey_pb2_grpc.py
|
ondewo/ondewo-survey-client-python
|
de95b8480051780cc1fdae21158f9626399efebe
|
[
"Apache-2.0"
] | null | null | null |
ondewo/survey/survey_pb2_grpc.py
|
ondewo/ondewo-survey-client-python
|
de95b8480051780cc1fdae21158f9626399efebe
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from ondewo.survey import survey_pb2 as ondewo_dot_survey_dot_survey__pb2
class SurveysStub(object):
    """///// Services ///////
    """
    # Client-side stub (generated code — do not hand-edit logic): exposes one
    # unary-unary callable per RPC of the ondewo.survey.Surveys service.

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.CreateSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/CreateSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
                )
        self.GetSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/GetSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
                )
        self.UpdateSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/UpdateSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
                )
        self.DeleteSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/DeleteSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.ListSurveys = channel.unary_unary(
                '/ondewo.survey.Surveys/ListSurveys',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,
                )
        self.GetSurveyAnswers = channel.unary_unary(
                '/ondewo.survey.Surveys/GetSurveyAnswers',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
                )
        self.GetAllSurveyAnswers = channel.unary_unary(
                '/ondewo.survey.Surveys/GetAllSurveyAnswers',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
                )
        self.CreateAgentSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/CreateAgentSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
                )
        self.UpdateAgentSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/UpdateAgentSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
                response_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
                )
        self.DeleteAgentSurvey = channel.unary_unary(
                '/ondewo.survey.Surveys/DeleteAgentSurvey',
                request_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
class SurveysServicer(object):
    """///// Services ///////
    """
    # Server-side skeleton (generated code — do not hand-edit logic):
    # subclass and override the methods below; each default implementation
    # reports UNIMPLEMENTED to the caller.

    def CreateSurvey(self, request, context):
        """Create a Survey and an empty NLU Agent for it
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetSurvey(self, request, context):
        """Retrieve a Survey message from the Database and return it
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateSurvey(self, request, context):
        """Update an existing Survey message from the Database and return it
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteSurvey(self, request, context):
        """Delete a survey and its associated agent (if existent)
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListSurveys(self, request, context):
        """Returns the list of all surveys in the server
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetSurveyAnswers(self, request, context):
        """Retrieve answers to survey questions collected in interactions with a survey agent for a specific session
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAllSurveyAnswers(self, request, context):
        """Retrieve all answers to survey questions collected in interactions with a survey agent in any session
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CreateAgentSurvey(self, request, context):
        """Populate and configures an NLU Agent from a Survey
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateAgentSurvey(self, request, context):
        """Update an NLU agent from a survey
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteAgentSurvey(self, request, context):
        """Deletes all data of an NLU agent associated to a survey
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_SurveysServicer_to_server(servicer, server):
    # Register every RPC of ondewo.survey.Surveys on the given server,
    # routing each method name to the corresponding servicer callback
    # (generated code — do not hand-edit logic).
    rpc_method_handlers = {
            'CreateSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
            ),
            'GetSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.GetSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
            ),
            'UpdateSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.Survey.SerializeToString,
            ),
            'DeleteSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'ListSurveys': grpc.unary_unary_rpc_method_handler(
                    servicer.ListSurveys,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.SerializeToString,
            ),
            'GetSurveyAnswers': grpc.unary_unary_rpc_method_handler(
                    servicer.GetSurveyAnswers,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString,
            ),
            'GetAllSurveyAnswers': grpc.unary_unary_rpc_method_handler(
                    servicer.GetAllSurveyAnswers,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.SerializeToString,
            ),
            'CreateAgentSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateAgentSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString,
            ),
            'UpdateAgentSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdateAgentSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
                    response_serializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.SerializeToString,
            ),
            'DeleteAgentSurvey': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteAgentSurvey,
                    request_deserializer=ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'ondewo.survey.Surveys', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Surveys(object):
    """///// Services ///////
    """
    # Connectionless convenience API (generated code — do not hand-edit
    # logic): each static method issues a single unary-unary call via
    # grpc.experimental without requiring a pre-built stub.

    @staticmethod
    def CreateSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/CreateSurvey',
            ondewo_dot_survey_dot_survey__pb2.CreateSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetSurvey',
            ondewo_dot_survey_dot_survey__pb2.GetSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/UpdateSurvey',
            ondewo_dot_survey_dot_survey__pb2.UpdateSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.Survey.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/DeleteSurvey',
            ondewo_dot_survey_dot_survey__pb2.DeleteSurveyRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListSurveys(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/ListSurveys',
            ondewo_dot_survey_dot_survey__pb2.ListSurveysRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.ListSurveysResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetSurveyAnswers(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetSurveyAnswers',
            ondewo_dot_survey_dot_survey__pb2.GetSurveyAnswersRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetAllSurveyAnswers(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/GetAllSurveyAnswers',
            ondewo_dot_survey_dot_survey__pb2.GetAllSurveyAnswersRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.SurveyAnswersResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def CreateAgentSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/CreateAgentSurvey',
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdateAgentSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/UpdateAgentSurvey',
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteAgentSurvey(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ondewo.survey.Surveys/DeleteAgentSurvey',
            ondewo_dot_survey_dot_survey__pb2.AgentSurveyRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 46.685039
| 116
| 0.673638
| 1,646
| 17,787
| 6.919198
| 0.083232
| 0.086926
| 0.072438
| 0.086926
| 0.827904
| 0.823514
| 0.787075
| 0.739749
| 0.631223
| 0.620774
| 0
| 0.004838
| 0.256255
| 17,787
| 380
| 117
| 46.807895
| 0.856074
| 0.057739
| 0
| 0.626198
| 1
| 0
| 0.082063
| 0.045924
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070288
| false
| 0
| 0.009585
| 0.031949
| 0.121406
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a964b2997c586074953131a0e79b0d461e2198b2
| 610
|
py
|
Python
|
home/views.py
|
sa-y-an/Qriosity2.0
|
f0a46533881a6a7f8cd548eadbc72570396b1141
|
[
"Apache-2.0"
] | null | null | null |
home/views.py
|
sa-y-an/Qriosity2.0
|
f0a46533881a6a7f8cd548eadbc72570396b1141
|
[
"Apache-2.0"
] | 2
|
2020-06-30T16:28:26.000Z
|
2020-07-25T21:35:31.000Z
|
home/views.py
|
sa-y-an/Qriosity2.0
|
f0a46533881a6a7f8cd548eadbc72570396b1141
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
from django.contrib.auth.decorators import user_passes_test
# Create your views here.
def not_logged_in(user):
    """Return True when ``user`` is not authenticated (anonymous visitor)."""
    authenticated = user.is_authenticated
    return not authenticated
def base(request):
    """Render the site base (skeleton) template."""
    template_name = 'home/base.html'
    return render(request, template_name)
def home(request):
    """Render the landing page."""
    template_name = 'home/home.html'
    return render(request, template_name)
def hello(request):
    """Render the hello page."""
    template_name = 'home/hello.html'
    return render(request, template_name)
@user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None)
def login(request):
    """Render the login page.

    The decorator only admits anonymous users; per its arguments,
    users failing the test are redirected to /user/dashboard.
    """
    template_name = 'home/login.html'
    return render(request, template_name)
def rules(request):
    """Render the rules page (template file is named rule.html)."""
    template_name = 'home/rule.html'
    return render(request, template_name)
| 20.333333
| 87
| 0.747541
| 87
| 610
| 5.103448
| 0.413793
| 0.146396
| 0.213964
| 0.292793
| 0.337838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136066
| 610
| 29
| 88
| 21.034483
| 0.842505
| 0.037705
| 0
| 0
| 0
| 0
| 0.148718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.133333
| 0.133333
| 0.4
| 0.933333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
8da8965aae91afb60db143801de21fdd9d462bf9
| 34
|
py
|
Python
|
build/lib/abp/adaptives/a3c/__init__.py
|
LinearZoetrope/abp
|
2459c1b4d77606c1d70715ce8378d738ba102f37
|
[
"MIT"
] | null | null | null |
build/lib/abp/adaptives/a3c/__init__.py
|
LinearZoetrope/abp
|
2459c1b4d77606c1d70715ce8378d738ba102f37
|
[
"MIT"
] | 1
|
2018-10-17T03:28:08.000Z
|
2018-10-17T03:28:08.000Z
|
build/lib/abp/adaptives/a3c/__init__.py
|
Zaerei/abp
|
2459c1b4d77606c1d70715ce8378d738ba102f37
|
[
"MIT"
] | null | null | null |
from .adaptive import A2CAdaptive
| 17
| 33
| 0.852941
| 4
| 34
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0.117647
| 34
| 1
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a5c33e8021500981ea9dacedf0445d972cc86986
| 112
|
py
|
Python
|
pycomlink/core/__init__.py
|
jpolz/pycomlink
|
bd15ed4dd55fb2735578b18194bb3e9966cb40d9
|
[
"BSD-3-Clause"
] | 1
|
2018-09-24T03:38:38.000Z
|
2018-09-24T03:38:38.000Z
|
pycomlink/core/__init__.py
|
jayapudashine/pycomlink
|
8670e4492d0bf439ea238be2bd6f69df460b8d41
|
[
"BSD-3-Clause"
] | null | null | null |
pycomlink/core/__init__.py
|
jayapudashine/pycomlink
|
8670e4492d0bf439ea238be2bd6f69df460b8d41
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
from .comlink_channel import ComlinkChannel
from .comlink import Comlink
| 28
| 43
| 0.875
| 14
| 112
| 6.571429
| 0.5
| 0.23913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 112
| 3
| 44
| 37.333333
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a5e5c81a740363e4e1a66ba1880208d27198fe54
| 30
|
py
|
Python
|
qiradb/qiradb/__init__.py
|
pAplakidis/qira
|
9d0aa4f887e9dce8c0307a54c7051755ede26810
|
[
"MIT"
] | null | null | null |
qiradb/qiradb/__init__.py
|
pAplakidis/qira
|
9d0aa4f887e9dce8c0307a54c7051755ede26810
|
[
"MIT"
] | null | null | null |
qiradb/qiradb/__init__.py
|
pAplakidis/qira
|
9d0aa4f887e9dce8c0307a54c7051755ede26810
|
[
"MIT"
] | 1
|
2020-12-02T16:32:17.000Z
|
2020-12-02T16:32:17.000Z
|
from qiradb._qiradb import *
| 10
| 28
| 0.766667
| 4
| 30
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 2
| 29
| 15
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9393225e5e0a5151b73339e6a6ac639b6c27cdc5
| 201
|
py
|
Python
|
sebs/__init__.py
|
opal-mimuw/serverless-benchmarks
|
16ac3988b3891a6ad2ae91e7c7175315d924c70c
|
[
"BSD-3-Clause"
] | 35
|
2020-12-30T19:31:30.000Z
|
2022-03-28T11:10:00.000Z
|
sebs/__init__.py
|
opal-mimuw/serverless-benchmarks
|
16ac3988b3891a6ad2ae91e7c7175315d924c70c
|
[
"BSD-3-Clause"
] | 24
|
2021-01-04T15:37:05.000Z
|
2022-03-14T00:45:20.000Z
|
sebs/__init__.py
|
opal-mimuw/serverless-benchmarks
|
16ac3988b3891a6ad2ae91e7c7175315d924c70c
|
[
"BSD-3-Clause"
] | 10
|
2021-06-13T13:13:39.000Z
|
2021-12-20T22:05:50.000Z
|
from .sebs import SeBS # noqa
from .aws import * # noqa
from .azure import * # noqa
from .cache import Cache # noqa
from .benchmark import Benchmark # noqa
# from .experiments import * # noqa
| 20.1
| 40
| 0.691542
| 27
| 201
| 5.148148
| 0.333333
| 0.28777
| 0.201439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.228856
| 201
| 9
| 41
| 22.333333
| 0.896774
| 0.288557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
93d72784430a6d56d8037d0831c2643c8357d829
| 6,089
|
py
|
Python
|
tests/lib/result_reporters/pyne_stat_summary_reporter_test.py
|
Avvir/pyne
|
864885a8fb632b72c00af164f150b1daa38a346f
|
[
"MIT"
] | 4
|
2018-08-10T20:05:10.000Z
|
2019-07-24T15:29:32.000Z
|
tests/lib/result_reporters/pyne_stat_summary_reporter_test.py
|
Avvir/pyne
|
864885a8fb632b72c00af164f150b1daa38a346f
|
[
"MIT"
] | 6
|
2018-09-25T20:15:51.000Z
|
2021-12-22T17:09:52.000Z
|
tests/lib/result_reporters/pyne_stat_summary_reporter_test.py
|
Avvir/pyne
|
864885a8fb632b72c00af164f150b1daa38a346f
|
[
"MIT"
] | null | null | null |
from pynetest.expectations import expect
from pynetest.lib.result_reporters.printing_reporter import PrintingReporter
from pynetest.lib.result_reporters.pyne_result_reporters import PyneStatSummaryReporter
from pynetest.lib.pyne_test_blocks import ItBlock, DescribeBlock
from tests.test_helpers.fake_print import StubPrint, printed_text
def test__report_failure__increases_the_failure_count():
    """Two reported failures should leave failure_count at 2."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for _ in range(2):
        subject.report_failure(leaf, leaf, Exception("some exception"), 0)
    expect(subject.stats.failure_count).to_be(2)
def test__report_failure__increases_the_test_run_count():
    """Every reported failure should also count as a run test."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for _ in range(2):
        subject.report_failure(leaf, leaf, Exception("some exception"), 0)
    expect(subject.stats.test_count).to_be(2)
def test__report_failure__sets_overall_failure():
    """A single failure should flip the overall is_failure flag."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    subject.report_failure(leaf, leaf, Exception("some exception"), 0)
    expect(subject.stats.is_failure).to_be(True)
def test__report_failure__increases_the_total_timing():
    """Failure timings (1000ms + 20ms) should accumulate to 1020ms."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for millis in (1000, 20):
        subject.report_failure(leaf, leaf, Exception("some exception"), millis)
    expect(subject.stats.total_timing_millis).to_be(1020)
def test__report_success__increases_the_test_run_count():
    """Every reported success should count as a run test."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for _ in range(2):
        subject.report_success(leaf, 0)
    expect(subject.stats.test_count).to_be(2)
def test__report_success__increases_the_passes_count():
    """Two reported successes should leave pass_count at 2."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for _ in range(2):
        subject.report_success(leaf, 0)
    expect(subject.stats.pass_count).to_be(2)
def test__report_success__increases_the_total_timing():
    """Success timings (10ms + 300ms) should accumulate to 310ms."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for millis in (10, 300):
        subject.report_success(leaf, millis)
    expect(subject.stats.total_timing_millis).to_be(310)
def test__report_pending__increases_the_test_run_count():
    """Pending tests should still be counted as run tests."""
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for _ in range(2):
        subject.report_pending(leaf)
    expect(subject.stats.test_count).to_be(2)
def test__report_enter_context__increases_depth():
    """Depth should go 0 -> 1 -> 2 as describe contexts are entered."""
    context = DescribeBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    for expected_depth in (1, 2):
        subject.report_enter_context(context)
        expect(subject.depth).to_be(expected_depth)
def test__report_exit_context__decreases_depth():
    """Depth should step back down (2 -> 1 -> 0) as contexts are exited."""
    context = DescribeBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    subject.report_enter_context(context)
    subject.report_enter_context(context)
    for expected_depth in (1, 0):
        subject.report_exit_context(context)
        expect(subject.depth).to_be(expected_depth)
def test__report_end_result__when_a_test_has_failed__it_prints_stats():
    """One failure (1s) plus two passes (0.5s each) prints a summary line."""
    with StubPrint():
        subject = PrintingReporter(PyneStatSummaryReporter())
        leaf = ItBlock(None, None, None)
        subject.report_failure(leaf, leaf, Exception("some exception"), 1000)
        for _ in range(2):
            subject.report_success(leaf, 500)
        # only capture what report_end_result itself prints
        printed_text.clear()
        subject.report_end_result()
        expect(printed_text[0]).to_contain("1 failed, 2 passed in 2.00 seconds")
def test__report_end_result__when_all_tests_passed__it_prints_stats():
    """Two passes totalling 1.5s print an all-passed summary line."""
    with StubPrint():
        subject = PrintingReporter(PyneStatSummaryReporter())
        leaf = ItBlock(None, None, None)
        for millis in (1000, 500):
            subject.report_success(leaf, millis)
        # only capture what report_end_result itself prints
        printed_text.clear()
        subject.report_end_result()
        expect(printed_text[0]).to_contain("2 passed in 1.50 seconds")
def test__report_end_result__test_is_pending__reports_stats():
    """A pass plus a pending test are both reflected in the summary."""
    with StubPrint():
        subject = PrintingReporter(PyneStatSummaryReporter())
        passing_leaf = ItBlock(None, None, None)
        pending_leaf = ItBlock(None, None, None, pending=True)
        subject.report_success(passing_leaf, 1000)
        subject.report_pending(pending_leaf)
        # only capture what report_end_result itself prints
        printed_text.clear()
        subject.report_end_result()
        expect(printed_text[0]).to_contain("1 passed, 1 pending in 1.00 seconds")
def test__report_end_result__when_no_tests_run__reports_stats():
    """With nothing reported, the summary states that zero tests ran."""
    with StubPrint():
        subject = PrintingReporter(PyneStatSummaryReporter())
        printed_text.clear()
        subject.report_end_result()
        expect(printed_text[0]).to_contain("Ran 0 tests")
def test__reset__sets_stats_to_0():
    """reset() should zero every counter regardless of prior activity."""
    context = DescribeBlock(None, None, None)
    leaf = ItBlock(None, None, None)
    subject = PyneStatSummaryReporter()
    # accumulate some non-zero state first
    subject.report_enter_context(context)
    subject.report_enter_context(context)
    subject.report_success(leaf, 1000)
    for _ in range(2):
        subject.report_failure(leaf, leaf, Exception("some exception"), 1000)
    subject.reset()
    expect(subject.stats.pass_count).to_be(0)
    expect(subject.stats.is_failure).to_be(False)
    expect(subject.stats.total_timing_millis).to_be(0)
    expect(subject.stats.failure_count).to_be(0)
    expect(subject.stats.test_count).to_be(0)
    expect(subject.depth).to_be(0)
| 31.386598
| 87
| 0.758088
| 781
| 6,089
| 5.519846
| 0.111396
| 0.07794
| 0.044537
| 0.05428
| 0.869172
| 0.833217
| 0.785665
| 0.716771
| 0.63164
| 0.63164
| 0
| 0.018108
| 0.147479
| 6,089
| 193
| 88
| 31.549223
| 0.812368
| 0
| 0
| 0.638655
| 0
| 0
| 0.040072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12605
| false
| 0.07563
| 0.042017
| 0
| 0.168067
| 0.10084
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
9e1fdc9a600b4684f339dbefa0d8f6573d3d1c37
| 36
|
py
|
Python
|
currency_calculator/__init__.py
|
jie17/PyCurrency
|
d5cf806d93e10665aa171f06178122a1cd3f5f68
|
[
"MIT"
] | 1
|
2020-06-27T17:52:45.000Z
|
2020-06-27T17:52:45.000Z
|
currency_calculator/__init__.py
|
jie17/PyCurrency
|
d5cf806d93e10665aa171f06178122a1cd3f5f68
|
[
"MIT"
] | null | null | null |
currency_calculator/__init__.py
|
jie17/PyCurrency
|
d5cf806d93e10665aa171f06178122a1cd3f5f68
|
[
"MIT"
] | 1
|
2020-06-27T17:52:02.000Z
|
2020-06-27T17:52:02.000Z
|
from .PyCurrency import get, convert
| 36
| 36
| 0.833333
| 5
| 36
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f5089b73f107e465258ad67f85aecb83f2187250
| 140
|
py
|
Python
|
error_propagation/__init__.py
|
davidreissmello/error_propagation
|
ce6600498dabfcf494243edd3a40557dbe8dc709
|
[
"MIT"
] | 4
|
2021-09-14T02:21:06.000Z
|
2021-11-11T15:33:08.000Z
|
error_propagation/__init__.py
|
davidreissmello/error_propagation
|
ce6600498dabfcf494243edd3a40557dbe8dc709
|
[
"MIT"
] | null | null | null |
error_propagation/__init__.py
|
davidreissmello/error_propagation
|
ce6600498dabfcf494243edd3a40557dbe8dc709
|
[
"MIT"
] | null | null | null |
from error_propagation.core import arrays_to_complex
from error_propagation.core import Complex
__all__ = ["Complex", "arrays_to_complex"]
| 28
| 52
| 0.835714
| 19
| 140
| 5.631579
| 0.473684
| 0.168224
| 0.373832
| 0.448598
| 0.560748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092857
| 140
| 4
| 53
| 35
| 0.84252
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f5714d822b10b4811a7719770de14c2d4d98cf7d
| 2,081
|
py
|
Python
|
control_produccion/migrations/0008_auto_20160808_1812.py
|
asapper/lito-produccion
|
792ac029323106012f654988e6c002f63e50d883
|
[
"Unlicense"
] | null | null | null |
control_produccion/migrations/0008_auto_20160808_1812.py
|
asapper/lito-produccion
|
792ac029323106012f654988e6c002f63e50d883
|
[
"Unlicense"
] | null | null | null |
control_produccion/migrations/0008_auto_20160808_1812.py
|
asapper/lito-produccion
|
792ac029323106012f654988e6c002f63e50d883
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-08-09 00:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('control_produccion', '0007_auto_20160623_1052'),
]
operations = [
migrations.RemoveField(
model_name='order',
name='order_date_created',
),
migrations.RemoveField(
model_name='order',
name='order_machine',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_datetime_finished',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_datetime_pause_start',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_datetime_started',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_is_paused',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_seconds_paused',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_user_finished',
),
migrations.RemoveField(
model_name='order_process',
name='order_process_user_started',
),
migrations.AddField(
model_name='order',
name='order_sh_id',
field=models.PositiveSmallIntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='process',
name='process_group_sh_id',
field=models.PositiveSmallIntegerField(default=0),
preserve_default=False,
),
migrations.AddField(
model_name='process',
name='process_sh_id',
field=models.PositiveSmallIntegerField(default=0),
preserve_default=False,
),
]
| 29.728571
| 62
| 0.580971
| 185
| 2,081
| 6.194595
| 0.324324
| 0.157068
| 0.195462
| 0.235602
| 0.760035
| 0.739965
| 0.739965
| 0.663176
| 0.663176
| 0.612565
| 0
| 0.024788
| 0.32148
| 2,081
| 69
| 63
| 30.15942
| 0.786827
| 0.032196
| 0
| 0.677419
| 1
| 0
| 0.215813
| 0.110393
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.080645
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f5a6eef0aa361653b96d330f127c07657f80053d
| 12,510
|
py
|
Python
|
utest/x3270/test_assertions.py
|
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library
|
76b589d58c55a39f96c027a8ae28c41fa37ed445
|
[
"MIT"
] | 3
|
2018-10-02T14:32:06.000Z
|
2018-10-02T14:33:32.000Z
|
utest/x3270/test_assertions.py
|
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library
|
76b589d58c55a39f96c027a8ae28c41fa37ed445
|
[
"MIT"
] | null | null | null |
utest/x3270/test_assertions.py
|
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library
|
76b589d58c55a39f96c027a8ae28c41fa37ed445
|
[
"MIT"
] | null | null | null |
import re
import pytest
from pytest_mock import MockerFixture
from robot.api import logger
from Mainframe3270.x3270 import x3270
def test_page_should_contain_string(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
mocker.patch("robot.api.logger.info")
under_test.page_should_contain_string("abc")
logger.info.assert_called_with('The string "abc" was found')
def test_page_should_contain_string_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="aBc")
mocker.patch("robot.api.logger.info")
under_test.page_should_contain_string("abc", ignore_case=True)
logger.info.assert_called_with('The string "abc" was found')
def test_page_should_contain_string_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match='The string "def" was not found'):
under_test.page_should_contain_string("def")
def test_page_should_contain_string_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_contain_string("def", error_message="my error message")
def test_page_should_not_contain_string(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_not_contain_string("ABC")
def test_page_should_not_contain_string_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_not_contain_string("def", ignore_case=True)
def test_page_should_not_contain_string_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match='The string "ABC" was found'):
under_test.page_should_not_contain_string("ABC", ignore_case=True)
def test_page_should_not_contain_string_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_not_contain_string(
"abc", error_message="my error message"
)
def test_page_should_contain_any_string(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_contain_any_string(["abc", "def"])
def test_page_should_contain_any_string_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_contain_any_string(["ABC", "def"], ignore_case=True)
def test_page_should_contain_any_string_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(
Exception, match=re.escape("The strings \"['def', 'ghi']\" were not found")
):
under_test.page_should_contain_any_string(["def", "ghi"])
def test_page_should_contain_any_string_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_contain_any_string(
["def", "ghi"], error_message="my error message"
)
def test_page_should_contain_all_strings(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", side_effect=["abc", "def"])
under_test.page_should_contain_all_strings(["abc", "def"])
def test_page_should_contain_all_strings_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", side_effect=["AbC", "DeF"])
under_test.page_should_contain_all_strings(["abc", "def"], ignore_case=True)
def test_page_should_contain_all_strings_fails(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value=["def"])
with pytest.raises(Exception, match='The string "ghi" was not found'):
under_test.page_should_contain_all_strings(["def", "ghi"])
def test_page_should_contain_all_strings_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_contain_all_strings(
["abc", "def"], error_message="my error message"
)
def test_page_should_not_contain_any_string(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_not_contain_any_string(["def", "ghi"])
def test_page_should_not_contain_any_string_fails(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match='The string "abc" was found'):
under_test.page_should_not_contain_any_string(["abc", "def"])
def test_page_should_not_contain_any_string_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="ABC")
with pytest.raises(Exception, match='The string "abc" was found'):
under_test.page_should_not_contain_any_string(["abc", "def"], ignore_case=True)
def test_page_should_not_contain_any_string_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_not_contain_any_string(
["abc", "def"], error_message="my error message"
)
def test_page_should_not_contain_all_strings(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_not_contain_all_strings(["def", "ghi"])
def test_page_should_not_contain_all_strings_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match='The string "abc" was found'):
under_test.page_should_not_contain_all_strings(["ABC", "def"], ignore_case=True)
def test_page_should_not_contain_all_strings_fails(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match='The string "abc" was found'):
under_test.page_should_not_contain_all_strings(["abc", "def"])
def test_page_should_not_contain_all_strings_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_not_contain_all_strings(
["abc", "def"], error_message="my error message"
)
def test_page_should_contain_string_x_times(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="a")
under_test.page_should_contain_string_x_times("a", 24)
def test_page_should_contain_string_x_times_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="a")
under_test.page_should_contain_string_x_times("A", 24, ignore_case=True)
def test_page_should_contain_string_x_times_fails(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="a")
with pytest.raises(
Exception, match='The string "a" was not found "1" times, it appears "24" times'
):
under_test.page_should_contain_string_x_times("a", 1)
with pytest.raises(
Exception, match='The string "b" was not found "1" times, it appears "0" times'
):
under_test.page_should_contain_string_x_times("b", 1)
def test_page_should_contain_string_x_times_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="a")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_contain_string_x_times(
"b", 1, error_message="my error message"
)
def test_page_should_match_regex(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_match_regex(r"\w+")
def test_page_should_match_regex_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(
Exception, match=re.escape(r'No matches found for "\d+" pattern')
):
under_test.page_should_match_regex(r"\d+")
def test_page_should_not_match_regex(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_not_match_regex(r"\d+")
def test_page_should_not_match_regex_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="a")
with pytest.raises(
Exception, match=re.escape('There are matches found for "[a]+" pattern')
):
under_test.page_should_not_match_regex("[a]+")
def test_page_should_contain_match(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_contain_match("*a?c*")
def test_page_should_contain_match_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(
Exception, match=re.escape('No matches found for "*e?g*" pattern')
):
under_test.page_should_contain_match("*e?g*")
def test_page_should_contain_match_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="ABC")
under_test.page_should_contain_match("*a?c*", ignore_case=True)
def test_page_should_contain_match_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_contain_match("*def*", error_message="my error message")
def test_page_should_not_contain_match(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
under_test.page_should_not_contain_match("*def*")
def test_page_should_not_contain_match_fails(mocker: MockerFixture, under_test: x3270):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(
Exception, match=re.escape('There are matches found for "*abc*" pattern')
):
under_test.page_should_not_contain_match("*abc*")
def test_page_should_not_contain_match_ignore_case(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(
Exception, match=re.escape('There are matches found for "*abc*" pattern')
):
under_test.page_should_not_contain_match("*ABC*", ignore_case=True)
def test_page_should_not_contain_match_custom_message(
mocker: MockerFixture, under_test: x3270
):
mocker.patch("Mainframe3270.py3270.Emulator.string_get", return_value="abc")
with pytest.raises(Exception, match="my error message"):
under_test.page_should_not_contain_match(
"*abc*", error_message="my error message"
)
| 35.338983
| 88
| 0.756115
| 1,677
| 12,510
| 5.282051
| 0.044723
| 0.073154
| 0.12802
| 0.0972
| 0.978776
| 0.977196
| 0.956311
| 0.924023
| 0.893543
| 0.855385
| 0
| 0.046366
| 0.131095
| 12,510
| 353
| 89
| 35.439093
| 0.768537
| 0
| 0
| 0.517544
| 0
| 0
| 0.22534
| 0.131255
| 0
| 0
| 0
| 0
| 0.008772
| 1
| 0.175439
| false
| 0
| 0.02193
| 0
| 0.197368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
19af211940ee38a124dd9e3f143ff16cf23bb7bf
| 37
|
py
|
Python
|
scrabbler/__init__.py
|
astralcai/scrabble-solver
|
9d4729f796850fa02601808a48f2c60e2f51211e
|
[
"MIT"
] | 3
|
2019-11-14T02:13:19.000Z
|
2021-05-03T14:06:24.000Z
|
scrabbler/__init__.py
|
astralcai/scrabbler
|
9d4729f796850fa02601808a48f2c60e2f51211e
|
[
"MIT"
] | 3
|
2021-04-05T01:09:16.000Z
|
2021-04-19T20:19:30.000Z
|
scrabbler/__init__.py
|
jfoley-yw/scrabble
|
049a69572138b06341af163ec69e18a1eb20b737
|
[
"MIT"
] | null | null | null |
from scrabbler.scrabbler import Game
| 18.5
| 36
| 0.864865
| 5
| 37
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5fee604c0ab8aa758b628917188bda24ca6b4666
| 2,679
|
py
|
Python
|
backend/tests/session/test_session_login_user.py
|
fjacob21/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | null | null | null |
backend/tests/session/test_session_login_user.py
|
fjacob21/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | 88
|
2016-11-12T14:54:38.000Z
|
2018-08-02T00:25:07.000Z
|
backend/tests/session/test_session_login_user.py
|
mididecouverte/mididecweb
|
b65f28eb6fdeafa265796b6190a4264a5eac54ce
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timedelta
import pytz
from bcrypt_hash import BcryptHash
import pytest
from src.users import Users
from src.events import Events
from src.stores import MemoryStore
from src.session import Session
def test_login_user():
store = MemoryStore()
users = Users(store)
params = {}
params['password'] = 'password'
session = Session(params, store, '')
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
user.validated = True
with pytest.raises(Exception):
session.login('')
loging_dict = session.login('test')
assert loging_dict
assert 'user' in loging_dict
def test_login_user_bad_password():
store = MemoryStore()
users = Users(store)
params = {}
params['password'] = 'password2'
session = Session(params, store, '')
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
user.validated = True
with pytest.raises(Exception):
session.login('test')
def test_login_user_register():
store = MemoryStore()
users = Users(store)
events = Events(store)
start = datetime.now(pytz.timezone("America/New_York"))
dur = timedelta(hours=1)
params = {}
params['password'] = 'password'
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
events.add('test', 'test', 30, start, dur, 'test', 'test',
'[email protected]', 'test', user)
user.validated = True
params['register'] = 'test'
session = Session(params, store, '')
loging_dict = session.login('test')
assert loging_dict
assert 'user' in loging_dict
assert 'register' in loging_dict
assert loging_dict['register'] == 'test'
def test_login_user_register_bad_event():
store = MemoryStore()
users = Users(store)
events = Events(store)
start = datetime.now(pytz.timezone("America/New_York"))
dur = timedelta(hours=1)
params = {}
params['password'] = 'password'
password = BcryptHash('password').encrypt()
user = users.add('email', 'name', 'alias', password, 'phone', True, True,
user_id='test')
events.add('test', 'test', 30, start, dur, 'test', 'test',
'[email protected]', 'test', user)
user.validated = True
params['register'] = ''
session = Session(params, store, '')
with pytest.raises(Exception):
session.login('test')
| 31.151163
| 77
| 0.6271
| 307
| 2,679
| 5.37785
| 0.172638
| 0.048455
| 0.029073
| 0.038764
| 0.779528
| 0.779528
| 0.748032
| 0.720775
| 0.658995
| 0.658995
| 0
| 0.003382
| 0.227324
| 2,679
| 85
| 78
| 31.517647
| 0.794203
| 0
| 0
| 0.756757
| 0
| 0
| 0.131019
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 1
| 0.054054
| false
| 0.175676
| 0.108108
| 0
| 0.162162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
278e6f4a27ea55c26c0173c264a3e94455f7bd43
| 3,856
|
py
|
Python
|
sljassbot/player/rl_player/model.py
|
andieder/pyschieberSLJassBot
|
eb7218d30c89c02ee0cd28f4fa0088b9339ea09b
|
[
"MIT"
] | null | null | null |
sljassbot/player/rl_player/model.py
|
andieder/pyschieberSLJassBot
|
eb7218d30c89c02ee0cd28f4fa0088b9339ea09b
|
[
"MIT"
] | null | null | null |
sljassbot/player/rl_player/model.py
|
andieder/pyschieberSLJassBot
|
eb7218d30c89c02ee0cd28f4fa0088b9339ea09b
|
[
"MIT"
] | null | null | null |
import os
import os.path
from keras.layers import Dense, Flatten, Conv1D, Reshape
from keras.optimizers import Nadam
from keras.models import Sequential
from keras.models import load_model
from keras.regularizers import l2
from keras import backend as K
from keras.losses import mean_squared_error
from sljassbot.player.rl_player.input_handler import InputHandler
def huber_loss(a, b, in_keras=True):
error = a - b
quadratic_term = error * error / 2
linear_term = abs(error) - 1 / 2
use_linear_term = (abs(error) > 1.0)
if in_keras:
# Keras won't let us multiply floats by booleans, so we explicitly cast the booleans to floats
use_linear_term = K.cast(use_linear_term, 'float32')
return use_linear_term * linear_term + (1 - use_linear_term) * quadratic_term
'''
def build_model(model_path, learning_rate=0.01):
if os.path.exists(model_path):
# model = load_model(model_path, custom_objects={'huber_loss': huber_loss})
model = load_model(model_path)
print('Load existing model.')
else:
model = Sequential()
model.add(Dense(InputHandler.input_size * 2, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Reshape((InputHandler.input_size * 2, 1,), input_shape=(InputHandler.input_size * 2,)))
#model.add(Dense(InputHandler.input_size, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Conv1D(filters=50, kernel_size=18, strides=18, padding='same', activation='relu'))
model.add(Conv1D(filters=25, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Flatten())
model.add(Dense(InputHandler.input_size * 2, activation='relu', W_regularizer=l2(0.01)))
model.add(Dense(InputHandler.output_size, activation='linear'))
# optimizer = RMSprop(lr=0.00025, rho=0.95, epsilon=0.01)
optimizer = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)
# model.compile(loss=huber_loss, optimizer=optimizer)
model.compile(loss=mean_squared_error, optimizer=optimizer)
print('Create new model.')
return model
'''
# TODO: first 2 Conv1D then 2 Fully
def build_model(model_path, learning_rate=0.01):
if os.path.exists(model_path):
# model = load_model(model_path, custom_objects={'huber_loss': huber_loss})
model = load_model(model_path)
print('Load existing model.')
else:
model = Sequential()
model.add(Dense(InputHandler.input_size * 2, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Reshape((InputHandler.input_size * 2, 1,), input_shape=(InputHandler.input_size * 2,)))
#model.add(Dense(InputHandler.input_size, input_shape=(InputHandler.input_size,), activation='relu',W_regularizer=l2(0.01)))
model.add(Conv1D(filters=50, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=50, kernel_size=18, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=50, kernel_size=36, strides=9, padding='same', activation='relu'))
model.add(Conv1D(filters=25, kernel_size=9, strides=9, padding='same', activation='relu'))
model.add(Flatten())
model.add(Dense(InputHandler.input_size * 2, activation='relu', W_regularizer=l2(0.01)))
model.add(Dense(InputHandler.output_size, activation='linear'))
# optimizer = RMSprop(lr=0.00025, rho=0.95, epsilon=0.01)
optimizer = Nadam(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=None, schedule_decay=0.004)
# model.compile(loss=huber_loss, optimizer=optimizer)
model.compile(loss=mean_squared_error, optimizer=optimizer)
print('Create new model.')
return model
| 50.736842
| 135
| 0.701245
| 547
| 3,856
| 4.778793
| 0.204753
| 0.055088
| 0.112471
| 0.076511
| 0.780413
| 0.765876
| 0.765876
| 0.765876
| 0.765876
| 0.761668
| 0
| 0.044424
| 0.165197
| 3,856
| 75
| 136
| 51.413333
| 0.76763
| 0.111774
| 0
| 0
| 0
| 0
| 0.044843
| 0
| 0
| 0
| 0
| 0.013333
| 0
| 1
| 0.054054
| false
| 0
| 0.27027
| 0
| 0.378378
| 0.054054
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
27f2f3342adf8bcadcf1fb6bd4a1b90222619533
| 933
|
py
|
Python
|
FRCScouting/TheBlueAlliance/team.py
|
xNovax/FRCScouting.ca
|
caf2774e5854a7386eceb21e57b68c1f9c1f7d2d
|
[
"MIT"
] | 1
|
2019-06-13T03:07:15.000Z
|
2019-06-13T03:07:15.000Z
|
FRCScouting/TheBlueAlliance/team.py
|
xNovax/FRCScouting.ca
|
caf2774e5854a7386eceb21e57b68c1f9c1f7d2d
|
[
"MIT"
] | 8
|
2019-07-04T16:19:06.000Z
|
2019-07-12T17:37:51.000Z
|
FRCScouting/TheBlueAlliance/team.py
|
xNovax/FRCScouting.ca
|
caf2774e5854a7386eceb21e57b68c1f9c1f7d2d
|
[
"MIT"
] | null | null | null |
from django.conf import settings
import tbaapiv3client
from tbaapiv3client.rest import ApiException
def get_team(teamkey):
configuration = tbaapiv3client.Configuration()
configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY
api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration))
try:
api_response = api_instance.get_team("frc" + str(teamkey))
info = api_response
return info
except ApiException as e:
return None
def get_team_events(teamkey):
configuration = tbaapiv3client.Configuration()
configuration.api_key['X-TBA-Auth-Key'] = settings.THE_BLUE_ALLIANCE_KEY
api_instance = tbaapiv3client.TeamApi(tbaapiv3client.ApiClient(configuration))
try:
api_response = api_instance.get_team_events("frc" + str(teamkey))
info = api_response
return info
except ApiException as e:
return None
| 34.555556
| 82
| 0.735263
| 108
| 933
| 6.148148
| 0.324074
| 0.042169
| 0.03012
| 0.141566
| 0.819277
| 0.819277
| 0.819277
| 0.819277
| 0.819277
| 0.819277
| 0
| 0.010526
| 0.185423
| 933
| 26
| 83
| 35.884615
| 0.863158
| 0
| 0
| 0.695652
| 0
| 0
| 0.036442
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.130435
| 0
| 0.391304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
27f4213489895f61e1bf296e012da46fec2f1be6
| 148
|
py
|
Python
|
prompt_toolkit/contrib/ssh/__init__.py
|
anthonyrota/school-yr10-russian-mafia-game
|
0c048dac650ed1a8de17e1301ba24092434a3b45
|
[
"BSD-3-Clause"
] | null | null | null |
prompt_toolkit/contrib/ssh/__init__.py
|
anthonyrota/school-yr10-russian-mafia-game
|
0c048dac650ed1a8de17e1301ba24092434a3b45
|
[
"BSD-3-Clause"
] | null | null | null |
prompt_toolkit/contrib/ssh/__init__.py
|
anthonyrota/school-yr10-russian-mafia-game
|
0c048dac650ed1a8de17e1301ba24092434a3b45
|
[
"BSD-3-Clause"
] | null | null | null |
# from .server import PromptToolkitSession, PromptToolkitSSHServer
# __all__ = [
# "PromptToolkitSession",
# "PromptToolkitSSHServer",
# ]
| 21.142857
| 66
| 0.722973
| 8
| 148
| 12.875
| 0.75
| 0.815534
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168919
| 148
| 6
| 67
| 24.666667
| 0.837398
| 0.918919
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fd7a6e055b87775a23efe8634620a54e3b09ce2a
| 170
|
py
|
Python
|
src/cython/test.py
|
chandnii7/ImageProcessing
|
233c6493ce714e5f2116c563c2c2ac77a6a7dcc4
|
[
"MIT"
] | 3
|
2021-06-12T07:56:26.000Z
|
2022-01-28T09:12:34.000Z
|
src/cython/test.py
|
chandnii7/ImageProcessing
|
233c6493ce714e5f2116c563c2c2ac77a6a7dcc4
|
[
"MIT"
] | null | null | null |
src/cython/test.py
|
chandnii7/ImageProcessing
|
233c6493ce714e5f2116c563c2c2ac77a6a7dcc4
|
[
"MIT"
] | null | null | null |
import smoothing_convolution
import numpy as np
print(smoothing_convolution.apply_convolution(np.array([[1,1,1],[1,1,1],[1,1,1]]), np.array([[1,1,1],[1,1,1],[1,1,1]])))
| 34
| 120
| 0.688235
| 34
| 170
| 3.352941
| 0.294118
| 0.280702
| 0.368421
| 0.421053
| 0.280702
| 0.280702
| 0.280702
| 0.280702
| 0.280702
| 0.280702
| 0
| 0.111801
| 0.052941
| 170
| 4
| 121
| 42.5
| 0.596273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
fdc413e3f0ba2f021b5dece09313275c51852276
| 223
|
py
|
Python
|
item_engine/textbase/__init__.py
|
GabrielAmare/ItemEngine
|
10277626c3724ad9ae7b934f53e11e305dc34da5
|
[
"MIT"
] | null | null | null |
item_engine/textbase/__init__.py
|
GabrielAmare/ItemEngine
|
10277626c3724ad9ae7b934f53e11e305dc34da5
|
[
"MIT"
] | null | null | null |
item_engine/textbase/__init__.py
|
GabrielAmare/ItemEngine
|
10277626c3724ad9ae7b934f53e11e305dc34da5
|
[
"MIT"
] | null | null | null |
from item_engine import *
from .constants import *
from .items import *
from .functions import *
from .base_materials import *
from .materials import *
from .operators import *
from .display import *
from .setup import *
| 18.583333
| 29
| 0.753363
| 29
| 223
| 5.724138
| 0.413793
| 0.481928
| 0.228916
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170404
| 223
| 11
| 30
| 20.272727
| 0.897297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e351bab4f383a8f0e3ea1ea4e98ede0e6715c79c
| 606
|
py
|
Python
|
pokeman/coatings/messaging_endpoints/_abc_endpoint.py
|
wmarcuse/pokeman
|
5d654c227c456a065b2fea6a0d5827bff424c703
|
[
"BSD-3-Clause"
] | null | null | null |
pokeman/coatings/messaging_endpoints/_abc_endpoint.py
|
wmarcuse/pokeman
|
5d654c227c456a065b2fea6a0d5827bff424c703
|
[
"BSD-3-Clause"
] | null | null | null |
pokeman/coatings/messaging_endpoints/_abc_endpoint.py
|
wmarcuse/pokeman
|
5d654c227c456a065b2fea6a0d5827bff424c703
|
[
"BSD-3-Clause"
] | null | null | null |
from pokeman.utils.custom_abc import ABCMeta, abstract_attribute
# TODO: Add more arguments https://pika.readthedocs.io/en/stable/modules/channel.html#pika.channel.Channel.basic_consume
class AbstractBasicMessagingEndpoint(metaclass=ABCMeta):
"""
Abstract base class for Enterprise Integration Patterns,
in specific Messaging Endpoints.
"""
@abstract_attribute
def exchange(self):
pass
@abstract_attribute
def queue(self):
pass
@abstract_attribute
def callback_method(self):
pass
@abstract_attribute
def qos(self):
pass
| 23.307692
| 120
| 0.711221
| 67
| 606
| 6.313433
| 0.656716
| 0.200946
| 0.189125
| 0.177305
| 0.198582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211221
| 606
| 25
| 121
| 24.24
| 0.884937
| 0.343234
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0
| 1
| 0.285714
| false
| 0.285714
| 0.071429
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
e372260c15a5f61577d3e090d1980aa7c16d99fd
| 137
|
py
|
Python
|
ofstest/ofs/doctype/store/test_store.py
|
keithyang77/ofstest
|
aed4c8d22ea1f7857d6e6fcf774ec36d26a5ed17
|
[
"MIT"
] | null | null | null |
ofstest/ofs/doctype/store/test_store.py
|
keithyang77/ofstest
|
aed4c8d22ea1f7857d6e6fcf774ec36d26a5ed17
|
[
"MIT"
] | null | null | null |
ofstest/ofs/doctype/store/test_store.py
|
keithyang77/ofstest
|
aed4c8d22ea1f7857d6e6fcf774ec36d26a5ed17
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021, mds and Contributors
# See license.txt
# import frappe
import unittest
class TestStore(unittest.TestCase):
    """Placeholder test case for the Store doctype; no tests defined yet."""
| 15.222222
| 42
| 0.766423
| 18
| 137
| 5.833333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.153285
| 137
| 8
| 43
| 17.125
| 0.87069
| 0.510949
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
8b50f9b2c5740ceaa34274c2a7af3c69f55b247b
| 117
|
py
|
Python
|
vimeo/auth/__init__.py
|
greedo/vimeo.py
|
23112aecf6ecd6c4377dc363ba1c52b258465aab
|
[
"Apache-2.0"
] | null | null | null |
vimeo/auth/__init__.py
|
greedo/vimeo.py
|
23112aecf6ecd6c4377dc363ba1c52b258465aab
|
[
"Apache-2.0"
] | null | null | null |
vimeo/auth/__init__.py
|
greedo/vimeo.py
|
23112aecf6ecd6c4377dc363ba1c52b258465aab
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
class GrantFailed(Exception): pass
| 16.714286
| 38
| 0.777778
| 16
| 117
| 5.375
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 0.128205
| 117
| 6
| 39
| 19.5
| 0.833333
| 0.316239
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
479de275bbbc4941cff3c777847fb3e130c921f5
| 33
|
py
|
Python
|
testsuite/modulegraph-dir/renamed_package/__init__.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 9
|
2020-03-22T14:48:01.000Z
|
2021-05-30T12:18:12.000Z
|
testsuite/modulegraph-dir/renamed_package/__init__.py
|
xoviat/modulegraph2
|
766d00bdb40e5b2fe206b53a87b1bce3f9dc9c2a
|
[
"MIT"
] | 15
|
2020-01-06T10:02:32.000Z
|
2021-05-28T12:22:44.000Z
|
testsuite/modulegraph-dir/renamed_package/__init__.py
|
ronaldoussoren/modulegraph2
|
b6ab1766b0098651b51083235ff8a18a5639128b
|
[
"MIT"
] | 4
|
2020-05-10T18:51:41.000Z
|
2021-04-07T14:03:12.000Z
|
from sys import path as the_path
| 16.5
| 32
| 0.818182
| 7
| 33
| 3.714286
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 33
| 1
| 33
| 33
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
47a2f3051487b0266b37a192df0734bfae202670
| 44
|
py
|
Python
|
meteocat_api_client/xarxes/pronostic/__init__.py
|
herrera-lu/meteocat-api-client
|
c5ff627c954123786543a0712c82181f76e87d7c
|
[
"MIT"
] | null | null | null |
meteocat_api_client/xarxes/pronostic/__init__.py
|
herrera-lu/meteocat-api-client
|
c5ff627c954123786543a0712c82181f76e87d7c
|
[
"MIT"
] | null | null | null |
meteocat_api_client/xarxes/pronostic/__init__.py
|
herrera-lu/meteocat-api-client
|
c5ff627c954123786543a0712c82181f76e87d7c
|
[
"MIT"
] | 1
|
2022-03-16T13:28:32.000Z
|
2022-03-16T13:28:32.000Z
|
from ..pronostic.pronostic import Pronostic
| 22
| 43
| 0.840909
| 5
| 44
| 7.4
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 44
| 1
| 44
| 44
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
47dbd93121f1e06f2e6486b4325eecbd6af30f03
| 36
|
py
|
Python
|
src/entities/__init__.py
|
alliance-genome/agr_neo4j_qc
|
8020bfa1392aecdec8f2ae86b30f9e9aa1ad1dc3
|
[
"MIT"
] | 2
|
2019-05-30T02:06:24.000Z
|
2019-11-29T18:53:25.000Z
|
src/entities/__init__.py
|
alliance-genome/agr_neo4j_qc
|
8020bfa1392aecdec8f2ae86b30f9e9aa1ad1dc3
|
[
"MIT"
] | null | null | null |
src/entities/__init__.py
|
alliance-genome/agr_neo4j_qc
|
8020bfa1392aecdec8f2ae86b30f9e9aa1ad1dc3
|
[
"MIT"
] | 3
|
2020-12-19T09:06:26.000Z
|
2020-12-19T09:06:53.000Z
|
from .generic import GenericEntities
| 36
| 36
| 0.888889
| 4
| 36
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
47ec641debab7cd5dd697ae51a170cf562030bb5
| 31
|
py
|
Python
|
src/masonite/authentication/guards/__init__.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 1,816
|
2018-02-14T01:59:51.000Z
|
2022-03-31T17:09:20.000Z
|
src/masonite/authentication/guards/__init__.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 340
|
2018-02-11T00:27:26.000Z
|
2022-03-21T12:00:24.000Z
|
src/masonite/authentication/guards/__init__.py
|
cercos/masonite
|
f7f220efa7fae833683e9f07ce13c3795a87d3b8
|
[
"MIT"
] | 144
|
2018-03-18T00:08:16.000Z
|
2022-02-26T01:51:58.000Z
|
from .WebGuard import WebGuard
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9a05d6a75dcf087aa33219e4919df956c131bd97
| 7,161
|
py
|
Python
|
tensormonk/loss/adversarial_loss.py
|
Tensor46/TensorMONK
|
67617d3fdf8fde072ba9cab42de7d67c79b17494
|
[
"MIT"
] | 29
|
2018-07-06T23:57:23.000Z
|
2022-03-08T20:38:57.000Z
|
tensormonk/loss/adversarial_loss.py
|
sparupat/TensorMONK
|
7a2699a28299a89b186e0eb17ed6e9feaea5429e
|
[
"MIT"
] | 3
|
2018-12-14T22:21:26.000Z
|
2020-06-19T02:13:34.000Z
|
tensormonk/loss/adversarial_loss.py
|
sparupat/TensorMONK
|
7a2699a28299a89b186e0eb17ed6e9feaea5429e
|
[
"MIT"
] | 8
|
2018-07-06T23:58:03.000Z
|
2021-04-12T01:35:54.000Z
|
"""TensorMONK :: loss :: AdversarialLoss"""
__all__ = ["AdversarialLoss"]
import torch
import numpy as np
eps = np.finfo(float).eps
def g_minimax(d_of_fake: torch.Tensor, invert_labels: bool = False):
    r"""Minimax generator loss ("Generative Adversarial Nets",
    <https://papers.nips.cc/paper/5423-generative-adversarial-nets.pdf>).

    Real label is 1 and fake label is 0 unless ``invert_labels`` swaps
    them. d_of_fake = D(G(z)).

    loss = - log(σ( d_of_fake ))

    Args:
        d_of_fake (torch.Tensor, required): discriminator output of fake.
        invert_labels (bool, optional): treat real as 0 and fake as 1
            (default: :obj:`"False"`).
    """
    assert isinstance(d_of_fake, torch.Tensor)
    assert isinstance(invert_labels, bool)
    fake_prob = d_of_fake.sigmoid()
    if invert_labels:
        fake_prob = 1 - fake_prob
    # Clamp keeps log() finite when the probability underflows to 0.
    return - fake_prob.clamp(eps).log().mean()
def d_minimax(d_of_real: torch.Tensor, d_of_fake: torch.Tensor,
              invert_labels: bool = False):
    r"""Minimax discriminator loss ("Generative Adversarial Nets",
    <https://papers.nips.cc/paper/5423-generative-adversarial-nets.pdf>).

    Real label is 1 and fake label is 0 unless ``invert_labels`` swaps
    them. d_of_fake = D(G(z)) and d_of_real = D(I).

    loss = - log(σ( d_of_real )) - log(1 - σ( d_of_fake ))

    Args:
        d_of_real (torch.Tensor, required): discriminator output of real.
        d_of_fake (torch.Tensor, required): discriminator output of fake.
        invert_labels (bool, optional): treat real as 0 and fake as 1
            (default: :obj:`"False"`).
    """
    assert isinstance(d_of_real, torch.Tensor)
    assert isinstance(d_of_fake, torch.Tensor)
    assert isinstance(invert_labels, bool)
    real_prob = d_of_real.sigmoid()
    fake_prob = d_of_fake.sigmoid()
    if invert_labels:
        real_prob, fake_prob = 1 - real_prob, 1 - fake_prob
    # Clamp keeps log() finite when a probability underflows to 0.
    real_term = - real_prob.clamp(eps).log()
    fake_term = - (1 - fake_prob).clamp(eps).log()
    return (real_term + fake_term).mean() / 2
def g_least_squares(d_of_fake: torch.Tensor, invert_labels: bool = False):
    r"""Least-squares generator loss ("Least Squares Generative
    Adversarial Networks", <https://arxiv.org/abs/1611.04076>).

    Real label is 1 and fake label is 0 unless ``invert_labels`` swaps
    them. d_of_fake = D(G(z)).

    loss = (1 - σ( d_of_fake ))^2

    Args:
        d_of_fake (torch.Tensor, required): discriminator output of fake.
        invert_labels (bool, optional): treat real as 0 and fake as 1
            (default: :obj:`"False"`).
    """
    assert isinstance(d_of_fake, torch.Tensor)
    assert isinstance(invert_labels, bool)
    # Distance from the "real" target: σ(d) itself under inverted labels,
    # otherwise 1 - σ(d).
    gap = d_of_fake.sigmoid() if invert_labels else 1 - d_of_fake.sigmoid()
    return gap.pow(2).mean()
def d_least_squares(d_of_real: torch.Tensor, d_of_fake: torch.Tensor,
                    invert_labels: bool = False):
    r"""Least-squares discriminator loss ("Least Squares Generative
    Adversarial Networks", <https://arxiv.org/abs/1611.04076>).

    Real label is 1 and fake label is 0 unless ``invert_labels`` swaps
    them. d_of_fake = D(G(z)) and d_of_real = D(I).

    loss = ((1 - σ( d_of_real ))^2 + σ( d_of_fake )^2) / 2

    Args:
        d_of_real (torch.Tensor, required): discriminator output of real.
        d_of_fake (torch.Tensor, required): discriminator output of fake.
        invert_labels (bool, optional): treat real as 0 and fake as 1
            (default: :obj:`"False"`).
    """
    assert isinstance(d_of_real, torch.Tensor)
    assert isinstance(d_of_fake, torch.Tensor)
    assert isinstance(invert_labels, bool)
    real_prob = d_of_real.sigmoid()
    fake_prob = d_of_fake.sigmoid()
    if invert_labels:
        real_prob, fake_prob = 1 - real_prob, 1 - fake_prob
    real_term = (1 - real_prob).pow(2)
    fake_term = fake_prob.pow(2)
    return (real_term + fake_term).mean() / 2
def g_relativistic(d_of_real: torch.Tensor, d_of_fake: torch.Tensor,
                   invert_labels: bool = False):
    r"""Relativistic generator loss ("The relativistic discriminator: a
    key element missing from standard GAN",
    <https://arxiv.org/abs/1807.00734>).

    Real label is 1 and fake label is 0 unless ``invert_labels`` swaps
    them. d_of_fake = D(G(z)) and d_of_real = D(I).

    loss = - log(1 - σ(d_of_fake - E[d_of_real]))

    Args:
        d_of_real (torch.Tensor, required): discriminator output of real.
        d_of_fake (torch.Tensor, required): discriminator output of fake.
        invert_labels (bool, optional): treat real as 0 and fake as 1
            (default: :obj:`"False"`).

    NOTE(review): unlike the minimax/least-squares losses in this module,
    this returns a per-element tensor (no ``.mean()`` reduction) — behavior
    preserved as-is; confirm whether a reduction was intended.
    """
    assert isinstance(d_of_real, torch.Tensor)
    assert isinstance(d_of_fake, torch.Tensor)
    assert isinstance(invert_labels, bool)
    relativistic = (d_of_fake - d_of_real.mean()).sigmoid()
    if invert_labels:
        return - relativistic.clamp(eps).log()
    return - (1 - relativistic).clamp(eps).log()
def d_relativistic(d_of_real: torch.Tensor, d_of_fake: torch.Tensor,
                   invert_labels: bool = False):
    r"""Relativistic discriminator loss ("The relativistic discriminator:
    a key element missing from standard GAN",
    <https://arxiv.org/abs/1807.00734>).

    Real label is 1 and fake label is 0 unless ``invert_labels`` swaps
    them. d_of_fake = D(G(z)) and d_of_real = D(I).

    loss = - log(1 - σ(d_of_real - E[d_of_fake])) -
             log(σ(d_of_fake - E[d_of_real]))

    Args:
        d_of_real (torch.Tensor, required): discriminator output of real.
        d_of_fake (torch.Tensor, required): discriminator output of fake.
        invert_labels (bool, optional): treat real as 0 and fake as 1
            (default: :obj:`"False"`).
    """
    assert isinstance(d_of_real, torch.Tensor)
    assert isinstance(d_of_fake, torch.Tensor)
    assert isinstance(invert_labels, bool)
    # Relativistic scores: each side measured against the mean of the other.
    real_vs_fake = (d_of_real - d_of_fake.mean()).sigmoid().clamp(eps)
    fake_vs_real = (d_of_fake - d_of_real.mean()).sigmoid().clamp(eps)
    if invert_labels:
        total = real_vs_fake.log() + (1 - fake_vs_real).log()
    else:
        total = (1 - real_vs_fake).log() + fake_vs_real.log()
    return - total.mean()
class AdversarialLoss:
    r"""Namespace grouping the adversarial loss functions in this module.

    Assumes 1 is real and 0 is fake.
    Fake --> D(G(z)) = d_of_fake = d_of_g_of_z
    Real --> D(I) = d_of_real

    The attributes below are plain references to the module-level
    functions, so they can be called directly, e.g.
    ``AdversarialLoss.g_minimax(d_of_fake)``.
    """
    # Paper: Generative Adversarial Nets
    # URL: https://arxiv.org/abs/1406.2661
    g_minimax = g_minimax
    d_minimax = d_minimax
    # Paper: Least Squares Generative Adversarial Networks
    # URL: https://arxiv.org/abs/1611.04076
    g_least_squares = g_least_squares
    d_least_squares = d_least_squares
    # Paper: The relativistic discriminator: a key element missing from
    # standard GAN
    # URL: https://arxiv.org/pdf/1807.00734.pdf
    g_relativistic = g_relativistic
    d_relativistic = d_relativistic
| 41.155172
| 79
| 0.661081
| 1,072
| 7,161
| 4.208955
| 0.093284
| 0.049867
| 0.068262
| 0.047872
| 0.871676
| 0.830895
| 0.817598
| 0.767287
| 0.735151
| 0.735151
| 0
| 0.021693
| 0.214635
| 7,161
| 173
| 80
| 41.393064
| 0.780583
| 0.537215
| 0
| 0.416667
| 0
| 0
| 0.004987
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.083333
| false
| 0
| 0.027778
| 0
| 0.347222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d04905293087af5a3d2fa452c0fd8622feb13aaf
| 2,294
|
py
|
Python
|
puzzle-utils/words/tests/crossword_clue_test.py
|
s-zhang/puzzlehunt-tools
|
f994bae1aa9421758a55db0921f87c709e960eed
|
[
"MIT"
] | 2
|
2018-05-07T04:54:58.000Z
|
2019-07-06T21:55:24.000Z
|
puzzle-utils/words/tests/crossword_clue_test.py
|
s-zhang/puzzlehunt-tools
|
f994bae1aa9421758a55db0921f87c709e960eed
|
[
"MIT"
] | 11
|
2018-05-12T18:27:06.000Z
|
2018-11-16T17:47:56.000Z
|
puzzle-utils/words/tests/crossword_clue_test.py
|
s-zhang/puzzlehunt-tools
|
f994bae1aa9421758a55db0921f87c709e960eed
|
[
"MIT"
] | 1
|
2018-05-08T05:25:53.000Z
|
2018-05-08T05:25:53.000Z
|
import pytest
from ..crossword_clue import crossword_clue_wordplays_extract, crossword_clue_dictionary_extract
def test_crossword_clue_wordplays_extract():
    """Extracting answers from a captured Wordplays.com results page
    yields the answer words in page order."""
    # Trimmed snapshot of a real Wordplays.com crossword-solver results page.
    html = '<!DOCTYPE html><html lang="en" prefix="og: http://ogp.me/ns#" ><title>largest sea creature Crossword Clue, Crossword Solver | Wordplays.com</title><div id="content" ><div id=app><div id=app-pad><h1>Crossword Solver</h1><div id=adwrap><table id=adwrap-table><tr><td colspan=3><table id="wordlists" class=\'wp-widget\' cellspacing=0><thead class=\'wp-widget-header\'><tr><th colspan=4><strong>Crossword Answers:largest sea creature</strong></th></tr></thead><tbody class=\'wp-widget-content\'><tr class=subtitle><td>RANK</td><td>ANSWER</td><td> CLUE</td><td> ?</td></tr><tr class="odd"><td><div class=stars><div></div><div></div><div></div><div></div></div><div class=clear></div></td><td><a href="/crossword-clues/WHALE">WHALE</a></td><td class=clue>Largest sea mammal</td><td class=deflink-td></td></tr><tr class="even"><td><div class=stars><div></div><div></div><div></div></div><div class=clear></div></td><td><a href="/crossword-clues/WHELK">WHELK</a></td><td class=clue>Spiral-shelled sea creature</td><td class=deflink-td></td></tr></tbody></table></td></tr></table></div></div></div></div></html>'
    assert crossword_clue_wordplays_extract(html) == ['WHALE', 'WHELK']
def test_crossword_clue_dictionary_extract():
    """Extracting answers from a captured Dictionary.com results page
    yields the answer words in page order."""
    # Trimmed snapshot of a real Dictionary.com crossword-solver results page.
    html = '<!DOCTYPE html><html xmlns="http://www.w3.org/1999/xhtml" prefix="og: http://opengraphprotocol.org/schema/ fb: http://www.facebook.com/2010/fbml d: http://dictionary.com/2011/dml"><body class="game-page"><div class="main-container"><div class="row"><div class="left-column"><div><div class="specific-content crossword-solver-results"><div class="inner-border"><h2 class="title">Try these answers for \'largest sea creature\'</h2><div class="result-row result-head"><div class="confidence">Confidence</div><div class="matching-answer">Matching Answer</div></div><div class="result-row"><div class="confidence">60%</div><div class="matching-answer">WHELK</div></div><div class="result-row"><div class="confidence">60%</div><div class="matching-answer">WHALE</div></div></div></div></div></div></div></div></body></html>'
    assert crossword_clue_dictionary_extract(html) == ['WHELK', 'WHALE']
| 229.4
| 1,122
| 0.714037
| 361
| 2,294
| 4.479224
| 0.290859
| 0.12616
| 0.133581
| 0.133581
| 0.356834
| 0.222635
| 0.222635
| 0.195424
| 0.180581
| 0.180581
| 0
| 0.011004
| 0.049259
| 2,294
| 10
| 1,123
| 229.4
| 0.730399
| 0
| 0
| 0
| 0
| 0.625
| 0.817429
| 0.505882
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d051c0370222a89da6f0076b411781fd39826343
| 978
|
py
|
Python
|
dlcli/api/user.py
|
outlyerapp/dlcli
|
57458b7346a9b9c92cf99628f6c051589a9edb27
|
[
"Apache-2.0"
] | 1
|
2017-12-06T11:07:48.000Z
|
2017-12-06T11:07:48.000Z
|
dlcli/api/user.py
|
outlyerapp/dlcli
|
57458b7346a9b9c92cf99628f6c051589a9edb27
|
[
"Apache-2.0"
] | 1
|
2020-02-22T20:29:15.000Z
|
2020-02-22T20:29:15.000Z
|
dlcli/api/user.py
|
outlyerapp/dlcli
|
57458b7346a9b9c92cf99628f6c051589a9edb27
|
[
"Apache-2.0"
] | null | null | null |
import logging
from wrapper import *
logger = logging.getLogger(__name__)
# noinspection PyUnusedLocal
def get_user(url='', key='', timeout=60, **kwargs):
    """GET ``<url>/user`` with a bearer token and return the decoded JSON body."""
    auth_headers = {'Authorization': "Bearer " + key}
    return get(url + '/user', headers=auth_headers, timeout=timeout).json()
# noinspection PyUnusedLocal
def get_user_tokens(url='', key='', timeout=60, **kwargs):
    """GET ``<url>/user/tokens`` with a bearer token and return the decoded JSON body."""
    auth_headers = {'Authorization': "Bearer " + key}
    return get(url + '/user/tokens', headers=auth_headers, timeout=timeout).json()
# noinspection PyUnusedLocal
def create_user_token(url='', key='', token_name='', timeout=60, **kwargs):
    """POST a new named token to ``<url>/user/tokens`` and return the decoded JSON body."""
    auth_headers = {'Authorization': "Bearer " + key}
    payload = {'name': token_name}
    return post(url + '/user/tokens', headers=auth_headers,
                data=payload, timeout=timeout).json()
# noinspection PyUnusedLocal
def delete_user_token(url='', key='', token_name='', timeout=60, **kwargs):
    """DELETE the named token; returns the raw response object (not JSON)."""
    auth_headers = {'Authorization': "Bearer " + key}
    return delete(url + '/user/tokens/' + token_name,
                  headers=auth_headers, timeout=timeout)
| 33.724138
| 97
| 0.645194
| 108
| 978
| 5.703704
| 0.259259
| 0.081169
| 0.181818
| 0.136364
| 0.800325
| 0.732143
| 0.587662
| 0.50974
| 0.50974
| 0.38961
| 0
| 0.009988
| 0.180982
| 978
| 28
| 98
| 34.928571
| 0.759051
| 0.109407
| 0
| 0
| 0
| 0
| 0.145497
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0.133333
| 0.266667
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d06a538ff92bcaee8543e7411b2e6ce6b1565cdd
| 25
|
py
|
Python
|
src/tf_frodo/__init__.py
|
xarion/tf_frodo
|
fd80640bdfd3425b91f37a46d388fd9d322536e6
|
[
"MIT"
] | 1
|
2022-03-30T14:32:48.000Z
|
2022-03-30T14:32:48.000Z
|
src/tf_frodo/__init__.py
|
xarion/tf_frodo
|
fd80640bdfd3425b91f37a46d388fd9d322536e6
|
[
"MIT"
] | null | null | null |
src/tf_frodo/__init__.py
|
xarion/tf_frodo
|
fd80640bdfd3425b91f37a46d388fd9d322536e6
|
[
"MIT"
] | null | null | null |
from .frodo import FRODO
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d0b0604c2fb54776ef56e09052249faee63feba4
| 8,727
|
py
|
Python
|
tests/Monkeypatching/test_Api_monkeypatching_api_delete.py
|
LudwikaMalinowska/Automated-Testing-Project2
|
f0868700af8d6b946768d67b3c1768c2447f1a60
|
[
"MIT"
] | null | null | null |
tests/Monkeypatching/test_Api_monkeypatching_api_delete.py
|
LudwikaMalinowska/Automated-Testing-Project2
|
f0868700af8d6b946768d67b3c1768c2447f1a60
|
[
"MIT"
] | null | null | null |
tests/Monkeypatching/test_Api_monkeypatching_api_delete.py
|
LudwikaMalinowska/Automated-Testing-Project2
|
f0868700af8d6b946768d67b3c1768c2447f1a60
|
[
"MIT"
] | null | null | null |
import unittest
import requests
from assertpy import assert_that
from requests.exceptions import Timeout
from unittest.mock import Mock, patch
from src.Api import Api
from src.todos import todos
class TestApiMonkeyPatch(unittest.TestCase):
    """Monkeypatching tests for ``Api.api_delete``.

    Every test replaces ``src.Api.Api`` with an ``autospec`` mock, so the
    real HTTP layer is never exercised — the tests verify mock
    configuration (``side_effect``, ``return_value``) and call-tracking
    assertions (``assert_called*``) together with assertpy-style checks
    on the stubbed response dicts.
    """
    # --- call-tracking assertions (expected to pass) ---
    @patch('src.Api.Api', autospec=True)
    def test_method_api_delete_raises_timeout(self, mock_class):
        mock_id = Mock()
        mock_id.return_value = 1
        mock_class.api_delete.side_effect = Timeout
        with self.assertRaises(Timeout):
            mock_class.api_delete(mock_id)
    def test_method_api_delete_assert_that_called_once(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_api.api_delete(mock_id)
            mock_api.api_delete.assert_called_once()
    def test_method_api_delete_assert_that_called(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_id2 = Mock()
            mock_id2.return_value = 2
            mock_api.api_delete(mock_id)
            mock_api.api_delete(mock_id2)
            mock_api.api_delete.assert_called()
    def test_method_api_delete_assert_that_not_called(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_api.api_delete.assert_not_called()
    def test_method_api_delete_assert_that_called_with_id_1(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_api.api_delete(mock_id)
            mock_api.api_delete.assert_called_with(mock_id)
    def test_method_api_delete_assert_that_called_once_with_id_1(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_api.api_delete(mock_id)
            mock_api.api_delete.assert_called_once_with(mock_id)
    # --- assertions on a stubbed response dict ---
    def test_method_api_delete_assert_that_response_has_status_code_200(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 1
            mock_api.api_delete.return_value = {"delete_id": todo_id,
                                                "deleted_data": todos[todo_id - 1],
                                                "status_code": 200}
            response = mock_api.api_delete(todo_id)
            assert_that(response).has_status_code(200)
    def test_method_api_delete_assert_that_response_status_code_is_not_200(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 1
            mock_api.api_delete.return_value = {"status_code": 408}
            response = mock_api.api_delete(todo_id)
            assert_that(response["status_code"]).is_not_equal_to(200)
    def test_method_api_delete_assert_that_response_is_instance_of_dict(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 1
            mock_api.api_delete.return_value = {"delete_id": todo_id,
                                                "deleted_data": todos[todo_id - 1],
                                                "status_code": 200}
            response = mock_api.api_delete(todo_id)
            assert_that(response).is_instance_of(dict)
    def test_method_api_delete_assert_that_response_has_key_delete_id_1(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 1
            mock_api.api_delete.return_value = {"delete_id": todo_id,
                                                "deleted_data": todos[todo_id - 1],
                                                "status_code": 200}
            response = mock_api.api_delete(todo_id)
            assert_that(response).has_delete_id(1)
    def test_method_api_delete_assert_that_response_returns_deleted_data(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 1
            mock_api.api_delete.return_value = {"delete_id": todo_id,
                                                "deleted_data": todos[todo_id - 1],
                                                "status_code": 200}
            response = mock_api.api_delete(todo_id)
            assert_that(response["deleted_data"]).is_equal_to(todos[0])
    def test_method_api_delete_assert_that_response_deleted_data_contain_all_keys_userId_id_title_completed(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 1
            mock_api.api_delete.return_value = {"delete_id": todo_id,
                                                "deleted_data": todos[todo_id - 1],
                                                "status_code": 200}
            response = mock_api.api_delete(todo_id)
            assert_that(response["deleted_data"]).contains_key("userId", "id", "title", "completed")
    # --- call-tracking assertions expected to FAIL (AssertionError paths) ---
    def test_method_api_delete_assert_that_not_called_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_api.api_delete(mock_id)
            with self.assertRaises(AssertionError):
                mock_api.api_delete.assert_not_called()
    def test_method_api_delete_assert_that_called_once_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_id2 = Mock()
            mock_id2.return_value = 2
            mock_api.api_delete(mock_id)
            mock_api.api_delete(mock_id2)
            with self.assertRaises(AssertionError):
                mock_api.api_delete.assert_called_once()
    def test_method_api_delete_assert_that_called_with_id_1_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_id2 = Mock()
            mock_id2.return_value = 2
            mock_api.api_delete(mock_id2)
            with self.assertRaises(AssertionError):
                mock_api.api_delete.assert_called_with(mock_id)
    def test_method_api_delete_assert_that_called_once_with_id_1_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            mock_id = Mock()
            mock_id.return_value = 1
            mock_id2 = Mock()
            mock_id2.return_value = 2
            mock_api.api_delete(mock_id)
            mock_api.api_delete(mock_id2)
            with self.assertRaises(AssertionError):
                mock_api.api_delete.assert_called_once_with(mock_id)
    def test_method_api_delete_no_parameter_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            with self.assertRaises(TypeError):
                mock_api.api_delete()
    # --- configured side_effect exceptions for invalid ids ---
    def test_method_api_delete_assert_that_response_returns_ValueError_when_called_with_id_0_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 0
            mock_api.api_delete.return_value = {"status_code": 408}
            mock_api.api_delete.side_effect = ValueError
            assert_that(mock_api.api_delete).raises(ValueError).when_called_with(todo_id)
    def test_method_api_delete_assert_that_response_returns_ValueError_when_called_with_id_300_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = 300
            mock_api.api_delete.return_value = {"status_code": 408}
            mock_api.api_delete.side_effect = ValueError
            assert_that(mock_api.api_delete).raises(ValueError).when_called_with(todo_id)
    def test_method_api_delete_assert_that_response_returns_TypeError_when_called_with_id_not_int_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = "1"
            mock_api.api_delete.return_value = {"status_code": 408}
            mock_api.api_delete.side_effect = TypeError
            assert_that(mock_api.api_delete).raises(TypeError).when_called_with(todo_id)
    def test_method_api_delete_assert_that_response_returns_AttributeError_when_called_with_None_exception(self):
        with patch('src.Api.Api', autospec=True) as mock_api:
            todo_id = None
            mock_api.api_delete.return_value = {"status_code": 408}
            mock_api.api_delete.side_effect = AttributeError
            assert_that(mock_api.api_delete).raises(AttributeError).when_called_with(todo_id)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 44.52551
| 114
| 0.637332
| 1,154
| 8,727
| 4.379549
| 0.067591
| 0.121092
| 0.089038
| 0.142461
| 0.876533
| 0.866047
| 0.856549
| 0.827661
| 0.81856
| 0.752671
| 0
| 0.015132
| 0.280623
| 8,727
| 196
| 115
| 44.52551
| 0.789901
| 0
| 0
| 0.672956
| 0
| 0
| 0.058662
| 0
| 0
| 0
| 0
| 0
| 0.283019
| 1
| 0.132075
| false
| 0
| 0.044025
| 0
| 0.18239
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d0b85837f2f9254586f0016b5548b45273a219d6
| 30
|
py
|
Python
|
upcycle/cuda/__init__.py
|
samuelstanton/upcycle
|
f7066b22655d53bac9e78f9c631a33ada552071a
|
[
"MIT"
] | null | null | null |
upcycle/cuda/__init__.py
|
samuelstanton/upcycle
|
f7066b22655d53bac9e78f9c631a33ada552071a
|
[
"MIT"
] | null | null | null |
upcycle/cuda/__init__.py
|
samuelstanton/upcycle
|
f7066b22655d53bac9e78f9c631a33ada552071a
|
[
"MIT"
] | null | null | null |
from .try_cuda import try_cuda
| 30
| 30
| 0.866667
| 6
| 30
| 4
| 0.666667
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d0fc9d8644d4ac724b220fce364b8f8e26c90f7f
| 334
|
py
|
Python
|
model/project.py
|
checheninao/python_training_mantis
|
4077813bd6fcb668cd703dc044e19b51a4ceff91
|
[
"Apache-2.0"
] | null | null | null |
model/project.py
|
checheninao/python_training_mantis
|
4077813bd6fcb668cd703dc044e19b51a4ceff91
|
[
"Apache-2.0"
] | null | null | null |
model/project.py
|
checheninao/python_training_mantis
|
4077813bd6fcb668cd703dc044e19b51a4ceff91
|
[
"Apache-2.0"
] | null | null | null |
class Project:
    """A Mantis project identified by its name.

    Two projects compare equal when their names match; ``status`` and
    ``description`` are carried along but ignored for equality.

    NOTE: defining ``__eq__`` without ``__hash__`` leaves instances
    unhashable (Python 3 behavior) — preserved deliberately; add
    ``__hash__`` only if instances must go into sets/dict keys.
    """

    def __init__(self, name, status=None, description=None):
        self.name = name
        self.description = description
        self.status = status

    def __repr__(self):
        return self.name

    def __eq__(self, other):
        # Fix: comparing against a non-Project object used to raise
        # AttributeError (no ``.name``); return NotImplemented so Python
        # falls back to the other operand / identity comparison.
        if not isinstance(other, Project):
            return NotImplemented
        return self.name == other.name

    def key(self):
        """Sort key for ordering projects by name."""
        return self.name
| 20.875
| 60
| 0.610778
| 40
| 334
| 4.8
| 0.35
| 0.208333
| 0.21875
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296407
| 334
| 15
| 61
| 22.266667
| 0.817021
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0
| 0.272727
| 0.727273
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ef92e7d3a65f826a8add5882190fe2bfc3933709
| 109,749
|
py
|
Python
|
tests/integration/CbfSubarray_test.py
|
jamesjiang52/mid-cbf-mcs
|
072f8fdb91d77010e875f441e536fab842bf8319
|
[
"BSD-3-Clause"
] | null | null | null |
tests/integration/CbfSubarray_test.py
|
jamesjiang52/mid-cbf-mcs
|
072f8fdb91d77010e875f441e536fab842bf8319
|
[
"BSD-3-Clause"
] | 4
|
2021-05-20T05:19:23.000Z
|
2021-05-20T05:19:26.000Z
|
tests/integration/CbfSubarray_test.py
|
ska-telescope/mid-cbf-mcs
|
072f8fdb91d77010e875f441e536fab842bf8319
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the csp-lmc-prototype project
#
#
#
# Distributed under the terms of the BSD-3-Clause license.
# See LICENSE.txt for more info.
"""Contain the tests for the CbfSubarray."""
# Standard imports
import sys
import os
import time
from datetime import datetime
import json
import logging
# Path
file_path = os.path.dirname(os.path.abspath(__file__))
# Tango imports
import tango
from tango import DevState
import pytest
# SKA specific imports
from ska_mid_cbf_mcs.commons.global_enum import freq_band_dict
from ska_tango_base.control_model import LoggingLevel, HealthState
from ska_tango_base.control_model import AdminMode, ObsState
from ska_tango_base.base_device import _DEBUGGER_PORT
@pytest.mark.usefixtures("proxies", "input_test_data")
class TestCbfSubarray:
def test_AddRemoveReceptors_valid(self, proxies):
"""
Test valid AddReceptors and RemoveReceptors commands
"""
timeout_millis = proxies.subarray[1].get_timeout_millis()
log_msg = "timeout_millis = {} ".format(timeout_millis)
#logging.info(log_msg)
#logging.info("start_time = {}".format(time.time()))
logging.info("start datetime = {}".format(datetime.now()))
if proxies.debug_device_is_on:
port = proxies.subarray[1].DebugDevice()
try:
proxies.clean_proxies()
if proxies.controller.State() == DevState.OFF:
proxies.controller.Init()
proxies.wait_timeout_dev([proxies.controller], DevState.STANDBY, 3, 1)
proxies.controller.On()
proxies.wait_timeout_dev([proxies.controller], DevState.ON, 3, 1)
proxies.clean_proxies()
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].State() == DevState.ON
assert proxies.subarray[1].obsState == ObsState.EMPTY
# receptor list should be empty right after initialization
assert len(proxies.subarray[1].receptors) == 0
assert all([proxies.vcc[i + 1].subarrayMembership == 0 for i in range(4)])
input_receptors = [1, 3, 4]
# add some receptors
proxies.subarray[1].AddReceptors(input_receptors)
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert [proxies.subarray[1].receptors[i] for i in range(3)] == input_receptors
assert all([proxies.vcc[proxies.receptor_to_vcc[i]].subarrayMembership == 1 for i in input_receptors])
assert proxies.subarray[1].obsState == ObsState.IDLE
# add more receptors...
proxies.subarray[1].AddReceptors([2])
time.sleep(1)
assert [proxies.subarray[1].receptors[i] for i in range(4)] == [1, 3, 4, 2]
assert proxies.vcc[proxies.receptor_to_vcc[2]].subarrayMembership == 1
# remove some receptors
proxies.subarray[1].RemoveReceptors([2, 1, 4])
time.sleep(1)
assert proxies.subarray[1].receptors == ([3])
assert all([proxies.vcc[proxies.receptor_to_vcc[i]].subarrayMembership == 0 for i in [1, 2, 4]])
assert proxies.vcc[proxies.receptor_to_vcc[3]].subarrayMembership == 1
# remove remaining receptors
proxies.subarray[1].RemoveReceptors([3])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.EMPTY, 1, 1)
assert len(proxies.subarray[1].receptors) == 0
assert proxies.vcc[proxies.receptor_to_vcc[3]].subarrayMembership == 0
assert proxies.subarray[1].obsState == ObsState.EMPTY
proxies.subarray[1].Off()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.OFF, 3, 1)
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
def test_AddRemoveReceptors_invalid_single(self, proxies):
"""
Test invalid AddReceptors commands involving a single subarray:
- when a receptor ID is invalid (e.g. out of range)
- when a receptor to be removed is not assigned to the subarray
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].State() == DevState.ON
assert proxies.subarray[1].obsState == ObsState.EMPTY
# receptor list should be empty right after initialization
assert len(proxies.subarray[1].receptors) == 0
assert all([proxies.vcc[i + 1].subarrayMembership == 0 for i in range(4)])
# add some receptors to subarray 1
proxies.subarray[1].AddReceptors([1, 3])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert proxies.subarray[1].receptors[0] == 1
assert proxies.subarray[1].receptors[1] == 3
assert all([proxies.vcc[proxies.receptor_to_vcc[i]].subarrayMembership == 1 for i in [1, 3]])
assert proxies.subarray[1].obsState == ObsState.IDLE
# TODO: fix this
# try adding an invalid receptor ID
# with pytest.raises(tango.DevFailed) as df:
# proxies.subarray[1].AddReceptors([5])
# time.sleep(1)
# assert "Invalid receptor ID" in str(df.value.args[0].desc)
# try removing a receptor not assigned to subarray 1
# doing this doesn't actually throw an error
proxies.subarray[1].RemoveReceptors([2])
assert proxies.subarray[1].receptors[0] == 1
assert proxies.subarray[1].receptors[1] == 3
proxies.subarray[1].RemoveAllReceptors()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.EMPTY, 1, 1)
proxies.subarray[1].Off()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.OFF, 3, 1)
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
    @pytest.mark.skip(reason="Since there's only a single subarray, this test is currently broken.")
    def test_AddRemoveReceptors_invalid_multiple(self, proxies):
        """
        Test invalid AddReceptors commands involving multiple subarrays:

        - when a receptor to be added is already in use by a different subarray

        NOTE(review): the body below is entirely commented out; it predates
        the single-subarray deployment and references fixtures not currently
        provided (subarray_2_proxy, vcc_proxies, cbf_controller_proxy). It is
        kept for reference until multiple subarrays are supported again.
        """
        # for proxy in vcc_proxies:
        #     proxy.Init()
        # proxies.subarray[1].set_timeout_millis(60000)
        # subarray_2_proxy.set_timeout_millis(60000)
        # proxies.subarray[1].Init()
        # subarray_2_proxy.Init()
        # time.sleep(3)
        # cbf_controller_proxy.set_timeout_millis(60000)
        # cbf_controller_proxy.Init()
        # time.sleep(60)  # takes pretty long for CBF controller to initialize
        # receptor_to_vcc = dict([*map(int, pair.split(":"))] for pair in
        #                        cbf_controller_proxy.receptorToVcc)
        # cbf_controller_proxy.On()
        # time.sleep(3)
        # # receptor list should be empty right after initialization
        # assert proxies.subarray[1].receptors == ()
        # assert subarray_2_proxy.receptors == ()
        # assert all([proxy.subarrayMembership == 0 for proxy in vcc_proxies])
        # assert proxies.subarray[1].State() == DevState.OFF
        # assert subarray_2_proxy.State() == DevState.OFF
        # # add some receptors to subarray 1
        # proxies.subarray[1].AddReceptors([1, 3])
        # time.sleep(1)
        # assert proxies.subarray[1].receptors == (1, 3)
        # assert all([vcc_proxies[receptor_to_vcc[i] - 1].subarrayMembership == 1 for i in [1, 3]])
        # assert proxies.subarray[1].State() == DevState.ON
        # # try adding some receptors (including an invalid one) to subarray 2
        # with pytest.raises(tango.DevFailed) as df:
        #     subarray_2_proxy.AddReceptors([1, 2, 4])
        # time.sleep(1)
        # assert "already in use" in str(df.value.args[0].desc)
        # assert subarray_2_proxy.receptors == (2, 4)
        # assert all([vcc_proxies[receptor_to_vcc[i] - 1].subarrayMembership == 1 for i in [1, 3]])
        # assert all([vcc_proxies[receptor_to_vcc[i] - 1].subarrayMembership == 2 for i in [2, 4]])
        # assert subarray_2_proxy.State() == DevState.ON
def test_RemoveAllReceptors(self, proxies):
"""
Test RemoveAllReceptors command
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].State() == DevState.ON
assert proxies.subarray[1].obsState == ObsState.EMPTY
# receptor list should be empty right after initialization
assert len(proxies.subarray[1].receptors) == 0
assert all([proxies.vcc[i + 1].subarrayMembership == 0 for i in range(4)])
# add some receptors
proxies.subarray[1].AddReceptors([1, 3, 4])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(3), [1, 3, 4])])
assert all([proxies.vcc[proxies.receptor_to_vcc[i]].subarrayMembership == 1 for i in [1, 3, 4]])
assert proxies.subarray[1].obsState == ObsState.IDLE
# remove all receptors
proxies.subarray[1].RemoveAllReceptors()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.EMPTY, 1, 1)
assert len(proxies.subarray[1].receptors) == 0
assert all([proxies.vcc[proxies.receptor_to_vcc[i]].subarrayMembership == 0 for i in [1, 3, 4]])
assert proxies.subarray[1].obsState == ObsState.EMPTY
proxies.subarray[1].Off()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.OFF, 3, 1)
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
#TODO: fix; currently tests break if multiple scan configurations are tested
def test_ConfigureScan_basic(self, proxies):
"""
Test a successful scan configuration
"""
proxies.subarray[1].loggingLevel = LoggingLevel.DEBUG
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
# check initial value of attributes of CBF subarray
vcc_index = proxies.receptor_to_vcc[4]
logging.info("vcc_index = {}".format( vcc_index ))
assert len(proxies.subarray[1].receptors) == 0
assert proxies.subarray[1].configID == ''
# TODO in CbfSubarray, at end of scan, clear all private data
#assert proxies.subarray[1].frequencyBand == 0
assert proxies.subarray[1].obsState == ObsState.EMPTY
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(3), [1, 3, 4])])
# configure scan
config_file_name = "/../data/ConfigureScan_basic.json"
f = open(file_path + config_file_name)
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 15, 1)
# check configured attributes of CBF subarray
assert proxies.subarray[1].configID == "band:5a, fsp1, 744 channels average factor 8"
assert proxies.subarray[1].frequencyBand == 4 # means 5a
assert proxies.subarray[1].obsState == ObsState.READY
proxies.wait_timeout_obs([proxies.vcc[i + 1] for i in range(4)], ObsState.READY, 1, 1)
# check frequency band of VCCs, including states of
# frequency band capabilities
logging.info( ("proxies.vcc[vcc_index].frequencyBand = {}".
format( proxies.vcc[vcc_index].frequencyBand)) )
vcc_band_proxies = proxies.vccBand[vcc_index - 1]
assert proxies.vcc[proxies.receptor_to_vcc[4]].frequencyBand == 4
assert proxies.vcc[proxies.receptor_to_vcc[1]].frequencyBand == 4
for proxy in proxies.vccBand[proxies.receptor_to_vcc[4] - 1]:
logging.info("VCC proxy.State() = {}".format(proxy.State()))
assert [proxy.State() for proxy in proxies.vccBand[proxies.receptor_to_vcc[4] - 1]] == [
DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
assert [proxy.State() for proxy in proxies.vccBand[proxies.receptor_to_vcc[1] - 1]] == [
DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
# check the rest of the configured attributes of VCCs
# first for VCC belonging to receptor 10...
assert proxies.vcc[proxies.receptor_to_vcc[4]].subarrayMembership == 1
assert proxies.vcc[proxies.receptor_to_vcc[4]].band5Tuning[0] == 5.85
assert proxies.vcc[proxies.receptor_to_vcc[4]].band5Tuning[1] == 7.25
assert proxies.vcc[proxies.receptor_to_vcc[4]].frequencyBandOffsetStream1 == 0
assert proxies.vcc[proxies.receptor_to_vcc[4]].frequencyBandOffsetStream2 == 0
assert proxies.vcc[proxies.receptor_to_vcc[4]].rfiFlaggingMask == "{}"
# then for VCC belonging to receptor 1...
assert proxies.vcc[proxies.receptor_to_vcc[1]].subarrayMembership == 1
assert proxies.vcc[proxies.receptor_to_vcc[1]].band5Tuning[0] == 5.85
assert proxies.vcc[proxies.receptor_to_vcc[1]].band5Tuning[1] == 7.25
# check configured attributes of search windows
# first for search window 1...
# TODO - SearchWidow device test is disabled since the same
# functionality is implemented by the VccSearchWindow device;
# to be decide which one to keep.
# print("proxies.sw[1].State() = {}".format(proxies.sw[1].State()))
# print("proxies.sw[2].State() = {}".format(proxies.sw[2].State()))
# assert proxies.sw[1].State() == DevState.ON
# assert proxies.sw[1].searchWindowTuning == 6000000000
# assert proxies.sw[1].tdcEnable == True
# assert proxies.sw[1].tdcNumBits == 8
# assert proxies.sw[1].tdcPeriodBeforeEpoch == 5
# assert proxies.sw[1].tdcPeriodAfterEpoch == 25
# assert "".join(proxies.sw[1].tdcDestinationAddress.split()) in [
# "[{\"receptorID\":4,\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"]},{\"receptorID\":1,\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"]}]",
# "[{\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"],\"receptorID\":4},{\"receptorID\":1,\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"]}]",
# "[{\"receptorID\":4,\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"]},{\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"],\"receptorID\":1}]",
# "[{\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"],\"receptorID\":4},{\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"],\"receptorID\":1}]",
# ]
# # then for search window 2...
# assert proxies.sw[2].State() == DevState.DISABLE
# assert proxies.sw[2].searchWindowTuning == 7000000000
# assert proxies.sw[2].tdcEnable == False
time.sleep(1)
# check configured attributes of VCC search windows
# first for search window 1 of VCC belonging to receptor 10...
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].State() == DevState.ON
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].searchWindowTuning == 6000000000
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].tdcEnable == True
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].tdcNumBits == 8
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].tdcPeriodBeforeEpoch == 5
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].tdcPeriodAfterEpoch == 25
# TODO - re-enable and debug!
# assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][0].tdcDestinationAddress == (
# "foo", "bar", "8080"
# )
# then for search window 1 of VCC belonging to receptor 1...
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].State() == DevState.ON
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].searchWindowTuning == 6000000000
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].tdcEnable == True
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].tdcNumBits == 8
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].tdcPeriodBeforeEpoch == 5
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].tdcPeriodAfterEpoch == 25
# TODO - re-enable and debug!
# assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][0].tdcDestinationAddress == (
# "fizz", "buzz", "80"
# )
# then for search window 2 of VCC belonging to receptor 10...
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][1].State() == DevState.DISABLE
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][1].searchWindowTuning == 7000000000
assert proxies.vccTdc[proxies.receptor_to_vcc[4] - 1][1].tdcEnable == False
# and lastly for search window 2 of VCC belonging to receptor 1...
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][1].State() == DevState.DISABLE
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][1].searchWindowTuning == 7000000000
assert proxies.vccTdc[proxies.receptor_to_vcc[1] - 1][1].tdcEnable == False
# check configured attributes of FSPs, including states of function mode capabilities
assert proxies.fsp[1].functionMode == 1
assert 1 in proxies.fsp[1].subarrayMembership
assert [proxy.State() for proxy in proxies.fsp1FunctionMode] == [
DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
]
# TODO -
# assert [proxy.State() for proxy in fsp_2_function_mode_proxy] == [
# DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
# ]
# check configured attributes of FSP subarrays
# first for FSP 3 ... (this is a PSS fsp device)
assert proxies.fspSubarray[3].receptors[0] == 3
assert proxies.fspSubarray[3].receptors[1] == 1
assert proxies.fspSubarray[3].searchWindowID == 2
assert proxies.fspSubarray[3].searchBeamID[0] == 300
assert proxies.fspSubarray[3].searchBeamID[1] == 400
# TODO: currently searchBeams is stored by the device
# as a json string ( via attribute 'searchBeams');
# this has to be updated in FspPssSubarray
# to read/write individual members
searchBeam = proxies.fspSubarray[3].searchBeams
searchBeam0 = json.loads(searchBeam[0])
searchBeam1 = json.loads(searchBeam[1])
assert searchBeam0["search_beam_id"] == 300
assert searchBeam0["receptor_ids"][0] == 3
assert searchBeam0["enable_output"] == True
assert searchBeam0["averaging_interval"] == 4
# TODO - this does not pass - to debug & fix
#assert searchBeam0["searchBeamDestinationAddress"] == "10.05.1.1"
assert searchBeam1["search_beam_id"] == 400
assert searchBeam1["receptor_ids"][0] == 1
assert searchBeam1["enable_output"] == True
assert searchBeam1["averaging_interval"] == 2
# TODO - this does not pass - to debug & fix
#assert searchBeam1["searchBeamDestinationAddress"] == "10.05.2.1"
# check configured attributes of FSP subarrays
# first for FSP 1... (this is a CORR fsp device)
assert proxies.fspSubarray[1].obsState == ObsState.READY
assert proxies.fspSubarray[1].receptors == 4
assert proxies.fspSubarray[1].frequencyBand == 4
assert proxies.fspSubarray[1].band5Tuning[0] == 5.85
assert proxies.fspSubarray[1].band5Tuning[1] == 7.25
assert proxies.fspSubarray[1].frequencyBandOffsetStream1 == 0
assert proxies.fspSubarray[1].frequencyBandOffsetStream2 == 0
assert proxies.fspSubarray[1].frequencySliceID == 1
assert proxies.fspSubarray[1].corrBandwidth == 1
assert proxies.fspSubarray[1].zoomWindowTuning == 4700000
assert proxies.fspSubarray[1].integrationTime == 1
assert proxies.fspSubarray[1].fspChannelOffset == 14880
assert proxies.fspSubarray[1].channelAveragingMap[0][0] == 0
assert proxies.fspSubarray[1].channelAveragingMap[0][1] == 8
assert proxies.fspSubarray[1].channelAveragingMap[1][0] == 744
assert proxies.fspSubarray[1].channelAveragingMap[1][1] == 8
assert proxies.fspSubarray[1].channelAveragingMap[2][0] == 1488
assert proxies.fspSubarray[1].channelAveragingMap[2][1] == 8
assert proxies.fspSubarray[1].channelAveragingMap[3][0] == 2232
assert proxies.fspSubarray[1].channelAveragingMap[3][1] == 8
assert proxies.fspSubarray[1].channelAveragingMap[4][0] == 2976
assert proxies.fspSubarray[1].outputLinkMap[0][0] == 0
assert proxies.fspSubarray[1].outputLinkMap[0][1] == 4
assert proxies.fspSubarray[1].outputLinkMap[1][0] == 744
assert proxies.fspSubarray[1].outputLinkMap[1][1] == 8
assert proxies.fspSubarray[1].outputLinkMap[2][0] == 1488
assert proxies.fspSubarray[1].outputLinkMap[2][1] == 12
assert proxies.fspSubarray[1].outputLinkMap[3][0] == 2232
assert proxies.fspSubarray[1].outputLinkMap[3][1] == 16
assert str(proxies.fspSubarray[1].visDestinationAddress).replace('"',"'") == \
str({"outputHost": [[0, "192.168.0.1"], [8184, "192.168.0.2"]],
"outputMac": [[0, "06-00-00-00-00-01"]],
"outputPort": [[0, 9000, 1], [8184, 9000, 1]]}).replace('"',"'")
# Clean Up
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
@pytest.mark.skip(reason="pst not currently supported")
def test_ConfigureScan_onlyPst_basic(self, proxies):
"""
Test a successful PST-BF scan configuration
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
proxy.loggingLevel = "DEBUG"
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
# check initial value of attributes of CBF subarray
assert len(proxies.subarray[1].receptors) == 0
assert proxies.subarray[1].configID == ''
assert proxies.subarray[1].frequencyBand == 0
assert proxies.subarray[1].obsState == ObsState.EMPTY
# add receptors
proxies.subarray[1].AddReceptors([4, 1, 3, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(4), [4, 1, 3, 2])])
# configure scan
f = open(file_path + "/../data/ConfigureScan_basic.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 15, 1)
# check configured attributes of CBF subarray
assert proxies.subarray[1].configID == "band:5a, fsp1, 744 channels average factor 8"
assert proxies.subarray[1].frequencyBand == 4
assert proxies.subarray[1].obsState == ObsState.READY
proxies.wait_timeout_obs([proxies.vcc[i + 1] for i in range(4)], ObsState.READY, 1, 1)
# check frequency band of VCCs, including states of frequency band capabilities
assert proxies.vcc[proxies.receptor_to_vcc[2]].frequencyBand == 4
assert [proxy.State() for proxy in proxies.vccBand[proxies.receptor_to_vcc[2] - 1]] == [
DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
# check the rest of the configured attributes of VCCs
# first for VCC belonging to receptor 2...
assert proxies.vcc[proxies.receptor_to_vcc[2]].subarrayMembership == 1
assert proxies.vcc[proxies.receptor_to_vcc[2]].frequencyBandOffsetStream1 == 0
assert proxies.vcc[proxies.receptor_to_vcc[2]].frequencyBandOffsetStream2 == 0
assert proxies.vcc[proxies.receptor_to_vcc[2]].rfiFlaggingMask == "{}"
# check configured attributes of FSPs, including states of function mode capabilities
assert proxies.fsp[2].State() == DevState.ON
assert proxies.fsp[2].functionMode == 3
assert 1 in proxies.fsp[2].subarrayMembership
assert [proxy.State() for proxy in proxies.fsp2FunctionMode] == [
DevState.DISABLE, DevState.DISABLE, DevState.ON, DevState.DISABLE
]
# check configured attributes of FSP subarrays
# FSP 2
assert proxies.fspSubarray[6].obsState == ObsState.READY
assert all([proxies.fspSubarray[6].receptors[i] == j for i, j in zip(range(1), [2])])
assert all([proxies.fspSubarray[6].timingBeamID[i] == j for i, j in zip(range(1), [10])])
# Clean Up
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
@pytest.mark.skip(reason="pst not currently supported")
def test_ConfigureScan_onlyPst_basic_FSP_scan_parameters(self, proxies):
"""
Test a successful transmission of PST-BF parameters to FSP
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
# check initial value of attributes of CBF subarray
assert len(proxies.subarray[1].receptors) == 0
assert proxies.subarray[1].configID == ''
assert proxies.subarray[1].frequencyBand == 0
assert proxies.subarray[1].obsState == ObsState.EMPTY
# add receptors
proxies.subarray[1].AddReceptors([4, 1, 3, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(4), [4, 1, 3, 2])])
# configure scan
f = open(file_path + "/../data/ConfigureScan_basic.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 15, 1)
# update jones matrices from tm emulator
f = open(file_path + "/../data/jonesmatrix_fsp.json")
jones_matrix = json.loads(f.read().replace("\n", ""))
epoch = str(int(time.time()))
for matrix in jones_matrix["jonesMatrix"]:
matrix["epoch"] = epoch
if matrix["destinationType"] == "fsp":
epoch = str(int(epoch) + 10)
# update Jones Matrix
proxies.tm.jonesMatrix = json.dumps(jones_matrix)
time.sleep(1)
for matrix in jones_matrix["jonesMatrix"]:
if matrix["destinationType"] == "fsp":
for receptor in matrix["matrixDetails"]:
rec_id = int(receptor["receptor"])
fs_id = receptor["receptorMatrix"][0]["fsid"]
for index, value in enumerate(receptor["receptorMatrix"][0]["matrix"]):
try:
assert proxies.fsp[fs_id].jonesMatrix[rec_id - 1][index] == value
except AssertionError as ae:
raise ae
except Exception as e:
raise e
time.sleep(10)
# update delay models from tm emulator
f = open(file_path + "/../data/delaymodel_fsp.json")
delay_model = json.loads(f.read().replace("\n", ""))
epoch = str(int(time.time()))
for model in delay_model["delayModel"]:
model["epoch"] = epoch
if model["destinationType"] == "fsp":
epoch = str(int(epoch) + 10)
# update delay model
proxies.tm.delayModel = json.dumps(delay_model)
time.sleep(1)
for model in delay_model["delayModel"]:
if model["destinationType"] == "fsp":
for receptor in model["delayDetails"]:
rec_id = int(receptor["receptor"])
fs_id = receptor["receptorDelayDetails"][0]["fsid"]
for index, value in enumerate(receptor["receptorDelayDetails"][0]["delayCoeff"]):
try:
assert proxies.fsp[fs_id].delayModel[rec_id - 1][index] == value
except AssertionError as ae:
raise ae
except Exception as e:
raise e
time.sleep(10)
# update timing beam weights from tm emulator
f = open(file_path + "/../data/timingbeamweights.json")
timing_beam_weights = json.loads(f.read().replace("\n", ""))
epoch = str(int(time.time()))
for weights in timing_beam_weights["beamWeights"]:
weights["epoch"] = epoch
epoch = str(int(epoch) + 10)
# update delay model
proxies.tm.beamWeights = json.dumps(timing_beam_weights)
time.sleep(1)
for weights in timing_beam_weights["beamWeights"]:
for receptor in weights["beamWeightsDetails"]:
rec_id = int(receptor["receptor"])
fs_id = receptor["receptorWeightsDetails"][0]["fsid"]
for index, value in enumerate(receptor["receptorWeightsDetails"][0]["weights"]):
try:
assert proxies.fsp[fs_id].timingBeamWeights[rec_id - 1][index] == value
except AssertionError as ae:
raise ae
except Exception as e:
raise e
time.sleep(10)
# Clean Up
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
def test_EndScan(self, proxies, input_test_data):
"""
Test the EndScan command
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].obsState == ObsState.EMPTY
# Input test data:
input_receptors = input_test_data[0]
config_file_name = input_test_data[1]
subarr_index = 1;
logging.info( "input_receptors = {}".format(input_receptors) )
logging.info( "config_file_name = {}".format(config_file_name) )
num_receptors = len(input_receptors)
vcc_ids = [None for _ in range(num_receptors)]
for receptor_id, ii in zip(input_receptors, range(num_receptors)):
vcc_ids[ii] = proxies.receptor_to_vcc[receptor_id]
proxies.subarray[subarr_index].AddReceptors(input_receptors)
proxies.wait_timeout_obs([proxies.subarray[subarr_index]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[subarr_index].receptors[i] == j for i, j in zip(range(num_receptors), input_receptors)])
assert proxies.subarray[subarr_index].obsState == ObsState.IDLE
# Check fsp obsState BEFORE scan configuration:
assert proxies.fspCorrSubarray[subarr_index-1].obsState == ObsState.IDLE
assert proxies.fspPssSubarray[subarr_index-1].obsState == ObsState.IDLE
assert proxies.fspPstSubarray[subarr_index-1].obsState == ObsState.IDLE
logging.info( "First vcc obsState BEFORE ConfigureScan = {}".
format(proxies.vcc[vcc_ids[0]].obsState) )
f = open(file_path + config_file_name)
json_string = f.read().replace("\n", "")
input_config_dict = json.loads(json_string)
proxies.subarray[subarr_index].ConfigureScan(json_string)
f.close()
proxies.wait_timeout_obs([proxies.subarray[subarr_index]], ObsState.READY, 15, 1)
logging.info( "First vcc obsState AFTER ConfigureScan = {}".
format(proxies.vcc[vcc_ids[0]].obsState) )
# check some configured attributes of CBF subarray
frequency_band = input_config_dict["common"]["frequency_band"]
input_band_index = freq_band_dict()[frequency_band]
assert proxies.subarray[subarr_index].configID == input_config_dict["common"]["config_id"]
assert proxies.subarray[subarr_index].frequencyBand == input_band_index
assert proxies.subarray[subarr_index].obsState == ObsState.READY
# Send the Scan command
f2 = open(file_path + "/../data/Scan1_basic.json")
json_string = f2.read().replace("\n", "")
input_scan_dict = json.loads(json_string)
proxies.subarray[subarr_index].Scan(json_string)
f2.close()
proxies.wait_timeout_obs([proxies.subarray[subarr_index]], ObsState.SCANNING, 1, 1)
# Note: scan_id is 1-based and of 'string' type
# scan_index is an index into an array, therefore 0-based
scan_index = int(input_scan_dict["scan_id"]) - 1
logging.info( "proxies.fspCorrSubarray[subarr_index-1].obsState = {}".
format(proxies.fspCorrSubarray[subarr_index-1].obsState) )
logging.info( "proxies.fspPssSubarray[subarr_index-1].obsState = {}".
format(proxies.fspPssSubarray[subarr_index-1].obsState) )
logging.info( "proxies.fspPstSubarray[subarr_index-1].obsState = {}".
format(proxies.fspPstSubarray[subarr_index-1].obsState) )
# Check obsStates BEFORE the EndScan() command
assert proxies.subarray[subarr_index].obsState == ObsState.SCANNING
assert proxies.vcc[vcc_ids[0]].obsState == ObsState.SCANNING
assert proxies.vcc[vcc_ids[num_receptors-1]].obsState == ObsState.SCANNING
for fsp in input_config_dict["cbf"]["fsp"]:
if fsp["function_mode"] == "CORR":
assert proxies.fspCorrSubarray[subarr_index-1].obsState == ObsState.SCANNING
elif fsp["function_mode"] == "PSS-BF":
assert proxies.fspPssSubarray[subarr_index-1].obsState == ObsState.SCANNING
# TODO: this check does not pass, to fix
#elif fsp["function_mode"] == "PST-BF":
# assert proxies.fspPstSubarray[subarr_index-1].obsState == ObsState.SCANNING
proxies.subarray[subarr_index].EndScan()
proxies.wait_timeout_obs([proxies.subarray[subarr_index]], ObsState.READY, 1, 1)
# Check obsStates AFTER the EndScan() command
assert proxies.subarray[subarr_index].obsState == ObsState.READY
assert proxies.vcc[vcc_ids[0]].obsState == ObsState.READY
assert proxies.vcc[vcc_ids[num_receptors -1]].obsState == ObsState.READY
assert proxies.fspCorrSubarray[subarr_index-1].obsState == ObsState.READY
for fsp in input_config_dict["cbf"]["fsp"]:
if fsp["function_mode"] == "CORR":
assert proxies.fspCorrSubarray[subarr_index-1].obsState == ObsState.READY
elif fsp["function_mode"] == "PSS-BF":
assert proxies.fspPssSubarray[subarr_index-1].obsState == ObsState.READY
# TODO: this check does not pass, to fix
#elif fsp["function_mode"] == "PST-BF":
# assert proxies.fspPstSubarray[subarr_index-1].obsState == ObsState.READY
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
    # TODO: refactor to verify delay model values against the input JSON
    # instead of the hard-coded expected coefficients asserted below.
    @pytest.mark.skip(reason="test needs to be refactored")
    def test_ConfigureScan_delayModel(self, proxies):
        """
        Test the reception of delay models.

        Publishes three delay models through the TM emulator attribute:
        entry [1] has epoch "0" (applies immediately), entry [2] fires
        ~10 s later and entry [0] ~20 s later. The VCC delayModel
        attributes are checked at each stage; the expected coefficient
        values presumably mirror delaymodel.json — TODO confirm once the
        test is refactored to read them from the file.
        """
        # Read delay model data from file
        f = open(file_path + "/../data/delaymodel.json")
        delay_model = json.loads(f.read().replace("\n", ""))
        f.close()
        aa = delay_model["delayModel"][0]["delayDetails"][0]["receptorDelayDetails"]
        num_fsp_IDs = len(aa)
        # Log the delay coefficients that will be published, for debugging.
        for jj in range(num_fsp_IDs):
            logging.info( "delayCoeff = {}".format( aa[jj]["delayCoeff"]) )
        try:
            # turn on Subarray
            if proxies.subarray[1].State() != DevState.ON:
                proxies.subarray[1].On()
                proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
            # power up all four VCC and FSP devices if they are off
            for proxy in [proxies.vcc[i + 1] for i in range(4)]:
                if proxy.State() == DevState.OFF:
                    proxy.On()
                    proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
            for proxy in [proxies.fsp[i + 1] for i in range(4)]:
                if proxy.State() == DevState.OFF:
                    proxy.On()
                    proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.EMPTY
            # add receptors
            proxies.subarray[1].AddReceptors([1, 3, 4, 2])
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
            assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(3), [1, 3, 4])])
            # configure scan
            f = open(file_path + "/../data/ConfigureScan_basic.json")
            proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
            f.close()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
            assert proxies.subarray[1].obsState == ObsState.READY
            # create a delay model
            # Insert the epoch: [1] applies now, [2] in ~10 s, [0] in ~20 s
            delay_model["delayModel"][0]["epoch"] = str(int(time.time()) + 20)
            delay_model["delayModel"][1]["epoch"] = "0"
            delay_model["delayModel"][2]["epoch"] = str(int(time.time()) + 10)
            # update delay model
            proxies.tm.delayModel = json.dumps(delay_model)
            time.sleep(1)
            for jj in range(4):
                logging.info((" proxies.vcc[{}].receptorID = {}".
                format(jj+1, proxies.vcc[jj+1].receptorID)))
            logging.info( ("Vcc, receptor 1, ObsState = {}".
            format(proxies.vcc[proxies.receptor_to_vcc[1]].ObsState)) )
            #proxies.vcc[0].receptorID
            # Expected coefficients for the epoch-"0" model (entry [1]).
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][0] == 1.1
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][1] == 1.2
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][2] == 1.3
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][3] == 1.4
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][4] == 1.5
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][5] == 1.6
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][0] == 1.7
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][1] == 1.8
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][2] == 1.9
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][3] == 2.0
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][4] == 2.1
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][5] == 2.2
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][0] == 2.3
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][1] == 2.4
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][2] == 2.5
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][3] == 2.6
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][4] == 2.7
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][5] == 2.8
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][0] == 2.9
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][1] == 3.0
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][2] == 3.1
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][3] == 3.2
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][4] == 3.3
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][5] == 3.4
            # transition to obsState=SCANNING
            f2 = open(file_path + "/../data/Scan1_basic.json")
            proxies.subarray[1].Scan(f2.read().replace("\n", ""))
            f2.close()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.SCANNING, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.SCANNING
            # Wait for the +10 s model (entry [2]) to take effect mid-scan.
            time.sleep(10)
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][0] == 2.1
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][1] == 2.2
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][2] == 2.3
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][3] == 2.4
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][4] == 2.5
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][5] == 2.6
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][0] == 2.7
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][1] == 2.8
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][2] == 2.9
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][3] == 3.0
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][4] == 3.1
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][5] == 3.2
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][0] == 3.3
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][1] == 3.4
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][2] == 3.5
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][3] == 3.6
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][4] == 3.7
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][5] == 3.8
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][0] == 3.9
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][1] == 4.0
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][2] == 4.1
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][3] == 4.2
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][4] == 4.3
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][5] == 4.4
            # Wait for the +20 s model (entry [0]) to take effect.
            time.sleep(10)
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][0] == 0.1
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][1] == 0.2
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][2] == 0.3
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][3] == 0.4
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][4] == 0.5
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[0][5] == 0.6
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][0] == 0.7
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][1] == 0.8
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][2] == 0.9
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][3] == 1.0
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][4] == 1.1
            assert proxies.vcc[proxies.receptor_to_vcc[1]].delayModel[1][5] == 1.2
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][0] == 1.3
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][1] == 1.4
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][2] == 1.5
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][3] == 1.6
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][4] == 1.7
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[0][5] == 1.8
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][0] == 1.9
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][1] == 2.0
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][2] == 2.1
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][3] == 2.2
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][4] == 2.3
            assert proxies.vcc[proxies.receptor_to_vcc[4]].delayModel[1][5] == 2.4
            proxies.subarray[1].EndScan()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 1, 1)
            proxies.clean_proxies()
        except AssertionError as ae:
            # Always restore device state before propagating the failure.
            proxies.clean_proxies()
            raise ae
        except Exception as e:
            proxies.clean_proxies()
            raise e
def test_ConfigureScan_jonesMatrix(self, proxies):
"""
Test the reception of Jones matrices
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].obsState == ObsState.EMPTY
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(3), [1, 3, 4])])
# configure scan
f = open(file_path + "/../data/ConfigureScan_basic.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
assert proxies.subarray[1].obsState == ObsState.READY
#create a Jones matrix
f = open(file_path + "/../data/jonesmatrix.json")
jones_matrix = json.loads(f.read().replace("\n", ""))
f.close()
jones_matrix["jonesMatrix"][0]["epoch"] = str(int(time.time()) + 20)
jones_matrix["jonesMatrix"][1]["epoch"] = "0"
jones_matrix["jonesMatrix"][2]["epoch"] = str(int(time.time()) + 10)
# update Jones Matrix
proxies.tm.jonesMatrix = json.dumps(jones_matrix)
time.sleep(5)
for receptor in jones_matrix["jonesMatrix"][1]["matrixDetails"]:
for frequency_slice in receptor["receptorMatrix"]:
for index, value in enumerate(frequency_slice["matrix"]):
vcc_id = proxies.receptor_to_vcc[receptor["receptor"]]
fs_id = frequency_slice["fsid"]
try:
assert proxies.vcc[vcc_id].jonesMatrix[fs_id-1][index] == value
except AssertionError as ae:
logging.error("AssertionError; incorrect Jones matrix entry: epoch {}, VCC {}, i = {}, jonesMatrix[{}] = {}".format(
jones_matrix["jonesMatrix"][1]["epoch"], vcc_id, index, fs_id-1, proxies.vcc[vcc_id].jonesMatrix[fs_id-1])
)
raise ae
except Exception as e:
raise e
# transition to obsState == SCANNING
f2 = open(file_path + "/../data/Scan1_basic.json")
proxies.subarray[1].Scan(f2.read().replace("\n", ""))
f2.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.SCANNING, 1, 1)
assert proxies.subarray[1].obsState == ObsState.SCANNING
time.sleep(10)
for receptor in jones_matrix["jonesMatrix"][2]["matrixDetails"]:
for frequency_slice in receptor["receptorMatrix"]:
for index, value in enumerate(frequency_slice["matrix"]):
vcc_id = proxies.receptor_to_vcc[receptor["receptor"]]
fs_id = frequency_slice["fsid"]
try:
assert proxies.vcc[vcc_id].jonesMatrix[fs_id-1][index] == value
except AssertionError as ae:
logging.error("AssertionError; incorrect Jones matrix entry: epoch {}, VCC {}, i = {}, jonesMatrix[{}] = {}".format(
jones_matrix["jonesMatrix"][1]["epoch"], vcc_id, index, fs_id-1, proxies.vcc[vcc_id].jonesMatrix[fs_id-1])
)
raise ae
except Exception as e:
raise e
time.sleep(10)
for receptor in jones_matrix["jonesMatrix"][0]["matrixDetails"]:
for frequency_slice in receptor["receptorMatrix"]:
for index, value in enumerate(frequency_slice["matrix"]):
vcc_id = proxies.receptor_to_vcc[receptor["receptor"]]
fs_id = frequency_slice["fsid"]
try:
assert proxies.vcc[vcc_id].jonesMatrix[fs_id-1][index] == value
except AssertionError as ae:
logging.error("AssertionError; incorrect Jones matrix entry: epoch {}, VCC {}, i = {}, jonesMatrix[{}] = {}".format(
jones_matrix["jonesMatrix"][1]["epoch"], vcc_id, index, fs_id-1, proxies.vcc[vcc_id].jonesMatrix[fs_id-1])
)
raise ae
except Exception as e:
raise e
proxies.subarray[1].EndScan()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 1, 1)
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
def test_Scan(self, proxies):
"""
Test the Scan command
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].obsState == ObsState.EMPTY
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(3), [1, 3, 4])])
# configure scan
f1 = open(file_path + "/../data/ConfigureScan_basic.json")
proxies.subarray[1].ConfigureScan(f1.read().replace("\n", ""))
f1.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
# check initial states
assert proxies.subarray[1].obsState == ObsState.READY
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.READY
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.READY
assert proxies.fspSubarray[1].obsState == ObsState.READY
assert proxies.fspSubarray[3].obsState == ObsState.READY
# send the Scan command
f2 = open(file_path + "/../data/Scan1_basic.json")
proxies.subarray[1].Scan(f2.read().replace("\n", ""))
f2.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.SCANNING, 1, 1)
# check scanID on VCC and FSP
assert proxies.fspSubarray[1].scanID == 1
assert proxies.vcc[proxies.receptor_to_vcc[4]].scanID ==1
# check states
assert proxies.subarray[1].obsState == ObsState.SCANNING
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.SCANNING
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.SCANNING
assert proxies.fspSubarray[1].obsState == ObsState.SCANNING
assert proxies.fspSubarray[3].obsState == ObsState.SCANNING
proxies.subarray[1].EndScan()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 1, 1)
assert proxies.subarray[1].obsState == ObsState.READY
# Clean Up
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
    def test_Abort_Reset(self, proxies):
        """
        Test the Abort and ObsReset command sequence.

        Exercises Abort from both READY and SCANNING, verifying that
        ObsReset returns the subarray, FSP subarrays and VCCs to IDLE
        while the assigned receptors are retained.
        """
        try:
            # turn on Subarray
            if proxies.subarray[1].State() != DevState.ON:
                proxies.subarray[1].On()
                proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
            # power up all four VCC and FSP devices if they are off
            for proxy in [proxies.vcc[i + 1] for i in range(4)]:
                if proxy.State() == DevState.OFF:
                    proxy.On()
                    proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
            for proxy in [proxies.fsp[i + 1] for i in range(4)]:
                if proxy.State() == DevState.OFF:
                    proxy.On()
                    proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.EMPTY
            ############################# abort from READY ###########################
            # add receptors
            proxies.subarray[1].AddReceptors([1, 3, 4, 2])
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
            # configure scan
            f = open(file_path + "/../data/ConfigureScan_basic.json")
            proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
            f.close()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
            assert proxies.subarray[1].obsState == ObsState.READY
            assert proxies.fspSubarray[1].obsState == ObsState.READY
            assert proxies.fspSubarray[3].obsState == ObsState.READY
            assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.READY
            assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.READY
            # abort
            proxies.subarray[1].Abort()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.ABORTED, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.ABORTED
            # ObsReset: back to IDLE, receptor assignment is retained
            proxies.subarray[1].ObsReset()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.IDLE
            assert all([proxies.subarray[1].receptors[i] == j for i, j in zip(range(3), [1, 3, 4])])
            assert proxies.fspSubarray[1].obsState == ObsState.IDLE
            assert proxies.fspSubarray[3].obsState == ObsState.IDLE
            assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.IDLE
            assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.IDLE
            ############################# abort from SCANNING ###########################
            # add receptors
            proxies.subarray[1].AddReceptors([1, 3, 4, 2])
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
            # configure scan
            f = open(file_path + "/../data/ConfigureScan_basic.json")
            proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
            f.close()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
            # scan (Scan2_basic.json carries scan ID 2)
            f2 = open(file_path + "/../data/Scan2_basic.json")
            proxies.subarray[1].Scan(f2.read().replace("\n", ""))
            f2.close()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.SCANNING, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.SCANNING
            assert proxies.subarray[1].scanID == 2
            assert proxies.fspSubarray[1].obsState == ObsState.SCANNING
            assert proxies.fspSubarray[3].obsState == ObsState.SCANNING
            assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.SCANNING
            assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.SCANNING
            # abort mid-scan: subordinate devices end their scan and go READY
            proxies.subarray[1].Abort()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.ABORTED, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.ABORTED
            assert proxies.fspSubarray[1].obsState == ObsState.READY
            assert proxies.fspSubarray[3].obsState == ObsState.READY
            assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.READY
            assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.READY
            # ObsReset: back to IDLE with the scan ID cleared
            proxies.subarray[1].ObsReset()
            proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
            assert proxies.subarray[1].obsState == ObsState.IDLE
            assert proxies.subarray[1].scanID == 0
            assert proxies.fspSubarray[1].obsState == ObsState.IDLE
            assert proxies.fspSubarray[3].obsState == ObsState.IDLE
            assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.IDLE
            assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.IDLE
            # Clean Up
            proxies.clean_proxies()
        except AssertionError as ae:
            # Always restore device state before propagating the failure.
            proxies.clean_proxies()
            raise ae
        except Exception as e:
            proxies.clean_proxies()
            raise e
def test_Abort_Restart(self, proxies):
"""
Test abort restart
"""
try:
# turn on Subarray
if proxies.subarray[1].State() != DevState.ON:
proxies.subarray[1].On()
proxies.wait_timeout_dev([proxies.subarray[1]], DevState.ON, 3, 1)
for proxy in [proxies.vcc[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
for proxy in [proxies.fsp[i + 1] for i in range(4)]:
if proxy.State() == DevState.OFF:
proxy.On()
proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
assert proxies.subarray[1].obsState == ObsState.EMPTY
############################# abort from IDLE ###########################
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert proxies.subarray[1].obsState == ObsState.IDLE
# abort
proxies.subarray[1].Abort()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.ABORTED, 1, 1)
assert proxies.subarray[1].obsState == ObsState.ABORTED
# Restart: receptors should be empty
proxies.subarray[1].Restart()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.EMPTY, 1, 1)
assert proxies.subarray[1].obsState == ObsState.EMPTY
assert len(proxies.subarray[1].receptors) == 0
assert proxies.fspSubarray[1].obsState == ObsState.IDLE
assert proxies.fspSubarray[3].obsState == ObsState.IDLE
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.IDLE
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.IDLE
############################# abort from READY ###########################
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
# configure scan
f = open(file_path + "/../data/ConfigureScan_basic.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
assert proxies.subarray[1].obsState == ObsState.READY
assert proxies.fspSubarray[1].obsState == ObsState.READY
assert proxies.fspSubarray[3].obsState == ObsState.READY
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.READY
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.READY
# abort
proxies.subarray[1].Abort()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.ABORTED, 1, 1)
assert proxies.subarray[1].obsState == ObsState.ABORTED
# ObsReset
proxies.subarray[1].Restart()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.EMPTY, 1, 1)
assert proxies.subarray[1].obsState == ObsState.EMPTY
assert len(proxies.subarray[1].receptors) == 0
assert proxies.fspSubarray[1].obsState == ObsState.IDLE
assert proxies.fspSubarray[3].obsState == ObsState.IDLE
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.IDLE
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.IDLE
############################# abort from SCANNING ###########################
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4, 2])
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
# configure scan
f = open(file_path + "/../data/ConfigureScan_basic.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.READY, 30, 1)
# scan
f2 = open(file_path + "/../data/Scan2_basic.json")
proxies.subarray[1].Scan(f2.read().replace("\n", ""))
f2.close()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.SCANNING, 1, 1)
assert proxies.subarray[1].obsState == ObsState.SCANNING
assert proxies.subarray[1].scanID == 2
assert proxies.fspSubarray[1].obsState == ObsState.SCANNING
assert proxies.fspSubarray[3].obsState == ObsState.SCANNING
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.SCANNING
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.SCANNING
# abort
proxies.subarray[1].Abort()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.ABORTED, 1, 1)
assert proxies.subarray[1].obsState == ObsState.ABORTED
assert proxies.fspSubarray[1].obsState == ObsState.READY
assert proxies.fspSubarray[3].obsState == ObsState.READY
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.READY
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.READY
# ObsReset
proxies.subarray[1].Restart()
proxies.wait_timeout_obs([proxies.subarray[1]], ObsState.IDLE, 1, 1)
assert len(proxies.subarray[1].receptors) == 0
assert proxies.fspSubarray[1].obsState == ObsState.IDLE
assert proxies.fspSubarray[3].obsState == ObsState.IDLE
assert proxies.vcc[proxies.receptor_to_vcc[1]].obsState == ObsState.IDLE
assert proxies.vcc[proxies.receptor_to_vcc[4]].obsState == ObsState.IDLE
proxies.clean_proxies()
except AssertionError as ae:
proxies.clean_proxies()
raise ae
except Exception as e:
proxies.clean_proxies()
raise e
    def test_ConfigureScan_minimal(self, proxies):
        """
        Test a minimal successful ConfigureScan using the TM-CSP v2
        configuration, verifying the configured attributes of the CBF
        subarray, the VCCs and the FSP subarrays.
        """
        try:
            sub_id = 1
            #TODO currently only support for 1 receptor per fsp
            test_receptor_ids = [4, 1]
            #test_receptor_ids = [1]
            vcc_index = proxies.receptor_to_vcc[test_receptor_ids[0]]
            logging.info("vcc_index = {}".format(vcc_index))
            # NOTE(review): only used by the commented-out frequency-band
            # capability checks below; kept for when those are re-enabled.
            vcc_band_proxies = proxies.vccBand[vcc_index - 1]
            # turn on Subarray
            if proxies.subarray[sub_id].State() != DevState.ON:
                proxies.subarray[sub_id].On()
                proxies.wait_timeout_dev([proxies.subarray[sub_id]], DevState.ON, 3, 1)
            # power up all four VCC and FSP devices if they are off
            for proxy in [proxies.vcc[i + 1] for i in range(4)]:
                if proxy.State() == DevState.OFF:
                    proxy.On()
                    proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
            for proxy in [proxies.fsp[i + 1] for i in range(4)]:
                if proxy.State() == DevState.OFF:
                    proxy.On()
                    proxies.wait_timeout_dev([proxy], DevState.ON, 1, 1)
            # check initial value of attributes of CBF subarray
            assert len(proxies.subarray[sub_id].receptors) == 0
            assert proxies.subarray[sub_id].configID == ''
            # TODO in CbfSubarray, at end of scan, clear all private data
            #assert proxies.subarray[sub_id].frequencyBand == 0
            assert proxies.subarray[sub_id].obsState == ObsState.EMPTY
            # add receptors
            proxies.subarray[sub_id].AddReceptors(test_receptor_ids)
            proxies.wait_timeout_obs([proxies.subarray[sub_id]], ObsState.IDLE, 1, 1)
            assert all([proxies.subarray[sub_id].receptors[i] == j
                for i, j in zip(range(len(test_receptor_ids)), test_receptor_ids)])
            # configure scan
            f = open(file_path + "/../data/Configure_TM-CSP_v2.json")
            configuration = f.read().replace("\n", "")
            f.close()
            proxies.subarray[sub_id].ConfigureScan(configuration)
            proxies.wait_timeout_obs([proxies.subarray[sub_id]], ObsState.READY, 15, 1)
            configuration = json.loads(configuration)
            band_index = freq_band_dict()[configuration["common"]["frequency_band"]]
            # check configured attributes of CBF subarray
            assert sub_id == int(configuration["common"]["subarray_id"])
            assert proxies.subarray[sub_id].configID == configuration["common"]["config_id"]
            assert proxies.subarray[sub_id].frequencyBand == band_index
            assert proxies.subarray[sub_id].obsState == ObsState.READY
            proxies.wait_timeout_obs([proxies.vcc[i + 1] for i in range(4)], ObsState.READY, 1, 1)
            # check frequency band of VCCs, including states of
            # frequency band capabilities
            logging.info( ("proxies.vcc[vcc_index].frequencyBand = {}".
                format( proxies.vcc[vcc_index].frequencyBand)) )
            assert proxies.vcc[vcc_index].configID == configuration["common"]["config_id"]
            assert proxies.vcc[vcc_index].frequencyBand == band_index
            assert proxies.vcc[vcc_index].subarrayMembership == sub_id
            #TODO fix these tests; issue with VccBand devices either not reconfiguring in between
            # configurations or causing a fault within the Vcc device
            # for proxy in vcc_band_proxies:
            #     logging.info("VCC proxy.State() = {}".format(proxy.State()))
            # for i in range(4):
            #     if (i == 0 and band_index == 0) or (i == (band_index - 1)):
            #         assert vcc_band_proxies[i].State() == DevState.ON
            #     else:
            #         assert vcc_band_proxies[i].State() == DevState.DISABLE
            # check configured attributes of FSPs, including states of function mode capabilities
            fsp_function_mode_proxies = [proxies.fsp1FunctionMode, proxies.fsp2FunctionMode,
                                         proxies.fsp3FunctionMode, proxies.fsp4FunctionMode]
            for fsp in configuration["cbf"]["fsp"]:
                fsp_id = fsp["fsp_id"]
                logging.info("{}".format(fsp_id))
                #TODO add function mode to enum or edit attribute to accept string in FSP
                # NOTE(review): an unrecognized function_mode string would leave
                # function_mode unbound and raise NameError on the assert below.
                if fsp["function_mode"] == "CORR": function_mode = 1
                elif fsp["function_mode"] == "PSS-BF": function_mode = 2
                elif fsp["function_mode"] == "PST-BF": function_mode = 3
                elif fsp["function_mode"] == "VLBI": function_mode = 4
                assert proxies.fsp[fsp_id].functionMode == function_mode
                assert sub_id in proxies.fsp[fsp_id].subarrayMembership
                # only the capability matching the configured function mode is ON
                assert [proxy.State() for proxy in fsp_function_mode_proxies[fsp_id-1]] == [
                    DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
                ]
                # check configured attributes of FSP subarray
                #TODO align IDs of fspSubarrays to fsp_id in conftest; currently works for fsps 1 and 2
                assert proxies.fspSubarray[fsp_id].obsState == ObsState.READY
                assert proxies.fspSubarray[fsp_id].receptors == test_receptor_ids[0]
                assert proxies.fspSubarray[fsp_id].frequencyBand == band_index
                assert proxies.fspSubarray[fsp_id].frequencySliceID == fsp["frequency_slice_id"]
                assert proxies.fspSubarray[fsp_id].integrationTime == fsp["integration_factor"]
                assert proxies.fspSubarray[fsp_id].corrBandwidth == fsp["zoom_factor"]
                # zoom window tuning only applies to zoom (non-zero zoom_factor) configurations
                if fsp["zoom_factor"] > 0:
                    assert proxies.fspSubarray[fsp_id].zoomWindowTuning == fsp["zoom_window_tuning"]
                assert proxies.fspSubarray[fsp_id].fspChannelOffset == fsp["channel_offset"]
                # element-wise comparison of the 2-D channel averaging and output link maps
                for i in range(len(fsp["channel_averaging_map"])):
                    for j in range(len(fsp["channel_averaging_map"][i])):
                        assert proxies.fspSubarray[fsp_id].channelAveragingMap[i][j] == fsp["channel_averaging_map"][i][j]
                for i in range(len(fsp["output_link_map"])):
                    for j in range(len(fsp["output_link_map"][i])):
                        assert proxies.fspSubarray[fsp_id].outputLinkMap[i][j] == fsp["output_link_map"][i][j]
            proxies.clean_proxies()
        except AssertionError as ae:
            # Always restore device state before propagating the failure.
            proxies.clean_proxies()
            raise ae
        except Exception as e:
            proxies.clean_proxies()
            raise e
'''
def test_ConfigureScan_onlyPss_basic(
self,
cbf_master_proxy,
proxies.subarray[1],
sw_1_proxy,
sw_2_proxy,
vcc_proxies,
vcc_band_proxies,
vcc_tdc_proxies,
fsp_1_proxy,
fsp_2_proxy,
fsp_1_function_mode_proxy,
fsp_2_function_mode_proxy,
fsp_3_proxies.subarray[1],
tm_telstate_proxy
):
"""
Test a minimal successful configuration
"""
for proxy in vcc_proxies:
proxy.Init()
fsp_3_proxies.subarray[1].Init()
fsp_1_proxy.Init()
fsp_2_proxy.Init()
proxies.subarray[1].set_timeout_millis(60000) # since the command takes a while
proxies.subarray[1].Init()
time.sleep(3)
cbf_master_proxy.set_timeout_millis(60000)
cbf_master_proxy.Init()
time.sleep(60) # takes pretty long for CBF Master to initialize
tm_telstate_proxy.Init()
time.sleep(1)
receptor_to_vcc = dict([*map(int, pair.split(":"))] for pair in
cbf_master_proxy.receptorToVcc)
cbf_master_proxy.On()
time.sleep(3)
# check initial value of attributes of CBF subarray
# assert proxies.subarray[1].receptors == ()
# assert proxies.subarray[1].configID == 0
assert proxies.subarray[1].frequencyBand == 0
assert proxies.subarray[1].obsState.value == ObsState.IDLE.value
# assert tm_telstate_proxy.visDestinationAddress == "{}"
assert tm_telstate_proxy.receivedOutputLinks == False
# add receptors
proxies.subarray[1].RemoveAllReceptors()
proxies.subarray[1].AddReceptors([1, 3, 4])
time.sleep(1)
assert proxies.subarray[1].receptors[0] == 1
assert proxies.subarray[1].receptors[1] == 3
assert proxies.subarray[1].receptors[2] == 4
# configure scan
f = open(file_path + "/test_json/test_ConfigureScan_onlyPss_basic.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
time.sleep(15)
# check configured attributes of CBF subarray # def test_ConfigureScan_basic(
# self,
# cbf_master_proxy,
# proxies.subarray[1],
# sw_1_proxy,
# sw_2_proxy,
# vcc_proxies,
# vcc_band_proxies,
# vcc_tdc_proxies,
# fsp_1_proxy,
# fsp_2_proxy,
# fsp_1_function_mode_proxy,
# fsp_2_function_mode_proxy,
# fsp_1_proxies.subarray[1],
# fsp_2_proxies.subarray[1],
# fsp_3_proxies.subarray[1],
# tm_telstate_proxy
# ):
# """
# Test a minimal successful configuration
# """
# for proxy in vcc_proxies:
# proxy.Init()
# fsp_1_proxies.subarray[1].Init()
# fsp_2_proxies.subarray[1].Init()
# fsp_3_proxies.subarray[1].Init()
# fsp_1_proxy.Init()
# fsp_2_proxy.Init()
# proxies.subarray[1].set_timeout_millis(60000) # since the command takes a while
# proxies.subarray[1].Init()
# time.sleep(3)
# cbf_master_proxy.set_timeout_millis(60000)
# cbf_master_proxy.Init()
# time.sleep(60) # takes pretty long for CBF Master to initialize
# tm_telstate_proxy.Init()
# time.sleep(1)
# receptor_to_vcc = dict([*map(int, pair.split(":"))] for pair in
# cbf_master_proxy.receptorToVcc)
# cbf_master_proxy.On()
# time.sleep(60)
# # turn on Subarray
# assert proxies.subarray[1].state()==DevState.OFF
# proxies.subarray[1].On()
# time.sleep(10)
# # check initial value of attributes of CBF subarray
# assert len(proxies.subarray[1].receptors) == 0
# assert proxies.subarray[1].configID == 0
# assert proxies.subarray[1].frequencyBand == 0
# assert proxies.subarray[1].State() == DevState.ON
# assert proxies.subarray[1].ObsState == ObsState.EMPTY
# # assert tm_telstate_proxy.visDestinationAddress == "{}"
# assert tm_telstate_proxy.receivedOutputLinks == False
# # add receptors
# proxies.subarray[1].RemoveAllReceptors()
# proxies.subarray[1].AddReceptors([1, 3, 4])
# time.sleep(1)
# assert proxies.subarray[1].receptors[0] == 1
# assert proxies.subarray[1].receptors[1] == 3
# assert proxies.subarray[1].receptors[2] == 4
# # configure scan
# f = open(file_path + "/test_json/test_ConfigureScan_basic.json")
# proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
# f.close()
# time.sleep(15)
# # check configured attributes of CBF subarray
# assert proxies.subarray[1].configID == "band:5a, fsp1, 744 channels average factor 8"
# assert proxies.subarray[1].frequencyBand == 4 # means 5a?
# assert proxies.subarray[1].obsState.value == ObsState.READY.value
# # check frequency band of VCCs, including states of frequency band capabilities
# assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBand == 4
# assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBand == 4
# assert [proxy.State() for proxy in vcc_band_proxies[receptor_to_vcc[4] - 1]] == [
# DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
# assert [proxy.State() for proxy in vcc_band_proxies[receptor_to_vcc[1] - 1]] == [
# DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
# # check the rest of the configured attributes of VCCs
# # first for VCC belonging to receptor 10...
# assert vcc_proxies[receptor_to_vcc[4] - 1].subarrayMembership == 1
# assert vcc_proxies[receptor_to_vcc[4] - 1].band5Tuning[0] == 5.85
# assert vcc_proxies[receptor_to_vcc[4] - 1].band5Tuning[1] == 7.25
# assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBandOffsetStream1 == 0
# assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBandOffsetStream2 == 0
# assert vcc_proxies[receptor_to_vcc[4] - 1].rfiFlaggingMask == "{}"
# # then for VCC belonging to receptor 1...
# assert vcc_proxies[receptor_to_vcc[1] - 1].subarrayMembership == 1
# assert vcc_proxies[receptor_to_vcc[1] - 1].band5Tuning[0] == 5.85
# assert vcc_proxies[receptor_to_vcc[1] - 1].band5Tuning[1] == 7.25
# assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBandOffsetStream1 == 0
# assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBandOffsetStream2 == 0
# assert vcc_proxies[receptor_to_vcc[1] - 1].rfiFlaggingMask == "{}"
# # check configured attributes of search windows
# # first for search window 1...
# assert sw_1_proxy.State() == DevState.ON
# assert sw_1_proxy.searchWindowTuning == 6000000000
# assert sw_1_proxy.tdcEnable == True
# assert sw_1_proxy.tdcNumBits == 8
# assert sw_1_proxy.tdcPeriodBeforeEpoch == 5
# assert sw_1_proxy.tdcPeriodAfterEpoch == 25
# assert "".join(sw_1_proxy.tdcDestinationAddress.split()) in [
# "[{\"receptorID\":4,\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"]},{\"receptorID\":1,\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"]}]",
# "[{\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"],\"receptorID\":4},{\"receptorID\":1,\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"]}]",
# "[{\"receptorID\":4,\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"]},{\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"],\"receptorID\":1}]",
# "[{\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"],\"receptorID\":4},{\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"],\"receptorID\":1}]",
# ]
# # then for search window 2...
# assert sw_2_proxy.State() == DevState.DISABLE
# assert sw_2_proxy.searchWindowTuning == 7000000000
# assert sw_2_proxy.tdcEnable == False
# # check configured attributes of VCC search windows
# # first for search window 1 of VCC belonging to receptor 10...
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].State() == DevState.ON
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].searchWindowTuning == 6000000000
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcEnable == True
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcNumBits == 8
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcPeriodBeforeEpoch == 5
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcPeriodAfterEpoch == 25
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcDestinationAddress == (
# "foo", "bar", "8080"
# )
# # then for search window 1 of VCC belonging to receptor 1...
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].State() == DevState.ON
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].searchWindowTuning == 6000000000
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcEnable == True
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcNumBits == 8
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcPeriodBeforeEpoch == 5
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcPeriodAfterEpoch == 25
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcDestinationAddress == (
# "fizz", "buzz", "80"
# )
# # then for search window 2 of VCC belonging to receptor 10...
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][1].State() == DevState.DISABLE
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][1].searchWindowTuning == 7000000000
# assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][1].tdcEnable == False
# # and lastly for search window 2 of VCC belonging to receptor 1...
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][1].State() == DevState.DISABLE
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][1].searchWindowTuning == 7000000000
# assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][1].tdcEnable == False
# # check configured attributes of FSPs, including states of function mode capabilities
# assert fsp_1_proxy.functionMode == 1
# assert 1 in fsp_1_proxy.subarrayMembership
# # assert 1 in fsp_2_proxy.subarrayMembership
# assert [proxy.State() for proxy in fsp_1_function_mode_proxy] == [
# DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
# ]
# # assert [proxy.State() for proxy in fsp_2_function_mode_proxy] == [
# # DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
# # ]
# # check configured attributes of FSP subarrays
# # first for FSP 1...
# assert fsp_1_proxies.subarray[1].obsState == ObsState.EMPTY
# assert fsp_1_proxies.subarray[1].receptors == 4
# assert fsp_1_proxies.subarray[1].frequencyBand == 4
# assert fsp_1_proxies.subarray[1].band5Tuning[0] == 5.85
# assert fsp_1_proxies.subarray[1].band5Tuning[1] == 7.25
# assert fsp_1_proxies.subarray[1].frequencyBandOffsetStream1 == 0
# assert fsp_1_proxies.subarray[1].frequencyBandOffsetStream2 == 0
# assert fsp_1_proxies.subarray[1].frequencySliceID == 1
# assert fsp_1_proxies.subarray[1].corrBandwidth == 1
# assert fsp_1_proxies.subarray[1].zoomWindowTuning == 4700000
# assert fsp_1_proxies.subarray[1].integrationTime == 140
# assert fsp_1_proxies.subarray[1].fspChannelOffset == 14880
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[0][0] == 0
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[0][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[1][0] == 744
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[1][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[2][0] == 1488
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[2][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[3][0] == 2232
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[3][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[4][0] == 2976
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[4][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[5][0] == 3720
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[5][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[6][0] == 4464
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[6][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[7][0] == 5208
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[7][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[8][0] == 5952
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[8][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[9][0] == 6696
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[9][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[10][0] == 7440
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[10][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[11][0] == 8184
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[11][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[12][0] == 8928
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[12][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[13][0] == 9672
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[13][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[14][0] == 10416
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[14][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[15][0] == 11160
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[15][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[16][0] == 11904
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[16][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[17][0] == 12648
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[17][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[18][0] == 13392
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[18][1] == 8
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[19][0] == 14136
# # assert fsp_1_proxies.subarray[1].channelAveragingMap[19][1] == 8
# assert fsp_1_proxies.subarray[1].outputLinkMap[0][0] == 0
# assert fsp_1_proxies.subarray[1].outputLinkMap[0][1] == 4
# assert fsp_1_proxies.subarray[1].outputLinkMap[1][0] == 744
# assert fsp_1_proxies.subarray[1].outputLinkMap[1][1] == 8
# assert fsp_1_proxies.subarray[1].outputLinkMap[2][0] == 1488
# assert fsp_1_proxies.subarray[1].outputLinkMap[2][1] == 12
# assert fsp_1_proxies.subarray[1].outputLinkMap[3][0] == 2232
# assert fsp_1_subarray_1_proroxy.receptors[2] == 4
# # assert fsp_2_proxies.subarray[1].frequencyBand == 4
# # assert fsp_2_proxies.subarray[1].band5Tuning[0] == 5.85
# # assert fsp_2_proxies.subarray[1].band5Tuning[1] == 7.25
# # assert fsp_2_proxies.subarray[1].frequencyBandOffsetStream1 == 0
# # assert fsp_2_proxies.subarray[1].frequencyBandOffsetStream2 == 0
# # assert fsp_2_proxies.subarray[1].frequencySliceID == 20
# # assert fsp_2_proxies.subarray[1].corrBandwidth == 0
# # assert fsp_2_proxies.subarray[1].integrationTime == 1400
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[0][0] == 1
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[0][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[1][0] == 745
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[1][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[2][0] == 1489
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[2][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[3][0] == 2233
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[3][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[4][0] == 2977
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[4][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[5][0] == 3721
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[5][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[6][0] == 4465
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[6][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[7][0] == 5209
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[7][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[8][0] == 5953
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[8][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[9][0] == 6697
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[9][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[10][0] == 7441
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[10][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[11][0] == 8185
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[11][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[12][0] == 8929
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[12][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[13][0] == 9673
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[13][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[14][0] == 10417
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[14][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[15][0] == 11161
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[15][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[16][0] == 11905
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[16][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[17][0] == 12649
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[17][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[18][0] == 13393
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[18][1] == 0
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[19][0] == 14137
# # assert fsp_2_proxies.subarray[1].channelAveragingMap[19][1] == 0
# # then for FSP 3...
# assert fsp_3_proxies.subarray[1].receptors[0] == 3
# assert fsp_3_proxies.subarray[1].receptors[1] == 1
# assert fsp_3_proxies.subarray[1].searchWindowID == 2
# assert fsp_3_proxies.subarray[1].searchBeamID[0] == 300
# assert fsp_3_proxies.subarray[1].searchBeamID[1] == 400
# searchBeam = fsp_3_proxies.subarray[1].searchBeams
# searchBeam300 = json.loads(searchBeam[0])
# searchBeam400 = json.loads(searchBeam[1])
# assert searchBeam300["searchBeamID"] == 300
# assert searchBeam300["receptors"][0] == 3
# assert searchBeam300["outputEnable"] == True
# assert searchBeam300["averagingInterval"] == 4
# assert searchBeam300["searchBeamDestinationAddress"] == "10.05.1.1"
# assert searchBeam400["searchBeamID"] == 400
# assert searchBeam400["receptors"][0] == 1
# assert searchBeam400["outputEnable"] == True
# assert searchBeam400["averagingInterval"] == 2
# assert searchBeam400["searchBeamDestinationAddress"] == "10.05.2.1"
# proxies.subarray[1].GoToIdle()
# time.sleep(3)
# assert proxies.subarray[1].obsState == ObsState.IDLE
# proxies.subarray[1].RemoveAllReceptors()
# time.sleep(3)
# assert proxies.subarray[1].state() == tango.DevState.OFFequency band capabilities
assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBand == 4
assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBand == 4
assert [proxy.State() for proxy in vcc_band_proxies[receptor_to_vcc[4] - 1]] == [
DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
assert [proxy.State() for proxy in vcc_band_proxies[receptor_to_vcc[1] - 1]] == [
DevState.DISABLE, DevState.DISABLE, DevState.DISABLE, DevState.ON]
# check the rest of the configured attributes of VCCs
# first for VCC belonging to receptor 10...
assert vcc_proxies[receptor_to_vcc[4] - 1].subarrayMembership == 1
assert vcc_proxies[receptor_to_vcc[4] - 1].band5Tuning[0] == 5.85
assert vcc_proxies[receptor_to_vcc[4] - 1].band5Tuning[1] == 7.25
assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBandOffsetStream1 == 0
assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBandOffsetStream2 == 0
assert vcc_proxies[receptor_to_vcc[4] - 1].rfiFlaggingMask == "{}"
# then for VCC belonging to receptor 1...
assert vcc_proxies[receptor_to_vcc[1] - 1].subarrayMembership == 1
assert vcc_proxies[receptor_to_vcc[1] - 1].band5Tuning[0] == 5.85
assert vcc_proxies[receptor_to_vcc[1] - 1].band5Tuning[1] == 7.25
assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBandOffsetStream1 == 0
assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBandOffsetStream2 == 0
assert vcc_proxies[receptor_to_vcc[1] - 1].rfiFlaggingMask == "{}"
# check configured attributes of search windows
# first for search window 1...
assert sw_1_proxy.State() == DevState.ON
assert sw_1_proxy.searchWindowTuning == 6000000000
assert sw_1_proxy.tdcEnable == True
assert sw_1_proxy.tdcNumBits == 8
assert sw_1_proxy.tdcPeriodBeforeEpoch == 5
assert sw_1_proxy.tdcPeriodAfterEpoch == 25
assert "".join(sw_1_proxy.tdcDestinationAddress.split()) in [
"[{\"receptorID\":4,\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"]},{\"receptorID\":1,\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"]}]",
"[{\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"],\"receptorID\":4},{\"receptorID\":1,\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"]}]",
"[{\"receptorID\":4,\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"]},{\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"],\"receptorID\":1}]",
"[{\"tdcDestinationAddress\":[\"foo\",\"bar\",\"8080\"],\"receptorID\":4},{\"tdcDestinationAddress\":[\"fizz\",\"buzz\",\"80\"],\"receptorID\":1}]",
]
# then for search window 2...
assert sw_2_proxy.State() == DevState.DISABLE
assert sw_2_proxy.searchWindowTuning == 7000000000
assert sw_2_proxy.tdcEnable == False
# check configured attributes of VCC search windows
# first for search window 1 of VCC belonging to receptor 10...
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].State() == DevState.ON
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].searchWindowTuning == 6000000000
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcEnable == True
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcNumBits == 8
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcPeriodBeforeEpoch == 5
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcPeriodAfterEpoch == 25
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][0].tdcDestinationAddress == (
"foo", "bar", "8080"
)
# then for search window 1 of VCC belonging to receptor 1...
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].State() == DevState.ON
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].searchWindowTuning == 6000000000
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcEnable == True
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcNumBits == 8
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcPeriodBeforeEpoch == 5
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcPeriodAfterEpoch == 25
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][0].tdcDestinationAddress == (
"fizz", "buzz", "80"
)
# then for search window 2 of VCC belonging to receptor 10...
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][1].State() == DevState.DISABLE
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][1].searchWindowTuning == 7000000000
assert vcc_tdc_proxies[receptor_to_vcc[4] - 1][1].tdcEnable == False
# and lastly for search window 2 of VCC belonging to receptor 1...
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][1].State() == DevState.DISABLE
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][1].searchWindowTuning == 7000000000
assert vcc_tdc_proxies[receptor_to_vcc[1] - 1][1].tdcEnable == False
assert fsp_3_proxies.subarray[1].receptors[0] == 3
assert fsp_3_proxies.subarray[1].receptors[1] == 1
assert fsp_3_proxies.subarray[1].searchWindowID == 2
assert fsp_3_proxies.subarray[1].searchBeamID[0] == 300
assert fsp_3_proxies.subarray[1].searchBeamID[1] == 400
searchBeam = fsp_3_proxies.subarray[1].searchBeams
searchBeam300 = json.loads(searchBeam[0])
searchBeam400 = json.loads(searchBeam[1])
assert searchBeam300["searchBeamID"] == 300
assert searchBeam300["receptors"][0] == 3
assert searchBeam300["outputEnable"] == True
assert searchBeam300["averagingInterval"] == 4
assert searchBeam300["searchBeamDestinationAddress"] == "10.05.1.1"
assert searchBeam400["searchBeamID"] == 400
assert searchBeam400["receptors"][0] == 1
assert searchBeam400["outputEnable"] == True
assert searchBeam400["averagingInterval"] == 2
assert searchBeam400["searchBeamDestinationAddress"] == "10.05.2.1"
proxies.subarray[1].GoToIdle()
time.sleep(3)
assert proxies.subarray[1].obsState == ObsState.IDLE
proxies.subarray[1].RemoveAllReceptors()
time.sleep(3)
assert proxies.subarray[1].state() == tango.DevState.OFF
def test_band1(
self,
cbf_master_proxy,
proxies.subarray[1],
sw_1_proxy,
sw_2_proxy,
vcc_proxies,
vcc_band_proxies,
vcc_tdc_proxies,
fsp_1_proxy,
fsp_2_proxy,
fsp_1_function_mode_proxy,
fsp_2_function_mode_proxy,
fsp_1_proxies.subarray[1],
fsp_2_proxies.subarray[1],
fsp_3_proxies.subarray[1],
tm_telstate_proxy
):
"""
Test a minimal successful configuration
"""
for proxy in vcc_proxies:
proxy.Init()
fsp_1_proxies.subarray[1].Init()
fsp_2_proxies.subarray[1].Init()
fsp_3_proxies.subarray[1].Init()
fsp_1_proxy.Init()
fsp_2_proxy.Init()
time.sleep(3)
cbf_master_proxy.set_timeout_millis(60000)
cbf_master_proxy.Init()
time.sleep(60) # takes pretty long for CBF Master to initialize
tm_telstate_proxy.Init()
time.sleep(1)
receptor_to_vcc = dict([*map(int, pair.split(":"))] for pair in
cbf_master_proxy.receptorToVcc)
cbf_master_proxy.On()
time.sleep(3)
# check initial value of attributes of CBF subarray
assert len(proxies.subarray[1].receptors) == 0
assert proxies.subarray[1].configID == ''
assert proxies.subarray[1].frequencyBand == 0
assert proxies.subarray[1].obsState.value == ObsState.IDLE.value
# assert tm_telstate_proxy.visDestinationAddress == "{}"
assert tm_telstate_proxy.receivedOutputLinks == False
# add receptors
proxies.subarray[1].AddReceptors([1, 3, 4])
time.sleep(1)
assert proxies.subarray[1].receptors[0] == 1
assert proxies.subarray[1].receptors[1] == 3
assert proxies.subarray[1].receptors[2] == 4
# configure scan
f = open(file_path + "/test_json/data_model_confluence.json")
proxies.subarray[1].ConfigureScan(f.read().replace("\n", ""))
f.close()
time.sleep(15)
# check configured attributes of CBF subarray
assert proxies.subarray[1].configID == "sbi-mvp01-20200325-00001-science_A"
assert proxies.subarray[1].frequencyBand == 0 # means 1
assert proxies.subarray[1].obsState.value == ObsState.READY.value
# check frequency band of VCCs, including states of frequency band capabilities
assert vcc_proxies[receptor_to_vcc[4] - 1].frequencyBand == 0
assert vcc_proxies[receptor_to_vcc[1] - 1].frequencyBand == 0
# check the rest of the configured attributes of VCCs
# first for VCC belonging to receptor 10...
assert vcc_proxies[receptor_to_vcc[4] - 1].subarrayMembership == 1
# then for VCC belonging to receptor 1...
assert vcc_proxies[receptor_to_vcc[1] - 1].subarrayMembership == 1
# check configured attributes of FSPs, including states of function mode capabilities
assert fsp_1_proxy.functionMode == 1
assert 1 in fsp_1_proxy.subarrayMembership
# assert 1 in fsp_2_proxy.subarrayMembership
assert [proxy.State() for proxy in fsp_1_function_mode_proxy] == [
DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
]
# assert [proxy.State() for proxy in fsp_2_function_mode_proxy] == [
# DevState.ON, DevState.DISABLE, DevState.DISABLE, DevState.DISABLE
# ]
# check configured attributes of FSP subarrays
# first for FSP 1...
assert fsp_1_proxies.subarray[1].obsState == ObsState.READY
assert fsp_1_proxies.subarray[1].frequencyBand == 0
assert fsp_1_proxies.subarray[1].frequencySliceID == 1
assert fsp_1_proxies.subarray[1].corrBandwidth == 0
assert fsp_1_proxies.subarray[1].integrationTime == 1400
assert fsp_1_proxies.subarray[1].outputLinkMap[0][0] == 1
assert fsp_1_proxies.subarray[1].outputLinkMap[0][1] == 0
assert fsp_1_proxies.subarray[1].outputLinkMap[1][0] == 201
assert fsp_1_proxies.subarray[1].outputLinkMap[1][1] == 1
proxies.subarray[1].GoToIdle()
time.sleep(3)
assert proxies.subarray[1].obsState == ObsState.IDLE
proxies.subarray[1].RemoveAllReceptors()
time.sleep(1)
proxies.subarray[1].Off()
assert proxies.subarray[1].state() == tango.DevState.OFF
'''
| 52.411175
| 166
| 0.597728
| 12,971
| 109,749
| 4.916737
| 0.041708
| 0.107722
| 0.107378
| 0.073069
| 0.88312
| 0.856417
| 0.826092
| 0.785167
| 0.728812
| 0.664445
| 0
| 0.044189
| 0.27418
| 109,749
| 2,093
| 167
| 52.436216
| 0.756421
| 0.092994
| 0
| 0.618577
| 0
| 0
| 0.047104
| 0.014819
| 0
| 0
| 0
| 0.002389
| 0.374506
| 1
| 0.013834
| false
| 0
| 0.012846
| 0
| 0.027668
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
efcfc18baec6488ebb364aa41f3dfe5708397b01
| 164
|
py
|
Python
|
app/posts/__init__.py
|
nchudleigh/yunite-blog
|
572d2a6594b464da69ec148a6a9a54ad594e9df1
|
[
"MIT"
] | 10
|
2016-07-21T13:28:58.000Z
|
2016-07-22T22:44:49.000Z
|
app/posts/__init__.py
|
nchudleigh/yunite-blog
|
572d2a6594b464da69ec148a6a9a54ad594e9df1
|
[
"MIT"
] | 2
|
2016-08-11T15:32:00.000Z
|
2016-08-11T15:32:17.000Z
|
app/posts/__init__.py
|
nchudleigh/yunite-blog
|
572d2a6594b464da69ec148a6a9a54ad594e9df1
|
[
"MIT"
] | null | null | null |
# Package initializer for the "posts" feature: defines the Flask blueprint.
from __future__ import absolute_import, print_function
from flask import Blueprint
# Blueprint instance; presumably registered on the app by an application
# factory elsewhere in the project — TODO confirm against the app setup.
posts = Blueprint('posts', __name__)
# Imported *after* `posts` is defined, presumably so views/models can import
# `posts` back without a circular-import error — verify in those modules.
from . import views
from . import models
| 18.222222
| 54
| 0.79878
| 21
| 164
| 5.761905
| 0.571429
| 0.231405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 164
| 8
| 55
| 20.5
| 0.864286
| 0
| 0
| 0
| 0
| 0
| 0.030488
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0.6
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
4bd9705b955e0a45192ad52a20390686e86330a2
| 24
|
py
|
Python
|
detectionModules/wifi/frame/__init__.py
|
Impeekay/shop-analytics-pi
|
4e02068775b700da3f0e01a612fdc5cc29c85eaf
|
[
"MIT"
] | 1
|
2020-12-12T07:00:03.000Z
|
2020-12-12T07:00:03.000Z
|
detectionModules/wifi/frame/__init__.py
|
Impeekay/shop-analytics-pi
|
4e02068775b700da3f0e01a612fdc5cc29c85eaf
|
[
"MIT"
] | 7
|
2020-11-13T18:47:55.000Z
|
2022-03-12T00:30:13.000Z
|
detectionModules/wifi/frame/__init__.py
|
Impeekay/shop-analytics-pi
|
4e02068775b700da3f0e01a612fdc5cc29c85eaf
|
[
"MIT"
] | 3
|
2020-05-11T06:59:28.000Z
|
2020-06-08T16:59:54.000Z
|
from .main import Frame
| 12
| 23
| 0.791667
| 4
| 24
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4be356a169a29778acf1ae286955c2c3b568c56e
| 109
|
py
|
Python
|
src/uri/basic/basic_1004.py
|
gabrielDpadua21/code-challenges
|
0050bc9b358193aa6cacdda21e0670a9dc20450a
|
[
"MIT"
] | null | null | null |
src/uri/basic/basic_1004.py
|
gabrielDpadua21/code-challenges
|
0050bc9b358193aa6cacdda21e0670a9dc20450a
|
[
"MIT"
] | null | null | null |
src/uri/basic/basic_1004.py
|
gabrielDpadua21/code-challenges
|
0050bc9b358193aa6cacdda21e0670a9dc20450a
|
[
"MIT"
] | null | null | null |
class SimpleProduct:
    """URI Online Judge basic problem 1004: product of two numbers.

    The judge expects the exact output string ``"PROD = <product>"``.
    """

    def solution(self, value1, value2):
        """Return ``"PROD = <value1 * value2>"``.

        Args:
            value1: first factor (int per the problem statement).
            value2: second factor (int per the problem statement).

        Returns:
            str: the product formatted as ``"PROD = <product>"``.
        """
        # f-string is the idiomatic replacement for manual str() + "+"
        # concatenation; output is byte-identical to the original.
        return f"PROD = {value1 * value2}"
| 27.25
| 47
| 0.642202
| 12
| 109
| 5.833333
| 0.833333
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048193
| 0.238532
| 109
| 4
| 47
| 27.25
| 0.795181
| 0
| 0
| 0
| 0
| 0
| 0.063636
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
4bef96f0024a7ae8bb097301df91f46b01e7fd63
| 117
|
py
|
Python
|
ope-backend/src/domain/dto/__init__.py
|
mthora/ope-talos
|
84b9a10e98dffb9dd654ce81b5419e3eb610961a
|
[
"CC0-1.0"
] | null | null | null |
ope-backend/src/domain/dto/__init__.py
|
mthora/ope-talos
|
84b9a10e98dffb9dd654ce81b5419e3eb610961a
|
[
"CC0-1.0"
] | null | null | null |
ope-backend/src/domain/dto/__init__.py
|
mthora/ope-talos
|
84b9a10e98dffb9dd654ce81b5419e3eb610961a
|
[
"CC0-1.0"
] | null | null | null |
from .user_dto import User
from .drink_dto import Drinks
from .dessert_dto import Dessert
from .role_dto import Roles
| 29.25
| 32
| 0.837607
| 20
| 117
| 4.7
| 0.45
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 117
| 4
| 33
| 29.25
| 0.921569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ef556bb95a1b8f97ad5d68caa3187c1cfe9d2731
| 136
|
py
|
Python
|
__init__.py
|
ScottHull/fEquilibrium
|
fbc352484d0e60d6b224950f81d6fd730e36cb82
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
ScottHull/fEquilibrium
|
fbc352484d0e60d6b224950f81d6fd730e36cb82
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
ScottHull/fEquilibrium
|
fbc352484d0e60d6b224950f81d6fd730e36cb82
|
[
"Apache-2.0"
] | null | null | null |
from radioactivity import *
from thermodynamics import *
from dynamics import *
from box import *
from stats import *
from meta import *
| 22.666667
| 28
| 0.786765
| 18
| 136
| 5.944444
| 0.444444
| 0.46729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169118
| 136
| 6
| 29
| 22.666667
| 0.946903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
32289abd0c37f2be084184114cf2ba93d491baa5
| 236
|
py
|
Python
|
explicalib/calibration/evaluation/metrics/classwise/__init__.py
|
euranova/estimating_eces
|
9bfa81dd7a39ebe069c5b11b8e7a9bf9017e9350
|
[
"MIT"
] | 2
|
2021-11-30T18:44:11.000Z
|
2021-11-30T18:44:19.000Z
|
explicalib/calibration/evaluation/metrics/classwise/__init__.py
|
euranova/estimating_eces
|
9bfa81dd7a39ebe069c5b11b8e7a9bf9017e9350
|
[
"MIT"
] | null | null | null |
explicalib/calibration/evaluation/metrics/classwise/__init__.py
|
euranova/estimating_eces
|
9bfa81dd7a39ebe069c5b11b8e7a9bf9017e9350
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@author: nicolas.posocco
"""
from .classwise_ece import classwise_ece
from .classwise_ece_c import classwise_ece_c
from .classwise_ece_a import classwise_ece_a
from .classwise_ece_ac import classwise_ece_ac
| 23.6
| 46
| 0.79661
| 36
| 236
| 4.833333
| 0.361111
| 0.551724
| 0.367816
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004762
| 0.110169
| 236
| 9
| 47
| 26.222222
| 0.82381
| 0.199153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3237e38f130787be578311db46d00c7459630014
| 104
|
py
|
Python
|
src/saturnv_ui/saturnv/ui/presenters/__init__.py
|
epkaz93/saturnv
|
b8a2c61bb0e833f2e31698050113038bab3ca5a4
|
[
"MIT"
] | 1
|
2022-03-12T07:38:09.000Z
|
2022-03-12T07:38:09.000Z
|
src/saturnv_ui/saturnv/ui/presenters/__init__.py
|
epkaz93/saturnv
|
b8a2c61bb0e833f2e31698050113038bab3ca5a4
|
[
"MIT"
] | null | null | null |
src/saturnv_ui/saturnv/ui/presenters/__init__.py
|
epkaz93/saturnv
|
b8a2c61bb0e833f2e31698050113038bab3ca5a4
|
[
"MIT"
] | null | null | null |
from .basepresenter import BasePresenter, PresenterWidgetMixin
from .mainpresenter import MainPresenter
| 34.666667
| 62
| 0.884615
| 9
| 104
| 10.222222
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086538
| 104
| 2
| 63
| 52
| 0.968421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
323c23d2ee41cb994fcc47795d92dc9ced175d82
| 49
|
py
|
Python
|
04-FaceRecognition-II/thetensorclan-backend-heroku/models/__init__.py
|
amitkml/TSAI-DeepVision-EVA4.0-Phase-2
|
f9e232b3eb6ce20f522136523e79208ed85a1f28
|
[
"MIT"
] | 1
|
2021-03-21T08:45:05.000Z
|
2021-03-21T08:45:05.000Z
|
04-FaceRecognition-II/thetensorclan-backend-heroku/models/__init__.py
|
amitkml/TSAI-DeepVision-EVA4.0-Phase-2
|
f9e232b3eb6ce20f522136523e79208ed85a1f28
|
[
"MIT"
] | null | null | null |
04-FaceRecognition-II/thetensorclan-backend-heroku/models/__init__.py
|
amitkml/TSAI-DeepVision-EVA4.0-Phase-2
|
f9e232b3eb6ce20f522136523e79208ed85a1f28
|
[
"MIT"
] | null | null | null |
from .utils import get_classifier, MODEL_REGISTER
| 49
| 49
| 0.877551
| 7
| 49
| 5.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
325afc954c7ca8c451a9daa377e0f8c69880c8be
| 74
|
py
|
Python
|
tests/messages/data/project/_hidden_by_default/hidden_file.py
|
kolonialno/babel
|
e44a1e632406853530126bf357ddcfe9a223a04e
|
[
"BSD-3-Clause"
] | 1
|
2021-12-25T20:08:01.000Z
|
2021-12-25T20:08:01.000Z
|
tests/messages/data/project/_hidden_by_default/hidden_file.py
|
kolonialno/babel
|
e44a1e632406853530126bf357ddcfe9a223a04e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/messages/data/project/_hidden_by_default/hidden_file.py
|
kolonialno/babel
|
e44a1e632406853530126bf357ddcfe9a223a04e
|
[
"BSD-3-Clause"
] | null | null | null |
from gettext import gettext
def foo():
    """Print a hush message, routed through :func:`gettext.gettext` for translation."""
    message = gettext('ssshhh....')
    print(message)
| 12.333333
| 32
| 0.648649
| 9
| 74
| 5.333333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175676
| 74
| 5
| 33
| 14.8
| 0.786885
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
087d93dd29ce88aab95620062cadf32936f8b8a4
| 37,597
|
py
|
Python
|
cottonformation/res/elasticbeanstalk.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/elasticbeanstalk.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/elasticbeanstalk.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class EnvironmentOptionSetting(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Environment.OptionSetting"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html
    Property Document:
    - ``rp_Namespace``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-beanstalk-optionsettings-namespace
    - ``rp_OptionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-beanstalk-optionsettings-optionname
    - ``p_ResourceName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-elasticbeanstalk-environment-optionsetting-resourcename
    - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-beanstalk-optionsettings-value
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Environment.OptionSetting"
    # Required ("rp_" prefix): bare instance_of validator, so the default None fails validation if not set.
    rp_Namespace: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Namespace"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-beanstalk-optionsettings-namespace"""
    # Required ("rp_" prefix): bare instance_of validator.
    rp_OptionName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "OptionName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-beanstalk-optionsettings-optionname"""
    # Optional ("p_" prefix): validator wrapped in optional(), so None is accepted.
    p_ResourceName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ResourceName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-elasticbeanstalk-environment-optionsetting-resourcename"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-option-settings.html#cfn-beanstalk-optionsettings-value"""
@attr.s
class ApplicationVersionSourceBundle(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::ApplicationVersion.SourceBundle"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-sourcebundle.html
    Property Document:
    - ``rp_S3Bucket``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-sourcebundle.html#cfn-beanstalk-sourcebundle-s3bucket
    - ``rp_S3Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-sourcebundle.html#cfn-beanstalk-sourcebundle-s3key
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::ApplicationVersion.SourceBundle"
    # Required ("rp_" prefix): bare instance_of validator, so the default None fails validation if not set.
    rp_S3Bucket: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "S3Bucket"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-sourcebundle.html#cfn-beanstalk-sourcebundle-s3bucket"""
    # Required ("rp_" prefix): bare instance_of validator.
    rp_S3Key: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "S3Key"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-sourcebundle.html#cfn-beanstalk-sourcebundle-s3key"""
@attr.s
class ApplicationMaxAgeRule(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Application.MaxAgeRule"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html
    Property Document:
    - ``p_DeleteSourceFromS3``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html#cfn-elasticbeanstalk-application-maxagerule-deletesourcefroms3
    - ``p_Enabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html#cfn-elasticbeanstalk-application-maxagerule-enabled
    - ``p_MaxAgeInDays``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html#cfn-elasticbeanstalk-application-maxagerule-maxageindays
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Application.MaxAgeRule"
    # Optional ("p_" prefix): plain bool, validator wrapped in optional() so None is accepted.
    p_DeleteSourceFromS3: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeleteSourceFromS3"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html#cfn-elasticbeanstalk-application-maxagerule-deletesourcefroms3"""
    # Optional ("p_" prefix): plain bool, validator wrapped in optional().
    p_Enabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "Enabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html#cfn-elasticbeanstalk-application-maxagerule-enabled"""
    # Optional ("p_" prefix): plain int, validator wrapped in optional().
    p_MaxAgeInDays: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxAgeInDays"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxagerule.html#cfn-elasticbeanstalk-application-maxagerule-maxageindays"""
@attr.s
class ConfigurationTemplateSourceConfiguration(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::ConfigurationTemplate.SourceConfiguration"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-sourceconfiguration.html
    Property Document:
    - ``rp_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-sourceconfiguration.html#cfn-elasticbeanstalk-configurationtemplate-sourceconfiguration-applicationname
    - ``rp_TemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-sourceconfiguration.html#cfn-elasticbeanstalk-configurationtemplate-sourceconfiguration-templatename
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::ConfigurationTemplate.SourceConfiguration"
    # Required ("rp_" prefix): bare instance_of validator, so the default None fails validation if not set.
    rp_ApplicationName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-sourceconfiguration.html#cfn-elasticbeanstalk-configurationtemplate-sourceconfiguration-applicationname"""
    # Required ("rp_" prefix): bare instance_of validator.
    rp_TemplateName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "TemplateName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-sourceconfiguration.html#cfn-elasticbeanstalk-configurationtemplate-sourceconfiguration-templatename"""
@attr.s
class EnvironmentTier(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Environment.Tier"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html
    Property Document:
    - ``p_Name``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html#cfn-beanstalk-env-tier-name
    - ``p_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html#cfn-beanstalk-env-tier-type
    - ``p_Version``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html#cfn-beanstalk-env-tier-version
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Environment.Tier"
    # Optional ("p_" prefix): validator wrapped in optional(), so None is accepted.
    p_Name: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Name"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html#cfn-beanstalk-env-tier-name"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_Type: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Type"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html#cfn-beanstalk-env-tier-type"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_Version: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Version"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment-tier.html#cfn-beanstalk-env-tier-version"""
@attr.s
class ConfigurationTemplateConfigurationOptionSetting(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::ConfigurationTemplate.ConfigurationOptionSetting"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html
    Property Document:
    - ``rp_Namespace``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-namespace
    - ``rp_OptionName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-optionname
    - ``p_ResourceName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-resourcename
    - ``p_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-value
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::ConfigurationTemplate.ConfigurationOptionSetting"
    # Required ("rp_" prefix): bare instance_of validator, so the default None fails validation if not set.
    rp_Namespace: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "Namespace"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-namespace"""
    # Required ("rp_" prefix): bare instance_of validator.
    rp_OptionName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "OptionName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-optionname"""
    # Optional ("p_" prefix): validator wrapped in optional(), so None is accepted.
    p_ResourceName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ResourceName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-resourcename"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_Value: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Value"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-configurationtemplate-configurationoptionsetting.html#cfn-elasticbeanstalk-configurationtemplate-configurationoptionsetting-value"""
@attr.s
class ApplicationMaxCountRule(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Application.MaxCountRule"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html
    Property Document:
    - ``p_DeleteSourceFromS3``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html#cfn-elasticbeanstalk-application-maxcountrule-deletesourcefroms3
    - ``p_Enabled``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html#cfn-elasticbeanstalk-application-maxcountrule-enabled
    - ``p_MaxCount``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html#cfn-elasticbeanstalk-application-maxcountrule-maxcount
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Application.MaxCountRule"
    # Optional ("p_" prefix): plain bool, validator wrapped in optional() so None is accepted.
    p_DeleteSourceFromS3: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "DeleteSourceFromS3"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html#cfn-elasticbeanstalk-application-maxcountrule-deletesourcefroms3"""
    # Optional ("p_" prefix): plain bool, validator wrapped in optional().
    p_Enabled: bool = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(bool)),
        metadata={AttrMeta.PROPERTY_NAME: "Enabled"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html#cfn-elasticbeanstalk-application-maxcountrule-enabled"""
    # Optional ("p_" prefix): plain int, validator wrapped in optional().
    p_MaxCount: int = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(int)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxCount"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-maxcountrule.html#cfn-elasticbeanstalk-application-maxcountrule-maxcount"""
@attr.s
class ApplicationApplicationVersionLifecycleConfig(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Application.ApplicationVersionLifecycleConfig"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationversionlifecycleconfig.html
    Property Document:
    - ``p_MaxAgeRule``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationversionlifecycleconfig.html#cfn-elasticbeanstalk-application-applicationversionlifecycleconfig-maxagerule
    - ``p_MaxCountRule``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationversionlifecycleconfig.html#cfn-elasticbeanstalk-application-applicationversionlifecycleconfig-maxcountrule
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Application.ApplicationVersionLifecycleConfig"
    # Optional nested property: a plain dict is converted via ApplicationMaxAgeRule.from_dict before validation.
    p_MaxAgeRule: typing.Union['ApplicationMaxAgeRule', dict] = attr.ib(
        default=None,
        converter=ApplicationMaxAgeRule.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ApplicationMaxAgeRule)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxAgeRule"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationversionlifecycleconfig.html#cfn-elasticbeanstalk-application-applicationversionlifecycleconfig-maxagerule"""
    # Optional nested property: a plain dict is converted via ApplicationMaxCountRule.from_dict before validation.
    p_MaxCountRule: typing.Union['ApplicationMaxCountRule', dict] = attr.ib(
        default=None,
        converter=ApplicationMaxCountRule.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ApplicationMaxCountRule)),
        metadata={AttrMeta.PROPERTY_NAME: "MaxCountRule"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationversionlifecycleconfig.html#cfn-elasticbeanstalk-application-applicationversionlifecycleconfig-maxcountrule"""
@attr.s
class ApplicationApplicationResourceLifecycleConfig(Property):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Application.ApplicationResourceLifecycleConfig"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationresourcelifecycleconfig.html
    Property Document:
    - ``p_ServiceRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationresourcelifecycleconfig.html#cfn-elasticbeanstalk-application-applicationresourcelifecycleconfig-servicerole
    - ``p_VersionLifecycleConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationresourcelifecycleconfig.html#cfn-elasticbeanstalk-application-applicationresourcelifecycleconfig-versionlifecycleconfig
    """
    # Fully-qualified CloudFormation type name of this property object.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Application.ApplicationResourceLifecycleConfig"
    # Optional ("p_" prefix): validator wrapped in optional(), so None is accepted.
    p_ServiceRole: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ServiceRole"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationresourcelifecycleconfig.html#cfn-elasticbeanstalk-application-applicationresourcelifecycleconfig-servicerole"""
    # Optional nested property: a plain dict is converted via from_dict before validation.
    p_VersionLifecycleConfig: typing.Union['ApplicationApplicationVersionLifecycleConfig', dict] = attr.ib(
        default=None,
        converter=ApplicationApplicationVersionLifecycleConfig.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ApplicationApplicationVersionLifecycleConfig)),
        metadata={AttrMeta.PROPERTY_NAME: "VersionLifecycleConfig"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticbeanstalk-application-applicationresourcelifecycleconfig.html#cfn-elasticbeanstalk-application-applicationresourcelifecycleconfig-versionlifecycleconfig"""
#--- Resource declaration ---
@attr.s
class ConfigurationTemplate(Resource):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::ConfigurationTemplate"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html
    Property Document:
    - ``rp_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-applicationname
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-description
    - ``p_EnvironmentId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-environmentid
    - ``p_OptionSettings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-optionsettings
    - ``p_PlatformArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-platformarn
    - ``p_SolutionStackName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-solutionstackname
    - ``p_SourceConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-sourceconfiguration
    """
    # Fully-qualified CloudFormation type name of this resource.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::ConfigurationTemplate"
    # Required ("rp_" prefix): bare instance_of validator, so the default None fails validation if not set.
    rp_ApplicationName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
        metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-applicationname"""
    # Optional ("p_" prefix): validator wrapped in optional(), so None is accepted.
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-description"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_EnvironmentId: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "EnvironmentId"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-environmentid"""
    # Optional list property: items converted via from_list, then each member and the list itself validated.
    p_OptionSettings: typing.List[typing.Union['ConfigurationTemplateConfigurationOptionSetting', dict]] = attr.ib(
        default=None,
        converter=ConfigurationTemplateConfigurationOptionSetting.from_list,
        validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(ConfigurationTemplateConfigurationOptionSetting), iterable_validator=attr.validators.instance_of(list))),
        metadata={AttrMeta.PROPERTY_NAME: "OptionSettings"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-optionsettings"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_PlatformArn: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "PlatformArn"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-platformarn"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_SolutionStackName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "SolutionStackName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-solutionstackname"""
    # Optional nested property: a plain dict is converted via from_dict before validation.
    p_SourceConfiguration: typing.Union['ConfigurationTemplateSourceConfiguration', dict] = attr.ib(
        default=None,
        converter=ConfigurationTemplateSourceConfiguration.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ConfigurationTemplateSourceConfiguration)),
        metadata={AttrMeta.PROPERTY_NAME: "SourceConfiguration"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticbeanstalk-configurationtemplate.html#cfn-elasticbeanstalk-configurationtemplate-sourceconfiguration"""
@attr.s
class Application(Resource):
    """
    AWS Object Type = "AWS::ElasticBeanstalk::Application"
    Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html
    Property Document:
    - ``p_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html#cfn-elasticbeanstalk-application-name
    - ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html#cfn-elasticbeanstalk-application-description
    - ``p_ResourceLifecycleConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html#cfn-elasticbeanstalk-application-resourcelifecycleconfig
    """
    # Fully-qualified CloudFormation type name of this resource.
    AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Application"
    # Optional ("p_" prefix): validator wrapped in optional(), so None is accepted.
    p_ApplicationName: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html#cfn-elasticbeanstalk-application-name"""
    # Optional ("p_" prefix): validator wrapped in optional().
    p_Description: TypeHint.intrinsic_str = attr.ib(
        default=None,
        validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
        metadata={AttrMeta.PROPERTY_NAME: "Description"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html#cfn-elasticbeanstalk-application-description"""
    # Optional nested property: a plain dict is converted via from_dict before validation.
    p_ResourceLifecycleConfig: typing.Union['ApplicationApplicationResourceLifecycleConfig', dict] = attr.ib(
        default=None,
        converter=ApplicationApplicationResourceLifecycleConfig.from_dict,
        validator=attr.validators.optional(attr.validators.instance_of(ApplicationApplicationResourceLifecycleConfig)),
        metadata={AttrMeta.PROPERTY_NAME: "ResourceLifecycleConfig"},
    )
    """Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk.html#cfn-elasticbeanstalk-application-resourcelifecycleconfig"""
@attr.s
class Environment(Resource):
"""
AWS Object Type = "AWS::ElasticBeanstalk::Environment"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html
Property Document:
- ``rp_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-applicationname
- ``p_CNAMEPrefix``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-cnameprefix
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-description
- ``p_EnvironmentName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-name
- ``p_OperationsRole``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-operations-role
- ``p_OptionSettings``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-optionsettings
- ``p_PlatformArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn
- ``p_SolutionStackName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-solutionstackname
- ``p_TemplateName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-templatename
- ``p_Tier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-tier
- ``p_VersionLabel``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-versionlabel
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-elasticbeanstalk-environment-tags
"""
AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::Environment"
rp_ApplicationName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-applicationname"""
p_CNAMEPrefix: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "CNAMEPrefix"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-cnameprefix"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-description"""
p_EnvironmentName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "EnvironmentName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-name"""
p_OperationsRole: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "OperationsRole"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-operations-role"""
p_OptionSettings: typing.List[typing.Union['EnvironmentOptionSetting', dict]] = attr.ib(
default=None,
converter=EnvironmentOptionSetting.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(EnvironmentOptionSetting), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "OptionSettings"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-optionsettings"""
p_PlatformArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "PlatformArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn"""
p_SolutionStackName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SolutionStackName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-solutionstackname"""
p_TemplateName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "TemplateName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-templatename"""
p_Tier: typing.Union['EnvironmentTier', dict] = attr.ib(
default=None,
converter=EnvironmentTier.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(EnvironmentTier)),
metadata={AttrMeta.PROPERTY_NAME: "Tier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-tier"""
p_VersionLabel: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "VersionLabel"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-versionlabel"""
p_Tags: typing.List[typing.Union[Tag, dict]] = attr.ib(
default=None,
converter=Tag.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(Tag), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-elasticbeanstalk-environment-tags"""
@property
def rv_EndpointURL(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#aws-properties-beanstalk-environment-return-values"""
return GetAtt(resource=self, attr_name="EndpointURL")
@attr.s
class ApplicationVersion(Resource):
"""
AWS Object Type = "AWS::ElasticBeanstalk::ApplicationVersion"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html
Property Document:
- ``rp_ApplicationName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html#cfn-elasticbeanstalk-applicationversion-applicationname
- ``rp_SourceBundle``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html#cfn-elasticbeanstalk-applicationversion-sourcebundle
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html#cfn-elasticbeanstalk-applicationversion-description
"""
AWS_OBJECT_TYPE = "AWS::ElasticBeanstalk::ApplicationVersion"
rp_ApplicationName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ApplicationName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html#cfn-elasticbeanstalk-applicationversion-applicationname"""
rp_SourceBundle: typing.Union['ApplicationVersionSourceBundle', dict] = attr.ib(
default=None,
converter=ApplicationVersionSourceBundle.from_dict,
validator=attr.validators.instance_of(ApplicationVersionSourceBundle),
metadata={AttrMeta.PROPERTY_NAME: "SourceBundle"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html#cfn-elasticbeanstalk-applicationversion-sourcebundle"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-version.html#cfn-elasticbeanstalk-applicationversion-description"""
| 66.543363
| 272
| 0.781791
| 3,759
| 37,597
| 7.725725
| 0.032189
| 0.031404
| 0.04318
| 0.066733
| 0.919734
| 0.918322
| 0.892152
| 0.850212
| 0.850212
| 0.850212
| 0
| 0.000618
| 0.096577
| 37,597
| 564
| 273
| 66.661348
| 0.854384
| 0.341463
| 0
| 0.506494
| 0
| 0
| 0.100966
| 0.064179
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003247
| false
| 0
| 0.012987
| 0
| 0.266234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
08c306aac02f4ce492686b32f0d28de7dc128a2b
| 135
|
py
|
Python
|
docs/00.Python/demo_pacages/test.py
|
mheanng/PythonNote
|
e3e5ede07968fab0a45f6ac4db96e62092c17026
|
[
"Apache-2.0"
] | null | null | null |
docs/00.Python/demo_pacages/test.py
|
mheanng/PythonNote
|
e3e5ede07968fab0a45f6ac4db96e62092c17026
|
[
"Apache-2.0"
] | null | null | null |
docs/00.Python/demo_pacages/test.py
|
mheanng/PythonNote
|
e3e5ede07968fab0a45f6ac4db96e62092c17026
|
[
"Apache-2.0"
] | null | null | null |
from p1 import *
import p1.m1
import p1.m2
import p1.m3
import p1.m4
p1.m4.mm_main()
import p1.pp1.a1
import p1.pp1.a2
import p1.pp1.a3
| 15
| 16
| 0.755556
| 31
| 135
| 3.258065
| 0.419355
| 0.554455
| 0.326733
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17094
| 0.133333
| 135
| 9
| 17
| 15
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.888889
| 0
| 0.888889
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
08d4998270cd048e4219ae306f1efe90e6f9fa97
| 471
|
py
|
Python
|
pymoo/model/repair.py
|
gabicavalcante/pymoo
|
1711ce3a96e5ef622d0116d6c7ea4d26cbe2c846
|
[
"Apache-2.0"
] | 762
|
2018-06-05T20:56:09.000Z
|
2021-09-14T09:09:42.000Z
|
pymoo/model/repair.py
|
gabicavalcante/pymoo
|
1711ce3a96e5ef622d0116d6c7ea4d26cbe2c846
|
[
"Apache-2.0"
] | 176
|
2018-09-05T18:37:05.000Z
|
2021-09-14T01:18:43.000Z
|
pymoo/model/repair.py
|
gabicavalcante/pymoo
|
1711ce3a96e5ef622d0116d6c7ea4d26cbe2c846
|
[
"Apache-2.0"
] | 160
|
2018-08-05T05:31:20.000Z
|
2021-09-14T09:09:45.000Z
|
from abc import abstractmethod
class Repair:
"""
This class is allows to repair individuals after crossover if necessary.
"""
def do(self, problem, pop, **kwargs):
return self._do(problem, pop, **kwargs)
@abstractmethod
def _do(self, problem, pop, **kwargs):
pass
class NoRepair(Repair):
"""
A dummy class which can be used to simply do no repair.
"""
def do(self, problem, pop, **kwargs):
return pop
| 20.478261
| 76
| 0.622081
| 60
| 471
| 4.85
| 0.533333
| 0.137457
| 0.219931
| 0.164948
| 0.298969
| 0.298969
| 0.213058
| 0
| 0
| 0
| 0
| 0
| 0.271762
| 471
| 23
| 77
| 20.478261
| 0.848397
| 0.271762
| 0
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0.1
| 0.1
| 0.2
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
08d6a904273735038ed32023270f404f4f8e51d6
| 189
|
py
|
Python
|
Server/Python/src/dbs/dao/MySQL/BlockSite/Insert.py
|
vkuznet/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 8
|
2015-08-14T04:01:32.000Z
|
2021-06-03T00:56:42.000Z
|
Server/Python/src/dbs/dao/MySQL/BlockSite/Insert.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 162
|
2015-01-07T21:34:47.000Z
|
2021-10-13T09:42:41.000Z
|
Server/Python/src/dbs/dao/MySQL/BlockSite/Insert.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 16
|
2015-01-22T15:27:29.000Z
|
2021-04-28T09:23:28.000Z
|
#!/usr/bin/env python
""" DAO Object for BlockSite table """
from dbs.dao.Oracle.BlockSite.Insert import Insert as OraBlockSiteInsert
class Insert(OraBlockSiteInsert):
pass
| 21
| 72
| 0.724868
| 23
| 189
| 5.956522
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179894
| 189
| 8
| 73
| 23.625
| 0.883871
| 0.275132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
08f59b4aefa6a378acc15555e4c9db4ae67fc70b
| 27
|
py
|
Python
|
awesomo.py
|
buurz-forks/awesomo
|
31b64812df0600df3b4ab73061c1863e118f2496
|
[
"CC0-1.0"
] | null | null | null |
awesomo.py
|
buurz-forks/awesomo
|
31b64812df0600df3b4ab73061c1863e118f2496
|
[
"CC0-1.0"
] | null | null | null |
awesomo.py
|
buurz-forks/awesomo
|
31b64812df0600df3b4ab73061c1863e118f2496
|
[
"CC0-1.0"
] | null | null | null |
print("Hello, AWESOME-O!")
| 13.5
| 26
| 0.666667
| 4
| 27
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 27
| 1
| 27
| 27
| 0.72
| 0
| 0
| 0
| 0
| 0
| 0.62963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
08f5cb69b24666e8de90ff6526ea128b828f1d54
| 117
|
py
|
Python
|
goto/gotomagic/text/__init__.py
|
technocake/goto
|
3bd2997ccd7ef4ee9bcf4f9b72b1e13f11738faf
|
[
"MIT"
] | 10
|
2017-11-05T16:12:08.000Z
|
2021-10-05T08:18:12.000Z
|
goto/gotomagic/text/__init__.py
|
technocake/goto
|
3bd2997ccd7ef4ee9bcf4f9b72b1e13f11738faf
|
[
"MIT"
] | 81
|
2017-11-05T20:22:27.000Z
|
2021-11-12T12:40:50.000Z
|
goto/gotomagic/text/__init__.py
|
technocake/goto
|
3bd2997ccd7ef4ee9bcf4f9b72b1e13f11738faf
|
[
"MIT"
] | 3
|
2017-12-14T18:20:52.000Z
|
2019-05-24T09:37:56.000Z
|
# -*- coding: utf-8 -*-
"""
Text used by GOTO to do UX.
"""
from .text import GotoError, GotoWarning, print_text
| 19.5
| 52
| 0.632479
| 17
| 117
| 4.294118
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.205128
| 117
| 6
| 52
| 19.5
| 0.774194
| 0.42735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
3ea99f43b23d014aab95704d96534c05fd09a13c
| 41
|
py
|
Python
|
boo/okved/__init__.py
|
vishalbelsare/boo
|
96d08857abd790bc44f48256e7be7da130543a84
|
[
"MIT"
] | 14
|
2019-07-06T13:00:54.000Z
|
2021-09-14T16:05:23.000Z
|
boo/okved/__init__.py
|
vishalbelsare/boo
|
96d08857abd790bc44f48256e7be7da130543a84
|
[
"MIT"
] | 31
|
2019-07-05T09:31:40.000Z
|
2021-08-03T21:16:56.000Z
|
boo/okved/__init__.py
|
vishalbelsare/boo
|
96d08857abd790bc44f48256e7be7da130543a84
|
[
"MIT"
] | 4
|
2019-12-06T21:13:11.000Z
|
2021-03-12T10:11:18.000Z
|
from .okved import all_codes_v2, name_v2
| 20.5
| 40
| 0.829268
| 8
| 41
| 3.875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.121951
| 41
| 1
| 41
| 41
| 0.805556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3eae04857448ba499723c07972f80a557e0b8d22
| 29
|
py
|
Python
|
devel/.private/hector_uav_msgs/lib/python2.7/dist-packages/hector_uav_msgs/srv/__init__.py
|
arijitnoobstar/UAVProjectileCatcher
|
3c1bed80df167192cb4b971b58c891187628142e
|
[
"Apache-2.0"
] | 10
|
2021-03-15T03:58:06.000Z
|
2021-12-30T15:33:38.000Z
|
Chapter_7_code/devel/.private/hector_uav_msgs/lib/python2.7/dist-packages/hector_uav_msgs/srv/__init__.py
|
crepuscularlight/ROSbyExample
|
fa7b1a60cacca9b1034e318a2ac16ce4c8530d7c
|
[
"MIT"
] | 1
|
2021-09-09T15:29:31.000Z
|
2021-09-09T15:29:31.000Z
|
Chapter_7_code/devel/.private/hector_uav_msgs/lib/python2.7/dist-packages/hector_uav_msgs/srv/__init__.py
|
crepuscularlight/ROSbyExample
|
fa7b1a60cacca9b1034e318a2ac16ce4c8530d7c
|
[
"MIT"
] | 4
|
2021-03-06T09:35:58.000Z
|
2021-05-24T14:34:11.000Z
|
from ._EnableMotors import *
| 14.5
| 28
| 0.793103
| 3
| 29
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ec0dd173342080588eb543eccb5ca429728c8ef
| 41
|
py
|
Python
|
pyresx/__init__.py
|
cola314/pyresx
|
447431255a2781b4d0b1d4d1ecf8b11b32de3075
|
[
"MIT"
] | 3
|
2021-03-17T23:08:51.000Z
|
2021-07-26T04:41:19.000Z
|
pyresx/__init__.py
|
cola314/pyresx
|
447431255a2781b4d0b1d4d1ecf8b11b32de3075
|
[
"MIT"
] | null | null | null |
pyresx/__init__.py
|
cola314/pyresx
|
447431255a2781b4d0b1d4d1ecf8b11b32de3075
|
[
"MIT"
] | 1
|
2021-07-14T16:49:41.000Z
|
2021-07-14T16:49:41.000Z
|
from pyresx.ResXWriter import ResXWriter
| 20.5
| 40
| 0.878049
| 5
| 41
| 7.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3ecb119bff169758a9e83acc05a1ba916048bce1
| 141
|
py
|
Python
|
openprocurement/auctions/core/plugins/awarding/v3/tests/migration.py
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
52bd59f193f25e4997612fca0f87291decf06966
|
[
"Apache-2.0"
] | 2
|
2016-09-15T20:17:43.000Z
|
2017-01-08T03:32:43.000Z
|
openprocurement/auctions/core/plugins/awarding/v3/tests/migration.py
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
52bd59f193f25e4997612fca0f87291decf06966
|
[
"Apache-2.0"
] | 183
|
2017-12-21T11:04:37.000Z
|
2019-03-27T08:14:34.000Z
|
openprocurement/auctions/core/plugins/awarding/v3/tests/migration.py
|
EBRD-ProzorroSale/openprocurement.auctions.core
|
52bd59f193f25e4997612fca0f87291decf06966
|
[
"Apache-2.0"
] | 12
|
2016-09-05T12:07:48.000Z
|
2019-02-26T09:24:17.000Z
|
from zope import deprecation
deprecation.moved('openprocurement.auctions.core.tests.plugins.awarding.v3.tests.migration', 'version update')
| 35.25
| 110
| 0.829787
| 17
| 141
| 6.882353
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007519
| 0.056738
| 141
| 3
| 111
| 47
| 0.87218
| 0
| 0
| 0
| 0
| 0
| 0.602837
| 0.503546
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
41081088f29849d8763c958db4b2478b4d4eb8be
| 42
|
py
|
Python
|
study-hall/__main__.py
|
matt-ketk/study-hall
|
6a6837278daefb336643aca7b203c41cab5debcb
|
[
"MIT"
] | null | null | null |
study-hall/__main__.py
|
matt-ketk/study-hall
|
6a6837278daefb336643aca7b203c41cab5debcb
|
[
"MIT"
] | null | null | null |
study-hall/__main__.py
|
matt-ketk/study-hall
|
6a6837278daefb336643aca7b203c41cab5debcb
|
[
"MIT"
] | null | null | null |
import sys
from .classmodule import MyCla
| 14
| 30
| 0.833333
| 6
| 42
| 5.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 42
| 2
| 31
| 21
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4112ed0df1dc96a017ed171a1c78cb6cc588602b
| 43
|
py
|
Python
|
je_auto_control/osx/listener/__init__.py
|
JE-Chen/Python_JEAutoControl
|
477bf9612e28e9ab6d0a8e269db2f699e50a3744
|
[
"MIT"
] | 9
|
2020-10-12T06:33:36.000Z
|
2021-09-13T07:07:36.000Z
|
je_auto_control/osx/listener/__init__.py
|
JE-Chen/Python_JEAutoControl
|
477bf9612e28e9ab6d0a8e269db2f699e50a3744
|
[
"MIT"
] | null | null | null |
je_auto_control/osx/listener/__init__.py
|
JE-Chen/Python_JEAutoControl
|
477bf9612e28e9ab6d0a8e269db2f699e50a3744
|
[
"MIT"
] | null | null | null |
from je_auto_control.osx.listener import *
| 21.5
| 42
| 0.837209
| 7
| 43
| 4.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
eb0e76f7a7798eea817a72f7b3b6cb642361a51a
| 125
|
py
|
Python
|
almanak/file/__init__.py
|
clausjuhl/almanak
|
e29f98e2ebc7150930602b9dccb222354954fdc8
|
[
"MIT"
] | null | null | null |
almanak/file/__init__.py
|
clausjuhl/almanak
|
e29f98e2ebc7150930602b9dccb222354954fdc8
|
[
"MIT"
] | 1
|
2021-04-30T20:58:01.000Z
|
2021-04-30T20:58:01.000Z
|
almanak/file/__init__.py
|
almanak/almanak-module
|
e29f98e2ebc7150930602b9dccb222354954fdc8
|
[
"MIT"
] | null | null | null |
from almanak.file import compress, decompress, extract, fileinfo
__ALL__ = ['compress', 'decompress', 'extract', 'fileinfo']
| 41.666667
| 64
| 0.752
| 13
| 125
| 6.923077
| 0.692308
| 0.4
| 0.555556
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104
| 125
| 3
| 65
| 41.666667
| 0.803571
| 0
| 0
| 0
| 0
| 0
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
eb3f28657517706802ade37f4e397260967b11eb
| 21,438
|
py
|
Python
|
retrograph/training/preprocessors.py
|
ai-nikolai/Retrograph-1
|
54bd534d47218ca437c422a1abe5b1e995f55d71
|
[
"Apache-2.0"
] | 14
|
2020-06-01T14:22:43.000Z
|
2022-02-01T15:46:13.000Z
|
retrograph/training/preprocessors.py
|
ai-nikolai/Retrograph-1
|
54bd534d47218ca437c422a1abe5b1e995f55d71
|
[
"Apache-2.0"
] | 8
|
2020-12-17T08:23:46.000Z
|
2021-11-10T14:59:06.000Z
|
retrograph/training/preprocessors.py
|
ai-nikolai/Retrograph-1
|
54bd534d47218ca437c422a1abe5b1e995f55d71
|
[
"Apache-2.0"
] | 5
|
2020-06-01T14:21:17.000Z
|
2021-10-01T09:47:38.000Z
|
#####################################################
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
# Copyright 2019 Anne Lauscher, Nikolai Rozanov, Olga Majewska, Leonardo Ribeiro, Goran Glavas
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
####################################################
####################################################
# IMPORT STATEMENTS
####################################################
# >>>>>> Native Imports <<<<<<<
import os
# >>>>>> Package Imports <<<<<<<
import tensorflow as tf
import csv
# >>>>>> Local Imports <<<<<<<
from retrograph.models import tokenization
####################################################
# CODE
####################################################
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_test_examples(self, data_dir):
"""Gets a collection of `InputExample`s for prediction."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
@classmethod
def _read_tsv(cls, input_file, quotechar=None):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
lines.append(line)
return lines
class XnliProcessor(DataProcessor):
"""Processor for the XNLI data set."""
def __init__(self):
self.language = "zh"
def get_train_examples(self, data_dir):
"""See base class."""
lines = self._read_tsv(
os.path.join(data_dir, "multinli",
"multinli.train.%s.tsv" % self.language))
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "train-%d" % (i)
text_a = tokenization.convert_to_unicode(line[0])
text_b = tokenization.convert_to_unicode(line[1])
label = tokenization.convert_to_unicode(line[2])
if label == tokenization.convert_to_unicode("contradictory"):
label = tokenization.convert_to_unicode("contradiction")
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def get_dev_examples(self, data_dir):
"""See base class."""
lines = self._read_tsv(os.path.join(data_dir, "xnli.dev.tsv"))
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "dev-%d" % (i)
language = tokenization.convert_to_unicode(line[0])
if language != tokenization.convert_to_unicode(self.language):
continue
text_a = tokenization.convert_to_unicode(line[6])
text_b = tokenization.convert_to_unicode(line[7])
label = tokenization.convert_to_unicode(line[1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
class MnliProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir, matched=True):
"""See base class."""
if matched:
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")),
"dev_matched")
else:
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev_mismatched.tsv")),
"dev_mismatched")
def get_test_examples(self, data_dir, matched=True):
"""See base class."""
if matched:
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test_matched.tsv")), "test")
else:
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test_mismatched.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
text_a = tokenization.convert_to_unicode(line[8])
text_b = tokenization.convert_to_unicode(line[9])
if set_type == "test":
label = "contradiction"
else:
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class DiagnosticProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "diagnostic.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
if set_type == "test":
label = "contradiction"
else:
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class WNLIProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")),
"dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
if set_type == "test":
label = "0"
else:
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class MrpcProcessor(DataProcessor):
    """Processor for the MRPC data set (GLUE version)."""

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def get_labels(self):
        """See base class."""
        return ["0", "1"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header row.
                continue
            guid = "%s-%s" % (set_type, row_idx)
            sent1 = tokenization.convert_to_unicode(row[3])
            sent2 = tokenization.convert_to_unicode(row[4])
            # Test data carries no gold label; fall back to a dummy "0".
            label = "0" if set_type == "test" else tokenization.convert_to_unicode(row[0])
            examples.append(
                InputExample(guid=guid, text_a=sent1, text_b=sent2, label=label))
        return examples
class ColaProcessor(DataProcessor):
    """Processor for the CoLA data set (GLUE version)."""

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def get_labels(self):
        """See base class."""
        return ["0", "1"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        is_test = set_type == "test"
        examples = []
        for row_idx, row in enumerate(lines):
            # Only the test split has a header row.
            if is_test and row_idx == 0:
                continue
            guid = "%s-%s" % (set_type, row_idx)
            if is_test:
                sentence = tokenization.convert_to_unicode(row[1])
                label = "0"  # no gold labels in the test split
            else:
                sentence = tokenization.convert_to_unicode(row[3])
                label = tokenization.convert_to_unicode(row[1])
            examples.append(
                InputExample(guid=guid, text_a=sentence, text_b=None, label=label))
        return examples
'''Added by Anne'''
class SST2Processor(DataProcessor):
    """Processor for the SST-2 data set (GLUE version)."""

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def get_labels(self):
        """See base class."""
        return ["0", "1"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header row.
                continue
            guid = "%s-%s" % (set_type, row_idx)
            if set_type == "test":
                # Test rows are (index, sentence) with no gold label.
                sentence = tokenization.convert_to_unicode(row[1])
                label = "0"
            else:
                # Train/dev rows are (sentence, label).
                sentence = tokenization.convert_to_unicode(row[0])
                label = tokenization.convert_to_unicode(row[1])
            examples.append(
                InputExample(guid=guid, text_a=sentence, text_b=None, label=label))
        return examples
class QQPProcessor(DataProcessor):
    """Processor for the QQP data set (GLUE version)."""

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def get_labels(self):
        """See base class."""
        return ["0", "1"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header row.
                continue
            guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(row[0]))
            if set_type == "test":
                question1 = tokenization.convert_to_unicode(row[1])
                question2 = tokenization.convert_to_unicode(row[2])
                label = "0"  # no gold labels in the test split
            else:
                if len(row) != 6:
                    # Some QQP rows are malformed (embedded tabs); report and skip.
                    print(row)
                    continue
                question1 = tokenization.convert_to_unicode(row[3])
                question2 = tokenization.convert_to_unicode(row[4])
                label = tokenization.convert_to_unicode(row[5])
            examples.append(
                InputExample(guid=guid, text_a=question1, text_b=question2, label=label))
        return examples
class QNLIProcessor(DataProcessor):
    """Processor for the QNLI data set (GLUE version)."""

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def get_labels(self):
        """See base class."""
        return ["entailment", "not_entailment"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header row.
                continue
            guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(row[0]))
            question = tokenization.convert_to_unicode(row[1])
            sentence = tokenization.convert_to_unicode(row[2])
            # The test split ships without gold labels; use a placeholder.
            label = "entailment" if set_type == "test" else tokenization.convert_to_unicode(row[3])
            examples.append(
                InputExample(guid=guid, text_a=question, text_b=sentence, label=label))
        return examples
class STSBProcessor(DataProcessor):
    """Processor for the STS-B data set (regression task; labels are floats).

    NOTE(review): unlike the other processors in this file, no get_labels()
    is defined here — confirm the caller handles the regression case.
    """

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header row.
                continue
            guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(row[0]))
            if set_type == 'test':
                # Test rows end with the two sentences and carry no score.
                sent1 = tokenization.convert_to_unicode(row[-2])
                sent2 = tokenization.convert_to_unicode(row[-1])
                label = 0.0
            else:
                # Train/dev rows end with sentence1, sentence2, score.
                sent1 = tokenization.convert_to_unicode(row[-3])
                sent2 = tokenization.convert_to_unicode(row[-2])
                label = float(row[-1])
            examples.append(
                InputExample(guid=guid, text_a=sent1, text_b=sent2, label=label))
        return examples
class RTEProcessor(DataProcessor):
    """Processor for the RTE data set (GLUE version)."""

    def get_train_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "train.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "dev.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        tsv_path = os.path.join(data_dir, "test.tsv")
        return self._create_examples(self._read_tsv(tsv_path), "test")

    def get_labels(self):
        """See base class."""
        return ["entailment", "not_entailment"]

    def _create_examples(self, lines, set_type):
        """Creates examples for the training and dev sets."""
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header row.
                continue
            guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(row[0]))
            premise = tokenization.convert_to_unicode(row[1])
            hypothesis = tokenization.convert_to_unicode(row[2])
            # The test split ships without gold labels; use a placeholder.
            label = "entailment" if set_type == "test" else tokenization.convert_to_unicode(row[3])
            examples.append(
                InputExample(guid=guid, text_a=premise, text_b=hypothesis, label=label))
        return examples
class SICKEntailmentProcessor(DataProcessor):
    """Processor for the SICK entailment data set (SentEval version)."""

    def loadFile(self, fpath):
        """Reads a SICK TSV file and returns a list of example dicts.

        Each dict has keys 'X_A' (sentence A), 'X_B' (sentence B) and
        'y' (the entailment judgement), all raw strings.

        Bug fixes vs. the original:
        - os.open() returns a raw file descriptor, accepts neither a mode
          string nor an encoding keyword, and is not a context manager;
          the builtin open() is the correct call.
        - The original returned a single dict of parallel lists, which
          _create_examples then iterated (yielding key strings, not
          examples) — we now return one dict per example instead.
        - Sentences are kept as raw strings rather than pre-split token
          lists, since InputExample.text_a/text_b take text.
        """
        examples = []
        with open(fpath, 'r', encoding='utf-8') as f:
            for line_no, line in enumerate(f):
                if line_no == 0:
                    # First line is the column header.
                    continue
                fields = line.strip().split('\t')
                examples.append({'X_A': fields[1], 'X_B': fields[2], 'y': fields[4]})
        return examples

    def get_train_examples(self, data_dir):
        """See base class."""
        return self._create_examples(
            self.loadFile(os.path.join(data_dir, 'SICK_train.txt')), "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        return self._create_examples(
            self.loadFile(os.path.join(data_dir, 'SICK_trial.txt')), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        return self._create_examples(
            self.loadFile(os.path.join(data_dir, 'SICK_test_annotated.txt')), "test")

    def get_labels(self):
        """See base class."""
        return ['CONTRADICTION', 'NEUTRAL', 'ENTAILMENT']

    def _create_examples(self, dicts, set_type):
        """Creates examples for the training and dev sets.

        `dicts` is the list of per-example dicts produced by loadFile.
        """
        examples = []
        # Renamed loop variable: the original shadowed the builtin `dict`.
        for i, d in enumerate(dicts):
            guid = "%s-%s" % (set_type, str(i))
            text_a = tokenization.convert_to_unicode(d['X_A'])
            text_b = tokenization.convert_to_unicode(d['X_B'])
            label = tokenization.convert_to_unicode(d['y'])
            examples.append(
                InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
        return examples
class TRECProcessor(DataProcessor):
    """Processor for the TREC question-type data set (SentEval version)."""

    def loadFile(self, fpath):
        """Reads a TREC label file and returns a list of example dicts.

        Each input line looks like 'DESC:manner How did ... ?': the coarse
        label precedes the first ':', the fine-grained sub-label runs up to
        the first space, and the question text follows.

        Bug fix: the original used os.open(), which returns a raw file
        descriptor, accepts neither a mode string nor an encoding keyword,
        and is not a context manager; the builtin open() is correct here.
        It also returned a single dict of parallel lists, which the callers
        then tried to slice and iterate as a sequence of examples — we now
        return one dict per example instead.
        """
        examples = []
        with open(fpath, 'r', encoding='latin-1') as f:
            for line in f:
                target, sample = line.strip().split(':', 1)
                # Drop the fine-grained sub-label before the first space;
                # keep the question as a raw string (not a token list).
                question = sample.split(' ', 1)[1]
                examples.append({'X': question, 'y': target})
        return examples

    def get_train_examples(self, data_dir):
        """See base class."""
        data = self.loadFile(os.path.join(data_dir, 'train_5500.label'))
        # Bug fix: len(data) * 0.7 is a float and cannot be used as a
        # slice index; truncate to int for a 70/30 train/dev split.
        split_index = int(len(data) * 0.7)
        return self._create_examples(data[:split_index], "train")

    def get_dev_examples(self, data_dir):
        """See base class."""
        data = self.loadFile(os.path.join(data_dir, 'train_5500.label'))
        # Same 70/30 split as get_train_examples; the tail is the dev set.
        split_index = int(len(data) * 0.7)
        return self._create_examples(data[split_index:], "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        return self._create_examples(
            self.loadFile(os.path.join(data_dir, 'TREC_10.label')), "test")

    def get_labels(self):
        """See base class."""
        return ['ABBR', 'DESC', 'ENTY', 'HUM', 'LOC', 'NUM']

    def _create_examples(self, dicts, set_type):
        """Creates examples for the training and dev sets.

        `dicts` is the list of per-example dicts produced by loadFile.
        """
        examples = []
        # Renamed loop variable: the original shadowed the builtin `dict`.
        for i, d in enumerate(dicts):
            guid = "%s-%s" % (set_type, str(i))
            text_a = tokenization.convert_to_unicode(d['X'])
            label = tokenization.convert_to_unicode(d['y'])
            # Pass text_b=None explicitly, consistent with the other
            # single-sentence processors in this file.
            examples.append(
                InputExample(guid=guid, text_a=text_a, text_b=None, label=label))
        return examples
####################################################
# MAIN
####################################################
# EOF
| 33.496875
| 106
| 0.638073
| 2,864
| 21,438
| 4.563198
| 0.090782
| 0.078965
| 0.088377
| 0.117836
| 0.799602
| 0.785906
| 0.772133
| 0.754228
| 0.740608
| 0.730278
| 0
| 0.006694
| 0.205663
| 21,438
| 639
| 107
| 33.549296
| 0.760761
| 0.170911
| 0
| 0.746929
| 0
| 0
| 0.060811
| 0.002608
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169533
| false
| 0
| 0.009828
| 0
| 0.375921
| 0.002457
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
de336f4d11fd522e369219d283cb262644772035
| 20
|
py
|
Python
|
src/__init__.py
|
btrnt/butternut_backend
|
fa4639ad3c7e3e5a1674f391c751880dbd7311ee
|
[
"Apache-2.0"
] | null | null | null |
src/__init__.py
|
btrnt/butternut_backend
|
fa4639ad3c7e3e5a1674f391c751880dbd7311ee
|
[
"Apache-2.0"
] | null | null | null |
src/__init__.py
|
btrnt/butternut_backend
|
fa4639ad3c7e3e5a1674f391c751880dbd7311ee
|
[
"Apache-2.0"
] | null | null | null |
from .gltr import *
| 10
| 19
| 0.7
| 3
| 20
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 20
| 1
| 20
| 20
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de49be5c731b379d7e42ccc304c2ff995cf25d32
| 90
|
py
|
Python
|
cubes/__init__.py
|
lmjohns3/cube-experiment
|
ab6d1a9df95efebc369d184ab1c748d73d5c3313
|
[
"MIT"
] | null | null | null |
cubes/__init__.py
|
lmjohns3/cube-experiment
|
ab6d1a9df95efebc369d184ab1c748d73d5c3313
|
[
"MIT"
] | null | null | null |
cubes/__init__.py
|
lmjohns3/cube-experiment
|
ab6d1a9df95efebc369d184ab1c748d73d5c3313
|
[
"MIT"
] | null | null | null |
from .database import Experiment, Movement, Trial
from . import plots
from . import utils
| 22.5
| 49
| 0.788889
| 12
| 90
| 5.916667
| 0.666667
| 0.28169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 90
| 3
| 50
| 30
| 0.934211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
de53b7d985455ab72f9873d1114a71d97a573093
| 199
|
py
|
Python
|
src/schemathesis/runner/impl/__init__.py
|
gluhar2006/schemathesis
|
3cb6b0b4f5d93242da1f2e79575b6b7b3b7a63d1
|
[
"MIT"
] | 659
|
2020-09-03T13:27:50.000Z
|
2022-03-31T17:07:16.000Z
|
src/schemathesis/runner/impl/__init__.py
|
gluhar2006/schemathesis
|
3cb6b0b4f5d93242da1f2e79575b6b7b3b7a63d1
|
[
"MIT"
] | 651
|
2019-08-23T09:16:35.000Z
|
2020-09-02T08:30:10.000Z
|
src/schemathesis/runner/impl/__init__.py
|
gluhar2006/schemathesis
|
3cb6b0b4f5d93242da1f2e79575b6b7b3b7a63d1
|
[
"MIT"
] | 66
|
2020-09-05T07:09:03.000Z
|
2022-03-17T08:17:55.000Z
|
from .core import BaseRunner
from .solo import SingleThreadASGIRunner, SingleThreadRunner, SingleThreadWSGIRunner
from .threadpool import ThreadPoolASGIRunner, ThreadPoolRunner, ThreadPoolWSGIRunner
| 49.75
| 84
| 0.884422
| 16
| 199
| 11
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080402
| 199
| 3
| 85
| 66.333333
| 0.961749
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7205413d99b26bf52b71bbd9bf8fe81080939756
| 116
|
py
|
Python
|
overload/connectors/goo_settings/scopes.py
|
Mfgloger/overload
|
b34f88750d9272db268324807f7e8ba308940d1e
|
[
"MIT"
] | null | null | null |
overload/connectors/goo_settings/scopes.py
|
Mfgloger/overload
|
b34f88750d9272db268324807f7e8ba308940d1e
|
[
"MIT"
] | 58
|
2017-10-16T13:09:33.000Z
|
2022-01-10T15:35:58.000Z
|
overload/connectors/goo_settings/scopes.py
|
Mfgloger/overload
|
b34f88750d9272db268324807f7e8ba308940d1e
|
[
"MIT"
] | 2
|
2019-04-15T16:04:38.000Z
|
2020-11-03T17:58:33.000Z
|
SHEET_SCOPE = 'https://www.googleapis.com/auth/spreadsheets'
FDRIVE_SCOPE = 'https://www.googleapis.com/auth/drive'
| 38.666667
| 60
| 0.775862
| 16
| 116
| 5.5
| 0.625
| 0.227273
| 0.295455
| 0.522727
| 0.681818
| 0.681818
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051724
| 116
| 2
| 61
| 58
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.698276
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
72150968d30e95d745fb3e4402026e6dd5f2dbe7
| 141
|
py
|
Python
|
tests/missing_data/test_missing_data_air_passengers_DiscardRow_Interpolate.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/missing_data/test_missing_data_air_passengers_DiscardRow_Interpolate.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/missing_data/test_missing_data_air_passengers_DiscardRow_Interpolate.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
import tests.missing_data.test_missing_data_air_passengers_generic as gen
gen.test_air_passengers_missing_data('DiscardRow', 'Interpolate')
| 35.25
| 73
| 0.879433
| 20
| 141
| 5.7
| 0.6
| 0.289474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049645
| 141
| 3
| 74
| 47
| 0.850746
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
a0e110fff10fa9712a9f0bc71061820eb72b5e76
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/poetry/core/_vendor/tomlkit/parser.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/poetry/core/_vendor/tomlkit/parser.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/poetry/core/_vendor/tomlkit/parser.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/91/d9/5a/b639458555d29521c69f6e86669fd9f1c58fd54da9ad8ed8f7ad5a49d4
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 0
| 96
| 1
| 96
| 96
| 0.479167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a0f39ead98903d234151ddcb446e13e08044e851
| 49
|
py
|
Python
|
consolemenu/__init__.py
|
EgorAlmikeev/ConsoleMenu
|
e0e7845e9903a4f22ceda100a5fb194e8e567599
|
[
"MIT"
] | null | null | null |
consolemenu/__init__.py
|
EgorAlmikeev/ConsoleMenu
|
e0e7845e9903a4f22ceda100a5fb194e8e567599
|
[
"MIT"
] | null | null | null |
consolemenu/__init__.py
|
EgorAlmikeev/ConsoleMenu
|
e0e7845e9903a4f22ceda100a5fb194e8e567599
|
[
"MIT"
] | null | null | null |
from consolemenu.console_menu import ConsoleMenu
| 24.5
| 48
| 0.897959
| 6
| 49
| 7.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9d51e036a637bc87e3f1a3072cca7e076433431e
| 36
|
py
|
Python
|
__init__.py
|
esanzy87/codef-python
|
cd60a3b58781937c17f14732fb0dd546186c4cc8
|
[
"MIT"
] | 1
|
2020-04-23T07:19:22.000Z
|
2020-04-23T07:19:22.000Z
|
__init__.py
|
esanzy87/codef-python
|
cd60a3b58781937c17f14732fb0dd546186c4cc8
|
[
"MIT"
] | null | null | null |
__init__.py
|
esanzy87/codef-python
|
cd60a3b58781937c17f14732fb0dd546186c4cc8
|
[
"MIT"
] | null | null | null |
from .invoker import InvokerFactory
| 18
| 35
| 0.861111
| 4
| 36
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9d5da08bdec79cfdc75975ab4a140ed74fa28c9d
| 37,698
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int1/33.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int1/33.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int1/33.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 2314
passenger_arriving = (
(4, 9, 8, 4, 2, 0, 5, 6, 9, 2, 2, 0), # 0
(1, 7, 3, 4, 3, 0, 5, 6, 5, 4, 0, 0), # 1
(5, 4, 2, 2, 0, 0, 5, 2, 3, 5, 2, 0), # 2
(7, 7, 5, 4, 2, 0, 4, 11, 4, 4, 1, 0), # 3
(2, 5, 6, 1, 1, 0, 1, 11, 1, 2, 0, 0), # 4
(2, 4, 2, 4, 2, 0, 2, 7, 2, 2, 0, 0), # 5
(2, 2, 3, 1, 2, 0, 10, 9, 13, 0, 0, 0), # 6
(2, 2, 3, 5, 4, 0, 5, 7, 4, 5, 2, 0), # 7
(2, 6, 5, 4, 0, 0, 11, 6, 5, 5, 2, 0), # 8
(2, 1, 2, 2, 0, 0, 5, 5, 7, 3, 2, 0), # 9
(5, 2, 5, 4, 1, 0, 4, 3, 1, 5, 1, 0), # 10
(2, 5, 4, 4, 0, 0, 3, 7, 3, 1, 0, 0), # 11
(1, 5, 4, 3, 3, 0, 10, 6, 7, 8, 1, 0), # 12
(7, 4, 7, 1, 3, 0, 3, 10, 4, 3, 1, 0), # 13
(7, 3, 5, 3, 1, 0, 4, 4, 1, 2, 0, 0), # 14
(3, 7, 8, 2, 2, 0, 6, 7, 6, 2, 0, 0), # 15
(4, 10, 4, 1, 1, 0, 3, 8, 6, 9, 1, 0), # 16
(2, 7, 3, 4, 1, 0, 4, 7, 2, 4, 0, 0), # 17
(2, 4, 3, 0, 1, 0, 3, 4, 5, 3, 1, 0), # 18
(2, 9, 7, 1, 3, 0, 5, 10, 6, 5, 1, 0), # 19
(4, 6, 7, 3, 2, 0, 10, 9, 3, 3, 1, 0), # 20
(3, 3, 0, 0, 3, 0, 3, 9, 2, 5, 0, 0), # 21
(5, 12, 7, 1, 1, 0, 4, 7, 3, 5, 3, 0), # 22
(2, 4, 7, 2, 3, 0, 2, 6, 6, 4, 1, 0), # 23
(3, 7, 5, 4, 0, 0, 6, 7, 5, 6, 3, 0), # 24
(6, 1, 7, 4, 1, 0, 4, 4, 3, 5, 0, 0), # 25
(2, 3, 3, 1, 2, 0, 4, 6, 6, 2, 2, 0), # 26
(2, 5, 5, 2, 1, 0, 9, 11, 4, 3, 0, 0), # 27
(3, 5, 3, 2, 0, 0, 6, 5, 6, 4, 1, 0), # 28
(3, 5, 11, 1, 2, 0, 4, 3, 2, 2, 0, 0), # 29
(1, 4, 4, 1, 0, 0, 4, 5, 8, 3, 2, 0), # 30
(3, 6, 3, 3, 0, 0, 8, 7, 3, 3, 2, 0), # 31
(1, 5, 4, 2, 1, 0, 6, 4, 4, 1, 1, 0), # 32
(6, 6, 4, 6, 1, 0, 4, 5, 4, 3, 3, 0), # 33
(5, 5, 7, 5, 0, 0, 3, 6, 4, 4, 2, 0), # 34
(6, 6, 6, 3, 2, 0, 2, 9, 7, 6, 2, 0), # 35
(5, 5, 8, 1, 0, 0, 4, 2, 6, 5, 1, 0), # 36
(4, 8, 4, 6, 5, 0, 4, 8, 7, 7, 0, 0), # 37
(3, 6, 4, 7, 2, 0, 2, 14, 9, 3, 1, 0), # 38
(2, 4, 7, 1, 0, 0, 1, 6, 1, 2, 1, 0), # 39
(3, 4, 6, 2, 1, 0, 11, 10, 5, 5, 2, 0), # 40
(2, 7, 4, 1, 5, 0, 4, 10, 2, 6, 1, 0), # 41
(4, 6, 8, 6, 3, 0, 9, 7, 2, 3, 1, 0), # 42
(3, 9, 4, 1, 0, 0, 4, 7, 7, 3, 2, 0), # 43
(5, 3, 9, 3, 2, 0, 5, 2, 3, 4, 0, 0), # 44
(3, 9, 4, 4, 1, 0, 3, 6, 3, 5, 2, 0), # 45
(2, 6, 5, 1, 0, 0, 8, 5, 4, 4, 3, 0), # 46
(3, 2, 3, 6, 1, 0, 5, 6, 3, 3, 3, 0), # 47
(2, 3, 5, 2, 2, 0, 4, 11, 3, 3, 3, 0), # 48
(2, 4, 5, 5, 1, 0, 7, 6, 3, 5, 3, 0), # 49
(5, 3, 6, 2, 1, 0, 7, 13, 4, 4, 1, 0), # 50
(4, 6, 4, 3, 4, 0, 5, 7, 6, 4, 3, 0), # 51
(5, 6, 5, 3, 1, 0, 4, 4, 3, 3, 0, 0), # 52
(5, 9, 5, 3, 2, 0, 3, 6, 0, 9, 1, 0), # 53
(2, 5, 5, 3, 3, 0, 1, 4, 5, 3, 3, 0), # 54
(3, 8, 8, 2, 1, 0, 5, 5, 1, 4, 1, 0), # 55
(8, 6, 3, 2, 1, 0, 1, 6, 5, 6, 1, 0), # 56
(2, 11, 5, 2, 3, 0, 4, 5, 4, 8, 1, 0), # 57
(2, 9, 5, 4, 5, 0, 2, 3, 5, 3, 2, 0), # 58
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 59
)
station_arriving_intensity = (
(2.649651558384548, 6.796460700757575, 7.9942360218509, 6.336277173913043, 7.143028846153846, 4.75679347826087), # 0
(2.6745220100478, 6.872041598712823, 8.037415537524994, 6.371564387077295, 7.196566506410256, 4.7551721391908215), # 1
(2.699108477221734, 6.946501402918069, 8.07957012282205, 6.406074879227053, 7.248974358974359, 4.753501207729468), # 2
(2.72339008999122, 7.019759765625, 8.120668982969152, 6.4397792119565205, 7.300204326923078, 4.7517809103260875), # 3
(2.747345978441128, 7.091736339085298, 8.160681323193373, 6.472647946859904, 7.350208333333334, 4.750011473429951), # 4
(2.7709552726563262, 7.162350775550646, 8.199576348721793, 6.504651645531401, 7.39893830128205, 4.748193123490338), # 5
(2.794197102721686, 7.231522727272727, 8.237323264781493, 6.535760869565218, 7.446346153846154, 4.746326086956522), # 6
(2.817050598722076, 7.299171846503226, 8.273891276599542, 6.565946180555556, 7.492383814102565, 4.744410590277778), # 7
(2.8394948907423667, 7.365217785493826, 8.309249589403029, 6.595178140096618, 7.537003205128205, 4.7424468599033816), # 8
(2.8615091088674274, 7.429580196496212, 8.343367408419024, 6.623427309782609, 7.580156249999999, 4.740435122282609), # 9
(2.8830723831821286, 7.492178731762065, 8.376213938874606, 6.65066425120773, 7.621794871794872, 4.738375603864734), # 10
(2.9041638437713395, 7.55293304354307, 8.407758385996857, 6.676859525966184, 7.661870993589743, 4.736268531099034), # 11
(2.92476262071993, 7.611762784090908, 8.437969955012854, 6.7019836956521734, 7.700336538461538, 4.734114130434782), # 12
(2.944847844112769, 7.668587605657268, 8.46681785114967, 6.726007321859903, 7.737143429487181, 4.731912628321256), # 13
(2.9643986440347283, 7.723327160493828, 8.494271279634388, 6.748900966183574, 7.772243589743589, 4.729664251207729), # 14
(2.9833941505706756, 7.775901100852272, 8.520299445694086, 6.770635190217391, 7.8055889423076925, 4.7273692255434785), # 15
(3.001813493805482, 7.826229078984287, 8.544871554555842, 6.791180555555555, 7.8371314102564105, 4.725027777777778), # 16
(3.019635803824017, 7.874230747141554, 8.567956811446729, 6.810507623792271, 7.866822916666667, 4.722640134359904), # 17
(3.03684021071115, 7.919825757575757, 8.589524421593831, 6.82858695652174, 7.894615384615387, 4.72020652173913), # 18
(3.053405844551751, 7.962933762538579, 8.609543590224222, 6.845389115338164, 7.9204607371794875, 4.717727166364734), # 19
(3.0693118354306894, 8.003474414281705, 8.62798352256498, 6.860884661835749, 7.944310897435898, 4.71520229468599), # 20
(3.084537313432836, 8.041367365056816, 8.644813423843189, 6.875044157608696, 7.9661177884615375, 4.712632133152174), # 21
(3.099061408643059, 8.076532267115601, 8.660002499285918, 6.887838164251208, 7.985833333333332, 4.710016908212561), # 22
(3.1128632511462295, 8.108888772709737, 8.673519954120252, 6.899237243357488, 8.003409455128205, 4.707356846316426), # 23
(3.125921971027217, 8.138356534090908, 8.685334993573264, 6.909211956521739, 8.018798076923076, 4.704652173913043), # 24
(3.1382166983708903, 8.164855203510802, 8.695416822872037, 6.917732865338165, 8.03195112179487, 4.701903117451691), # 25
(3.1497265632621207, 8.188304433221099, 8.703734647243644, 6.9247705314009655, 8.042820512820512, 4.699109903381642), # 26
(3.160430695785777, 8.208623875473483, 8.710257671915166, 6.930295516304349, 8.051358173076924, 4.696272758152174), # 27
(3.1703082260267292, 8.22573318251964, 8.714955102113683, 6.934278381642512, 8.057516025641025, 4.69339190821256), # 28
(3.1793382840698468, 8.239552006611252, 8.717796143066266, 6.936689689009662, 8.061245993589743, 4.690467580012077), # 29
(3.1875, 8.25, 8.71875, 6.9375, 8.0625, 4.6875), # 30
(3.1951370284526854, 8.258678799715907, 8.718034948671496, 6.937353656045752, 8.062043661347518, 4.683376259786773), # 31
(3.202609175191816, 8.267242897727273, 8.715910024154589, 6.93691748366013, 8.06068439716312, 4.677024758454107), # 32
(3.2099197969948845, 8.275691228693182, 8.712405570652175, 6.936195772058824, 8.058436835106383, 4.66850768365817), # 33
(3.217072250639386, 8.284022727272728, 8.70755193236715, 6.935192810457517, 8.05531560283688, 4.657887223055139), # 34
(3.224069892902813, 8.292236328124998, 8.701379453502415, 6.933912888071895, 8.051335328014185, 4.645225564301183), # 35
(3.23091608056266, 8.300330965909092, 8.69391847826087, 6.932360294117648, 8.046510638297873, 4.630584895052474), # 36
(3.2376141703964194, 8.308305575284091, 8.68519935084541, 6.9305393178104575, 8.040856161347516, 4.614027402965184), # 37
(3.2441675191815853, 8.31615909090909, 8.675252415458937, 6.9284542483660125, 8.034386524822695, 4.595615275695485), # 38
(3.250579483695652, 8.323890447443182, 8.664108016304347, 6.926109375, 8.027116356382978, 4.57541070089955), # 39
(3.2568534207161126, 8.331498579545455, 8.651796497584542, 6.923508986928105, 8.019060283687942, 4.5534758662335495), # 40
(3.26299268702046, 8.338982421874999, 8.638348203502416, 6.920657373366013, 8.010232934397163, 4.529872959353657), # 41
(3.269000639386189, 8.34634090909091, 8.62379347826087, 6.917558823529411, 8.000648936170213, 4.504664167916042), # 42
(3.2748806345907933, 8.353572975852272, 8.608162666062801, 6.914217626633987, 7.990322916666666, 4.477911679576878), # 43
(3.2806360294117645, 8.360677556818182, 8.591486111111111, 6.910638071895424, 7.979269503546099, 4.449677681992337), # 44
(3.286270180626598, 8.367653586647727, 8.573794157608697, 6.906824448529411, 7.967503324468085, 4.420024362818591), # 45
(3.291786445012788, 8.374500000000001, 8.555117149758455, 6.902781045751634, 7.955039007092199, 4.389013909711811), # 46
(3.297188179347826, 8.381215731534091, 8.535485431763284, 6.898512152777777, 7.941891179078015, 4.356708510328169), # 47
(3.3024787404092075, 8.387799715909091, 8.514929347826087, 6.894022058823529, 7.928074468085106, 4.323170352323839), # 48
(3.307661484974424, 8.39425088778409, 8.493479242149759, 6.889315053104576, 7.91360350177305, 4.288461623354989), # 49
(3.312739769820972, 8.40056818181818, 8.471165458937199, 6.884395424836602, 7.898492907801418, 4.252644511077794), # 50
(3.317716951726343, 8.406750532670454, 8.448018342391304, 6.879267463235294, 7.882757313829787, 4.215781203148426), # 51
(3.322596387468031, 8.412796875, 8.424068236714975, 6.87393545751634, 7.86641134751773, 4.177933887223055), # 52
(3.3273814338235295, 8.41870614346591, 8.39934548611111, 6.868403696895425, 7.849469636524823, 4.139164750957854), # 53
(3.332075447570333, 8.424477272727271, 8.373880434782608, 6.8626764705882355, 7.831946808510638, 4.099535982008995), # 54
(3.336681785485933, 8.430109197443182, 8.347703426932366, 6.856758067810458, 7.813857491134752, 4.05910976803265), # 55
(3.341203804347826, 8.435600852272726, 8.320844806763285, 6.8506527777777775, 7.795216312056738, 4.017948296684991), # 56
(3.345644860933504, 8.440951171875001, 8.29333491847826, 6.844364889705882, 7.77603789893617, 3.9761137556221886), # 57
(3.3500083120204605, 8.44615909090909, 8.265204106280192, 6.837898692810458, 7.756336879432624, 3.9336683325004165), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(4, 9, 8, 4, 2, 0, 5, 6, 9, 2, 2, 0), # 0
(5, 16, 11, 8, 5, 0, 10, 12, 14, 6, 2, 0), # 1
(10, 20, 13, 10, 5, 0, 15, 14, 17, 11, 4, 0), # 2
(17, 27, 18, 14, 7, 0, 19, 25, 21, 15, 5, 0), # 3
(19, 32, 24, 15, 8, 0, 20, 36, 22, 17, 5, 0), # 4
(21, 36, 26, 19, 10, 0, 22, 43, 24, 19, 5, 0), # 5
(23, 38, 29, 20, 12, 0, 32, 52, 37, 19, 5, 0), # 6
(25, 40, 32, 25, 16, 0, 37, 59, 41, 24, 7, 0), # 7
(27, 46, 37, 29, 16, 0, 48, 65, 46, 29, 9, 0), # 8
(29, 47, 39, 31, 16, 0, 53, 70, 53, 32, 11, 0), # 9
(34, 49, 44, 35, 17, 0, 57, 73, 54, 37, 12, 0), # 10
(36, 54, 48, 39, 17, 0, 60, 80, 57, 38, 12, 0), # 11
(37, 59, 52, 42, 20, 0, 70, 86, 64, 46, 13, 0), # 12
(44, 63, 59, 43, 23, 0, 73, 96, 68, 49, 14, 0), # 13
(51, 66, 64, 46, 24, 0, 77, 100, 69, 51, 14, 0), # 14
(54, 73, 72, 48, 26, 0, 83, 107, 75, 53, 14, 0), # 15
(58, 83, 76, 49, 27, 0, 86, 115, 81, 62, 15, 0), # 16
(60, 90, 79, 53, 28, 0, 90, 122, 83, 66, 15, 0), # 17
(62, 94, 82, 53, 29, 0, 93, 126, 88, 69, 16, 0), # 18
(64, 103, 89, 54, 32, 0, 98, 136, 94, 74, 17, 0), # 19
(68, 109, 96, 57, 34, 0, 108, 145, 97, 77, 18, 0), # 20
(71, 112, 96, 57, 37, 0, 111, 154, 99, 82, 18, 0), # 21
(76, 124, 103, 58, 38, 0, 115, 161, 102, 87, 21, 0), # 22
(78, 128, 110, 60, 41, 0, 117, 167, 108, 91, 22, 0), # 23
(81, 135, 115, 64, 41, 0, 123, 174, 113, 97, 25, 0), # 24
(87, 136, 122, 68, 42, 0, 127, 178, 116, 102, 25, 0), # 25
(89, 139, 125, 69, 44, 0, 131, 184, 122, 104, 27, 0), # 26
(91, 144, 130, 71, 45, 0, 140, 195, 126, 107, 27, 0), # 27
(94, 149, 133, 73, 45, 0, 146, 200, 132, 111, 28, 0), # 28
(97, 154, 144, 74, 47, 0, 150, 203, 134, 113, 28, 0), # 29
(98, 158, 148, 75, 47, 0, 154, 208, 142, 116, 30, 0), # 30
(101, 164, 151, 78, 47, 0, 162, 215, 145, 119, 32, 0), # 31
(102, 169, 155, 80, 48, 0, 168, 219, 149, 120, 33, 0), # 32
(108, 175, 159, 86, 49, 0, 172, 224, 153, 123, 36, 0), # 33
(113, 180, 166, 91, 49, 0, 175, 230, 157, 127, 38, 0), # 34
(119, 186, 172, 94, 51, 0, 177, 239, 164, 133, 40, 0), # 35
(124, 191, 180, 95, 51, 0, 181, 241, 170, 138, 41, 0), # 36
(128, 199, 184, 101, 56, 0, 185, 249, 177, 145, 41, 0), # 37
(131, 205, 188, 108, 58, 0, 187, 263, 186, 148, 42, 0), # 38
(133, 209, 195, 109, 58, 0, 188, 269, 187, 150, 43, 0), # 39
(136, 213, 201, 111, 59, 0, 199, 279, 192, 155, 45, 0), # 40
(138, 220, 205, 112, 64, 0, 203, 289, 194, 161, 46, 0), # 41
(142, 226, 213, 118, 67, 0, 212, 296, 196, 164, 47, 0), # 42
(145, 235, 217, 119, 67, 0, 216, 303, 203, 167, 49, 0), # 43
(150, 238, 226, 122, 69, 0, 221, 305, 206, 171, 49, 0), # 44
(153, 247, 230, 126, 70, 0, 224, 311, 209, 176, 51, 0), # 45
(155, 253, 235, 127, 70, 0, 232, 316, 213, 180, 54, 0), # 46
(158, 255, 238, 133, 71, 0, 237, 322, 216, 183, 57, 0), # 47
(160, 258, 243, 135, 73, 0, 241, 333, 219, 186, 60, 0), # 48
(162, 262, 248, 140, 74, 0, 248, 339, 222, 191, 63, 0), # 49
(167, 265, 254, 142, 75, 0, 255, 352, 226, 195, 64, 0), # 50
(171, 271, 258, 145, 79, 0, 260, 359, 232, 199, 67, 0), # 51
(176, 277, 263, 148, 80, 0, 264, 363, 235, 202, 67, 0), # 52
(181, 286, 268, 151, 82, 0, 267, 369, 235, 211, 68, 0), # 53
(183, 291, 273, 154, 85, 0, 268, 373, 240, 214, 71, 0), # 54
(186, 299, 281, 156, 86, 0, 273, 378, 241, 218, 72, 0), # 55
(194, 305, 284, 158, 87, 0, 274, 384, 246, 224, 73, 0), # 56
(196, 316, 289, 160, 90, 0, 278, 389, 250, 232, 74, 0), # 57
(198, 325, 294, 164, 95, 0, 280, 392, 255, 235, 76, 0), # 58
(198, 325, 294, 164, 95, 0, 280, 392, 255, 235, 76, 0), # 59
)
passenger_arriving_rate = (
(2.649651558384548, 5.43716856060606, 4.79654161311054, 2.534510869565217, 1.428605769230769, 0.0, 4.75679347826087, 5.714423076923076, 3.801766304347826, 3.1976944087403596, 1.359292140151515, 0.0), # 0
(2.6745220100478, 5.497633278970258, 4.822449322514997, 2.5486257548309177, 1.439313301282051, 0.0, 4.7551721391908215, 5.757253205128204, 3.8229386322463768, 3.2149662150099974, 1.3744083197425645, 0.0), # 1
(2.699108477221734, 5.557201122334455, 4.8477420736932295, 2.562429951690821, 1.4497948717948717, 0.0, 4.753501207729468, 5.799179487179487, 3.8436449275362317, 3.23182804912882, 1.3893002805836137, 0.0), # 2
(2.72339008999122, 5.6158078125, 4.872401389781491, 2.575911684782608, 1.4600408653846155, 0.0, 4.7517809103260875, 5.840163461538462, 3.863867527173912, 3.2482675931876606, 1.403951953125, 0.0), # 3
(2.747345978441128, 5.673389071268238, 4.896408793916024, 2.589059178743961, 1.4700416666666667, 0.0, 4.750011473429951, 5.880166666666667, 3.883588768115942, 3.2642725292773487, 1.4183472678170594, 0.0), # 4
(2.7709552726563262, 5.729880620440516, 4.919745809233076, 2.6018606582125603, 1.47978766025641, 0.0, 4.748193123490338, 5.91915064102564, 3.9027909873188404, 3.279830539488717, 1.432470155110129, 0.0), # 5
(2.794197102721686, 5.785218181818181, 4.942393958868895, 2.614304347826087, 1.4892692307692306, 0.0, 4.746326086956522, 5.957076923076922, 3.9214565217391306, 3.294929305912597, 1.4463045454545453, 0.0), # 6
(2.817050598722076, 5.83933747720258, 4.964334765959725, 2.626378472222222, 1.498476762820513, 0.0, 4.744410590277778, 5.993907051282052, 3.939567708333333, 3.309556510639817, 1.459834369300645, 0.0), # 7
(2.8394948907423667, 5.89217422839506, 4.985549753641817, 2.638071256038647, 1.5074006410256409, 0.0, 4.7424468599033816, 6.0296025641025635, 3.9571068840579704, 3.3236998357612113, 1.473043557098765, 0.0), # 8
(2.8615091088674274, 5.943664157196969, 5.006020445051414, 2.649370923913043, 1.5160312499999997, 0.0, 4.740435122282609, 6.064124999999999, 3.9740563858695652, 3.3373469633676094, 1.4859160392992423, 0.0), # 9
(2.8830723831821286, 5.993742985409652, 5.025728363324764, 2.660265700483092, 1.5243589743589743, 0.0, 4.738375603864734, 6.097435897435897, 3.990398550724638, 3.3504855755498424, 1.498435746352413, 0.0), # 10
(2.9041638437713395, 6.042346434834456, 5.044655031598114, 2.6707438103864733, 1.5323741987179484, 0.0, 4.736268531099034, 6.129496794871794, 4.0061157155797105, 3.3631033543987425, 1.510586608708614, 0.0), # 11
(2.92476262071993, 6.089410227272726, 5.062781973007712, 2.680793478260869, 1.5400673076923075, 0.0, 4.734114130434782, 6.16026923076923, 4.021190217391304, 3.375187982005141, 1.5223525568181815, 0.0), # 12
(2.944847844112769, 6.134870084525814, 5.080090710689802, 2.690402928743961, 1.547428685897436, 0.0, 4.731912628321256, 6.189714743589744, 4.035604393115942, 3.386727140459868, 1.5337175211314535, 0.0), # 13
(2.9643986440347283, 6.1786617283950624, 5.096562767780632, 2.699560386473429, 1.5544487179487176, 0.0, 4.729664251207729, 6.217794871794871, 4.049340579710144, 3.397708511853755, 1.5446654320987656, 0.0), # 14
(2.9833941505706756, 6.220720880681816, 5.112179667416451, 2.708254076086956, 1.5611177884615384, 0.0, 4.7273692255434785, 6.2444711538461535, 4.062381114130434, 3.408119778277634, 1.555180220170454, 0.0), # 15
(3.001813493805482, 6.26098326318743, 5.126922932733505, 2.716472222222222, 1.5674262820512819, 0.0, 4.725027777777778, 6.2697051282051275, 4.074708333333333, 3.4179486218223363, 1.5652458157968574, 0.0), # 16
(3.019635803824017, 6.299384597713242, 5.140774086868038, 2.724203049516908, 1.5733645833333332, 0.0, 4.722640134359904, 6.293458333333333, 4.0863045742753625, 3.4271827245786914, 1.5748461494283106, 0.0), # 17
(3.03684021071115, 6.3358606060606055, 5.153714652956299, 2.7314347826086958, 1.578923076923077, 0.0, 4.72020652173913, 6.315692307692308, 4.097152173913043, 3.435809768637532, 1.5839651515151514, 0.0), # 18
(3.053405844551751, 6.370347010030863, 5.165726154134533, 2.738155646135265, 1.5840921474358973, 0.0, 4.717727166364734, 6.336368589743589, 4.107233469202898, 3.4438174360896885, 1.5925867525077158, 0.0), # 19
(3.0693118354306894, 6.402779531425363, 5.1767901135389875, 2.7443538647342995, 1.5888621794871793, 0.0, 4.71520229468599, 6.355448717948717, 4.11653079710145, 3.4511934090259917, 1.6006948828563408, 0.0), # 20
(3.084537313432836, 6.433093892045452, 5.186888054305913, 2.750017663043478, 1.5932235576923073, 0.0, 4.712632133152174, 6.372894230769229, 4.125026494565217, 3.4579253695372754, 1.608273473011363, 0.0), # 21
(3.099061408643059, 6.46122581369248, 5.19600149957155, 2.7551352657004826, 1.5971666666666662, 0.0, 4.710016908212561, 6.388666666666665, 4.132702898550725, 3.464000999714367, 1.61530645342312, 0.0), # 22
(3.1128632511462295, 6.487111018167789, 5.204111972472151, 2.759694897342995, 1.6006818910256408, 0.0, 4.707356846316426, 6.402727564102563, 4.139542346014493, 3.4694079816481005, 1.6217777545419472, 0.0), # 23
(3.125921971027217, 6.5106852272727265, 5.211200996143958, 2.763684782608695, 1.6037596153846152, 0.0, 4.704652173913043, 6.415038461538461, 4.1455271739130435, 3.474133997429305, 1.6276713068181816, 0.0), # 24
(3.1382166983708903, 6.531884162808641, 5.217250093723222, 2.7670931461352657, 1.606390224358974, 0.0, 4.701903117451691, 6.425560897435896, 4.150639719202899, 3.4781667291488145, 1.6329710407021603, 0.0), # 25
(3.1497265632621207, 6.550643546576878, 5.222240788346187, 2.7699082125603858, 1.6085641025641022, 0.0, 4.699109903381642, 6.434256410256409, 4.154862318840579, 3.4814938588974575, 1.6376608866442195, 0.0), # 26
(3.160430695785777, 6.566899100378786, 5.226154603149099, 2.772118206521739, 1.6102716346153847, 0.0, 4.696272758152174, 6.441086538461539, 4.158177309782609, 3.484103068766066, 1.6417247750946966, 0.0), # 27
(3.1703082260267292, 6.580586546015712, 5.228973061268209, 2.7737113526570045, 1.6115032051282048, 0.0, 4.69339190821256, 6.446012820512819, 4.160567028985507, 3.4859820408454727, 1.645146636503928, 0.0), # 28
(3.1793382840698468, 6.591641605289001, 5.230677685839759, 2.7746758756038647, 1.6122491987179486, 0.0, 4.690467580012077, 6.448996794871794, 4.162013813405797, 3.487118457226506, 1.6479104013222503, 0.0), # 29
(3.1875, 6.6, 5.23125, 2.775, 1.6124999999999998, 0.0, 4.6875, 6.449999999999999, 4.1625, 3.4875, 1.65, 0.0), # 30
(3.1951370284526854, 6.606943039772726, 5.230820969202898, 2.7749414624183006, 1.6124087322695035, 0.0, 4.683376259786773, 6.449634929078014, 4.162412193627451, 3.4872139794685983, 1.6517357599431814, 0.0), # 31
(3.202609175191816, 6.613794318181818, 5.229546014492753, 2.7747669934640515, 1.6121368794326238, 0.0, 4.677024758454107, 6.448547517730495, 4.162150490196078, 3.4863640096618354, 1.6534485795454545, 0.0), # 32
(3.2099197969948845, 6.620552982954545, 5.227443342391305, 2.774478308823529, 1.6116873670212764, 0.0, 4.66850768365817, 6.446749468085105, 4.161717463235294, 3.4849622282608697, 1.6551382457386363, 0.0), # 33
(3.217072250639386, 6.627218181818182, 5.224531159420289, 2.7740771241830067, 1.6110631205673758, 0.0, 4.657887223055139, 6.444252482269503, 4.16111568627451, 3.4830207729468596, 1.6568045454545455, 0.0), # 34
(3.224069892902813, 6.633789062499998, 5.220827672101449, 2.773565155228758, 1.6102670656028368, 0.0, 4.645225564301183, 6.441068262411347, 4.160347732843137, 3.480551781400966, 1.6584472656249996, 0.0), # 35
(3.23091608056266, 6.6402647727272734, 5.2163510869565215, 2.7729441176470586, 1.6093021276595745, 0.0, 4.630584895052474, 6.437208510638298, 4.159416176470589, 3.477567391304347, 1.6600661931818184, 0.0), # 36
(3.2376141703964194, 6.6466444602272725, 5.211119610507246, 2.7722157271241827, 1.6081712322695032, 0.0, 4.614027402965184, 6.432684929078013, 4.158323590686274, 3.474079740338164, 1.6616611150568181, 0.0), # 37
(3.2441675191815853, 6.652927272727272, 5.205151449275362, 2.7713816993464047, 1.6068773049645388, 0.0, 4.595615275695485, 6.427509219858155, 4.157072549019607, 3.4701009661835744, 1.663231818181818, 0.0), # 38
(3.250579483695652, 6.659112357954545, 5.198464809782608, 2.7704437499999996, 1.6054232712765955, 0.0, 4.57541070089955, 6.421693085106382, 4.155665625, 3.4656432065217384, 1.6647780894886361, 0.0), # 39
(3.2568534207161126, 6.6651988636363635, 5.191077898550724, 2.7694035947712417, 1.6038120567375882, 0.0, 4.5534758662335495, 6.415248226950353, 4.154105392156863, 3.4607185990338163, 1.6662997159090909, 0.0), # 40
(3.26299268702046, 6.671185937499998, 5.1830089221014495, 2.768262949346405, 1.6020465868794325, 0.0, 4.529872959353657, 6.40818634751773, 4.152394424019608, 3.455339281400966, 1.6677964843749995, 0.0), # 41
(3.269000639386189, 6.677072727272728, 5.174276086956522, 2.767023529411764, 1.6001297872340425, 0.0, 4.504664167916042, 6.40051914893617, 4.150535294117646, 3.4495173913043478, 1.669268181818182, 0.0), # 42
(3.2748806345907933, 6.682858380681817, 5.164897599637681, 2.7656870506535944, 1.5980645833333331, 0.0, 4.477911679576878, 6.3922583333333325, 4.148530575980392, 3.4432650664251203, 1.6707145951704543, 0.0), # 43
(3.2806360294117645, 6.688542045454545, 5.154891666666667, 2.7642552287581696, 1.5958539007092198, 0.0, 4.449677681992337, 6.383415602836879, 4.146382843137254, 3.4365944444444443, 1.6721355113636363, 0.0), # 44
(3.286270180626598, 6.694122869318181, 5.144276494565218, 2.7627297794117642, 1.593500664893617, 0.0, 4.420024362818591, 6.374002659574468, 4.144094669117647, 3.4295176630434785, 1.6735307173295453, 0.0), # 45
(3.291786445012788, 6.6996, 5.133070289855073, 2.761112418300653, 1.5910078014184397, 0.0, 4.389013909711811, 6.364031205673759, 4.14166862745098, 3.4220468599033818, 1.6749, 0.0), # 46
(3.297188179347826, 6.704972585227273, 5.12129125905797, 2.759404861111111, 1.588378235815603, 0.0, 4.356708510328169, 6.353512943262412, 4.139107291666666, 3.4141941727053133, 1.6762431463068181, 0.0), # 47
(3.3024787404092075, 6.710239772727273, 5.108957608695651, 2.757608823529411, 1.5856148936170211, 0.0, 4.323170352323839, 6.3424595744680845, 4.136413235294117, 3.4059717391304343, 1.6775599431818182, 0.0), # 48
(3.307661484974424, 6.715400710227271, 5.096087545289855, 2.75572602124183, 1.5827207003546098, 0.0, 4.288461623354989, 6.330882801418439, 4.133589031862745, 3.3973916968599034, 1.6788501775568176, 0.0), # 49
(3.312739769820972, 6.720454545454543, 5.082699275362319, 2.7537581699346405, 1.5796985815602835, 0.0, 4.252644511077794, 6.318794326241134, 4.130637254901961, 3.388466183574879, 1.6801136363636358, 0.0), # 50
(3.317716951726343, 6.725400426136363, 5.068811005434783, 2.7517069852941174, 1.5765514627659571, 0.0, 4.215781203148426, 6.306205851063829, 4.127560477941176, 3.3792073369565214, 1.6813501065340908, 0.0), # 51
(3.322596387468031, 6.730237499999999, 5.054440942028985, 2.7495741830065357, 1.573282269503546, 0.0, 4.177933887223055, 6.293129078014184, 4.124361274509804, 3.3696272946859898, 1.6825593749999999, 0.0), # 52
(3.3273814338235295, 6.7349649147727275, 5.039607291666666, 2.7473614787581697, 1.5698939273049646, 0.0, 4.139164750957854, 6.279575709219858, 4.121042218137255, 3.359738194444444, 1.6837412286931819, 0.0), # 53
(3.332075447570333, 6.739581818181817, 5.024328260869565, 2.745070588235294, 1.5663893617021276, 0.0, 4.099535982008995, 6.2655574468085105, 4.117605882352941, 3.3495521739130427, 1.6848954545454542, 0.0), # 54
(3.336681785485933, 6.744087357954545, 5.008622056159419, 2.7427032271241827, 1.5627714982269503, 0.0, 4.05910976803265, 6.251085992907801, 4.114054840686275, 3.3390813707729463, 1.6860218394886362, 0.0), # 55
(3.341203804347826, 6.74848068181818, 4.9925068840579705, 2.740261111111111, 1.5590432624113475, 0.0, 4.017948296684991, 6.23617304964539, 4.110391666666667, 3.328337922705314, 1.687120170454545, 0.0), # 56
(3.345644860933504, 6.752760937500001, 4.976000951086956, 2.7377459558823527, 1.5552075797872338, 0.0, 3.9761137556221886, 6.220830319148935, 4.106618933823529, 3.317333967391304, 1.6881902343750002, 0.0), # 57
(3.3500083120204605, 6.756927272727271, 4.959122463768115, 2.7351594771241827, 1.5512673758865245, 0.0, 3.9336683325004165, 6.205069503546098, 4.102739215686275, 3.3060816425120767, 1.6892318181818178, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_allighting_rate = (
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 258194110137029475889902652135037600173
#index for seed sequence child
child_seed_index = (
1, # 0
32, # 1
)
| 112.531343
| 215
| 0.727731
| 5,147
| 37,698
| 5.327958
| 0.216242
| 0.315064
| 0.249426
| 0.472596
| 0.333406
| 0.330817
| 0.329942
| 0.329942
| 0.329942
| 0.329942
| 0
| 0.817985
| 0.119741
| 37,698
| 334
| 216
| 112.868263
| 0.008408
| 0.032124
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a05fcd29ab90068ac11128039642865053e52f6f
| 23
|
py
|
Python
|
yamwapi/__init__.py
|
eggpi/mwapi
|
8a03ff8fe0e9578d444302b1c3b15695b9b4589b
|
[
"MIT"
] | 2
|
2018-05-10T20:46:17.000Z
|
2019-12-11T13:15:56.000Z
|
yamwapi/__init__.py
|
eggpi/yamwapi
|
8a03ff8fe0e9578d444302b1c3b15695b9b4589b
|
[
"MIT"
] | null | null | null |
yamwapi/__init__.py
|
eggpi/yamwapi
|
8a03ff8fe0e9578d444302b1c3b15695b9b4589b
|
[
"MIT"
] | null | null | null |
from .yamwapi import *
| 11.5
| 22
| 0.73913
| 3
| 23
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0a3f7d93e66483ca7d59f2700d6128c603d7c44
| 106
|
py
|
Python
|
nnlib/datasets/__init__.py
|
AleksaC/nnlib
|
5ad0fd570471626e9994100c844e1ed1493d94bd
|
[
"MIT"
] | 5
|
2019-07-09T20:56:10.000Z
|
2020-02-13T19:31:47.000Z
|
nnlib/datasets/__init__.py
|
AleksaC/nnlib
|
5ad0fd570471626e9994100c844e1ed1493d94bd
|
[
"MIT"
] | 1
|
2021-06-01T23:59:21.000Z
|
2021-06-01T23:59:21.000Z
|
nnlib/datasets/__init__.py
|
AleksaC/nnlib
|
5ad0fd570471626e9994100c844e1ed1493d94bd
|
[
"MIT"
] | 1
|
2019-08-19T11:00:55.000Z
|
2019-08-19T11:00:55.000Z
|
from . import mnist
from . import cifar10
from . import cifar100
from . import housing
from . import imdb
| 17.666667
| 22
| 0.764151
| 15
| 106
| 5.4
| 0.466667
| 0.617284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 0.188679
| 106
| 5
| 23
| 21.2
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0b8522d74cc2abca062b681974ac28c4be19053
| 27
|
py
|
Python
|
scout/server/blueprints/diagnoses/__init__.py
|
gmc-norr/scout
|
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
|
[
"BSD-3-Clause"
] | 111
|
2015-01-15T11:53:20.000Z
|
2022-03-26T19:55:24.000Z
|
scout/server/blueprints/diagnoses/__init__.py
|
gmc-norr/scout
|
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
|
[
"BSD-3-Clause"
] | 2,995
|
2015-01-15T16:14:20.000Z
|
2022-03-31T13:36:32.000Z
|
scout/server/blueprints/diagnoses/__init__.py
|
gmc-norr/scout
|
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
|
[
"BSD-3-Clause"
] | 55
|
2015-05-31T19:09:49.000Z
|
2021-11-01T10:50:31.000Z
|
from .views import omim_bp
| 13.5
| 26
| 0.814815
| 5
| 27
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a0c4ce48fb652ba016f6a2a9fadee2d350a3ca1b
| 5,624
|
py
|
Python
|
tests/test_reward.py
|
Spectre5/apologies
|
3b66ccf261e913b3b02725b6053684a53b6737da
|
[
"Apache-2.0"
] | 24
|
2020-06-15T16:38:27.000Z
|
2022-03-21T00:34:14.000Z
|
tests/test_reward.py
|
Spectre5/apologies
|
3b66ccf261e913b3b02725b6053684a53b6737da
|
[
"Apache-2.0"
] | 10
|
2020-12-25T17:36:31.000Z
|
2021-02-08T00:33:47.000Z
|
tests/test_reward.py
|
Spectre5/apologies
|
3b66ccf261e913b3b02725b6053684a53b6737da
|
[
"Apache-2.0"
] | 9
|
2020-12-25T01:34:11.000Z
|
2022-02-17T19:06:27.000Z
|
# -*- coding: utf-8 -*-
# vim: set ft=python ts=4 sw=4 expandtab:
from apologies.game import Game, PlayerColor
from apologies.reward import RewardCalculatorV1
class TestRewardCalculatorV1:
"""
Unit tests for RewardCalculatorV1.
"""
def test_range(self):
assert RewardCalculatorV1().range(2) == (0, 400)
assert RewardCalculatorV1().range(3) == (0, 800)
assert RewardCalculatorV1().range(4) == (0, 1200)
def test_empty_game(self):
for playercount in [2, 3, 4]:
for color in list(PlayerColor)[:playercount]:
game = Game(playercount=playercount)
view = game.create_player_view(color)
assert RewardCalculatorV1().calculate(view) == 0 # score is always zero if all pawns are in start
def test_equivalent_state(self):
game = Game(playercount=4)
game.players[PlayerColor.RED].pawns[0].position.move_to_square(4)
game.players[PlayerColor.YELLOW].pawns[0].position.move_to_square(34)
game.players[PlayerColor.GREEN].pawns[0].position.move_to_square(49)
game.players[PlayerColor.BLUE].pawns[0].position.move_to_square(19)
for color in PlayerColor:
view = game.create_player_view(color)
assert RewardCalculatorV1().calculate(view) == 0 # score is always zero if all players are equivalent
def test_safe_zone(self):
game = Game(playercount=4)
game.players[PlayerColor.RED].pawns[0].position.move_to_safe(4) # last safe square before home
view = game.create_player_view(PlayerColor.RED)
assert RewardCalculatorV1().calculate(view) == 222
for color in [PlayerColor.BLUE, PlayerColor.YELLOW, PlayerColor.GREEN]:
view = game.create_player_view(color)
assert RewardCalculatorV1().calculate(view) == 0 # score is always zero if all pawns are in start
def test_winner(self):
game = Game(playercount=2)
game.players[PlayerColor.RED].pawns[0].position.move_to_home()
game.players[PlayerColor.RED].pawns[1].position.move_to_home()
game.players[PlayerColor.RED].pawns[2].position.move_to_home()
game.players[PlayerColor.RED].pawns[3].position.move_to_home()
view = game.create_player_view(PlayerColor.RED)
assert RewardCalculatorV1().calculate(view) == 400
for color in [PlayerColor.YELLOW]:
view = game.create_player_view(color)
assert RewardCalculatorV1().calculate(view) == 0 # score is always zero if all pawns are in start
game = Game(playercount=3)
game.players[PlayerColor.RED].pawns[0].position.move_to_home()
game.players[PlayerColor.RED].pawns[1].position.move_to_home()
game.players[PlayerColor.RED].pawns[2].position.move_to_home()
game.players[PlayerColor.RED].pawns[3].position.move_to_home()
view = game.create_player_view(PlayerColor.RED)
assert RewardCalculatorV1().calculate(view) == 800
for color in [PlayerColor.YELLOW, PlayerColor.GREEN]:
view = game.create_player_view(color)
assert RewardCalculatorV1().calculate(view) == 0 # score is always zero if all pawns are in start
game = Game(playercount=4)
game.players[PlayerColor.RED].pawns[0].position.move_to_home()
game.players[PlayerColor.RED].pawns[1].position.move_to_home()
game.players[PlayerColor.RED].pawns[2].position.move_to_home()
game.players[PlayerColor.RED].pawns[3].position.move_to_home()
view = game.create_player_view(PlayerColor.RED)
assert RewardCalculatorV1().calculate(view) == 1200
for color in [PlayerColor.YELLOW, PlayerColor.GREEN, PlayerColor.BLUE]:
view = game.create_player_view(color)
assert RewardCalculatorV1().calculate(view) == 0 # score is always zero if all pawns are in start
def test_arbitrary(self):
    """Spot-check rewards for a hand-built mid-game board position."""
    game = Game(playercount=4)

    red = game.players[PlayerColor.RED].pawns
    red[0].position.move_to_home()
    red[1].position.move_to_safe(0)
    red[2].position.move_to_square(6)
    red[3].position.move_to_square(10)

    yellow = game.players[PlayerColor.YELLOW].pawns
    yellow[0].position.move_to_square(34)
    yellow[1].position.move_to_square(32)
    yellow[2].position.move_to_start()
    yellow[3].position.move_to_home()

    green = game.players[PlayerColor.GREEN].pawns
    green[0].position.move_to_start()
    green[1].position.move_to_start()
    green[2].position.move_to_square(59)
    green[3].position.move_to_start()

    blue = game.players[PlayerColor.BLUE].pawns
    blue[0].position.move_to_start()
    blue[1].position.move_to_start()
    blue[2].position.move_to_start()
    blue[3].position.move_to_start()

    # Expected reward for each player's view of this position.
    expected = {
        PlayerColor.RED: 319,
        PlayerColor.YELLOW: 239,
        PlayerColor.GREEN: 0,
        PlayerColor.BLUE: 0,
    }
    for color, reward in expected.items():
        view = game.create_player_view(color)
        assert RewardCalculatorV1().calculate(view) == reward
| 50.214286
| 114
| 0.690612
| 715
| 5,624
| 5.288112
| 0.113287
| 0.096006
| 0.192013
| 0.119016
| 0.820947
| 0.770696
| 0.72256
| 0.703782
| 0.703782
| 0.559111
| 0
| 0.027656
| 0.1899
| 5,624
| 111
| 115
| 50.666667
| 0.802239
| 0.073257
| 0
| 0.436782
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195402
| 1
| 0.068966
| false
| 0
| 0.022989
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
26324cf1f842cd43422c390516e1297b9e5b749e
| 171
|
py
|
Python
|
aiosnow/models/_schema/fields/string.py
|
michaeldcanady/aiosnow
|
db515b1560d651fc7696a184990c2a2d68db8961
|
[
"MIT"
] | 38
|
2020-08-03T17:58:48.000Z
|
2022-03-30T19:39:24.000Z
|
aiosnow/models/_schema/fields/string.py
|
michaeldcanady/aiosnow
|
db515b1560d651fc7696a184990c2a2d68db8961
|
[
"MIT"
] | 34
|
2020-01-20T10:11:46.000Z
|
2020-06-05T21:25:23.000Z
|
aiosnow/models/_schema/fields/string.py
|
michaeldcanady/aiosnow
|
db515b1560d651fc7696a184990c2a2d68db8961
|
[
"MIT"
] | 5
|
2021-03-26T19:35:20.000Z
|
2022-01-23T20:09:55.000Z
|
import marshmallow
from aiosnow.query import StringQueryable
from .base import BaseField
class String(marshmallow.fields.String, BaseField, StringQueryable):
    """String field: marshmallow string (de)serialization combined with
    the project's base-field behavior and string query operators."""
| 17.1
| 68
| 0.812865
| 19
| 171
| 7.315789
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134503
| 171
| 9
| 69
| 19
| 0.939189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
265225df7750796ccf70a1421b26d09d20b15a57
| 849
|
py
|
Python
|
src/tests/test_exceptions.py
|
drygdryg/yandex-speechkit-lib-python
|
b45bbb6550d5bb77289ff0462e794cbc8b0669a0
|
[
"MIT"
] | 2
|
2021-12-13T20:29:00.000Z
|
2022-01-28T13:02:54.000Z
|
src/tests/test_exceptions.py
|
drygdryg/yandex-speechkit-lib-python
|
b45bbb6550d5bb77289ff0462e794cbc8b0669a0
|
[
"MIT"
] | 1
|
2021-07-26T00:36:18.000Z
|
2021-07-26T13:52:59.000Z
|
src/tests/test_exceptions.py
|
drygdryg/yandex-speechkit-lib-python
|
b45bbb6550d5bb77289ff0462e794cbc8b0669a0
|
[
"MIT"
] | 2
|
2022-02-08T12:39:26.000Z
|
2022-03-12T19:08:18.000Z
|
import unittest
from speechkit.exceptions import RequestError
class RequestErrorTestCase(unittest.TestCase):
    """Tests that RequestError parses both error-payload key conventions."""

    def _assert_parsed(self, payload):
        """Raise RequestError with `payload` and verify the parsed fields."""
        with self.assertRaises(RequestError) as cm:
            raise RequestError(payload)
        the_exception = cm.exception
        self.assertEqual(the_exception.error_code, '3')
        self.assertEqual(the_exception.message, 'message')
        self.assertEqual(str(the_exception), '3 message')

    def test_raise_data_1(self):
        """Payload using the 'code'/'message' keys."""
        self._assert_parsed({'code': 3, 'message': 'message'})

    def test_raise_data_2(self):
        """Payload using the 'error_code'/'error_message' keys."""
        self._assert_parsed({'error_code': 3, 'error_message': 'message'})
| 35.375
| 77
| 0.69258
| 97
| 849
| 5.876289
| 0.28866
| 0.168421
| 0.126316
| 0.189474
| 0.736842
| 0.736842
| 0.736842
| 0.736842
| 0.736842
| 0.536842
| 0
| 0.011747
| 0.19788
| 849
| 23
| 78
| 36.913043
| 0.825257
| 0
| 0
| 0.588235
| 0
| 0
| 0.096584
| 0
| 0
| 0
| 0
| 0
| 0.470588
| 1
| 0.117647
| false
| 0
| 0.117647
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
26893ccd73afaf4edc47a6bf15ee0d53feabb370
| 2,642
|
py
|
Python
|
integration_tests/cirrus_tests/test_voting_endpoints.py
|
madrazzl3/pystratis
|
8b78552e753ae1d12f2afb39e9a322a270fbb7b3
|
[
"MIT"
] | null | null | null |
integration_tests/cirrus_tests/test_voting_endpoints.py
|
madrazzl3/pystratis
|
8b78552e753ae1d12f2afb39e9a322a270fbb7b3
|
[
"MIT"
] | null | null | null |
integration_tests/cirrus_tests/test_voting_endpoints.py
|
madrazzl3/pystratis
|
8b78552e753ae1d12f2afb39e9a322a270fbb7b3
|
[
"MIT"
] | null | null | null |
import pytest
from pystratis.api.voting.requestmodels import VoteKey
from pystratis.api.voting.responsemodels import *
from pystratis.nodes import CirrusMinerNode
from pystratis.api.global_responsemodels import PollViewModel
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_executed_polls(cirrusminer_node: CirrusMinerNode, generate_compressed_pubkey):
    """Executed-polls endpoint returns a list of PollViewModel items."""
    polls = cirrusminer_node.voting.executed_polls(
        vote_type=VoteKey.KickFederationMember,
        pubkey_of_member_being_voted_on=generate_compressed_pubkey
    )
    assert isinstance(polls, list)
    assert all(isinstance(poll, PollViewModel) for poll in polls)
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_pending_polls(cirrusminer_node: CirrusMinerNode, generate_compressed_pubkey):
    """Pending-polls endpoint returns a list of PollViewModel items."""
    polls = cirrusminer_node.voting.pending_polls(
        vote_type=VoteKey.KickFederationMember,
        pubkey_of_member_being_voted_on=generate_compressed_pubkey
    )
    assert isinstance(polls, list)
    assert all(isinstance(poll, PollViewModel) for poll in polls)
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_finished_polls(cirrusminer_node: CirrusMinerNode, generate_compressed_pubkey):
    """Finished-polls endpoint returns a list of PollViewModel items."""
    polls = cirrusminer_node.voting.finished_polls(
        vote_type=VoteKey.KickFederationMember,
        pubkey_of_member_being_voted_on=generate_compressed_pubkey
    )
    assert isinstance(polls, list)
    assert all(isinstance(poll, PollViewModel) for poll in polls)
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_scheduledvote_whitelisthash(cirrusminer_node: CirrusMinerNode, generate_uint256):
    """Scheduling a whitelist-hash vote completes without raising."""
    cirrusminer_node.voting.schedulevote_whitelisthash(hash_id=generate_uint256)
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_scheduledvote_removehash(cirrusminer_node: CirrusMinerNode, generate_uint256):
    """Scheduling a remove-hash vote completes without raising."""
    cirrusminer_node.voting.schedulevote_removehash(hash_id=generate_uint256)
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_whitelistedhashes(cirrusminer_node: CirrusMinerNode):
    """Whitelisted-hashes endpoint returns a list of WhitelistedHashesModel items."""
    hashes = cirrusminer_node.voting.whitelisted_hashes()
    assert isinstance(hashes, list)
    assert all(isinstance(entry, WhitelistedHashesModel) for entry in hashes)
@pytest.mark.integration_test
@pytest.mark.cirrus_integration_test
def test_scheduledvotes(cirrusminer_node: CirrusMinerNode):
    """Scheduled-votes endpoint returns a list of VotingDataModel items."""
    votes = cirrusminer_node.voting.scheduled_votes()
    assert isinstance(votes, list)
    assert all(isinstance(entry, VotingDataModel) for entry in votes)
| 36.694444
| 90
| 0.817184
| 297
| 2,642
| 6.962963
| 0.191919
| 0.067698
| 0.071083
| 0.084623
| 0.819149
| 0.819149
| 0.819149
| 0.762089
| 0.762089
| 0.686654
| 0
| 0.005159
| 0.119606
| 2,642
| 71
| 91
| 37.211268
| 0.883921
| 0
| 0
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175439
| 1
| 0.122807
| false
| 0
| 0.087719
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
cd5512c1d2a62baf1887da60321c919e905796f6
| 7,748
|
py
|
Python
|
spcdist/scipy_1d.py
|
deep-spin/sparse_continuous_distributions
|
7cc7bc7140738ebd4585d36e47bddd9be6ebed12
|
[
"MIT"
] | 8
|
2021-08-06T18:28:15.000Z
|
2021-08-24T07:16:02.000Z
|
spcdist/scipy_1d.py
|
deep-spin/sparse_continuous_distributions
|
7cc7bc7140738ebd4585d36e47bddd9be6ebed12
|
[
"MIT"
] | null | null | null |
spcdist/scipy_1d.py
|
deep-spin/sparse_continuous_distributions
|
7cc7bc7140738ebd4585d36e47bddd9be6ebed12
|
[
"MIT"
] | null | null | null |
"""Simpler expressions for 1-d special cases."""
import numpy as np
from .scipy import _radius
class EntmaxGaussian1D(object):
    """1D (2-alpha)-Gaussian with parameter alpha.

    The density is

        p(x) = [(alpha-1)*(-tau - .5*(x-mu)**2/sigma_sq)]_+**(1/(alpha-1)).

    If `sigma_sq` is None, the distribution is parametrized by `support_size`
    instead (convenient for uniform distributions, where alpha=inf).
    """

    def __init__(self, alpha, mu=0, sigma_sq=None, support_size=None):
        self._alpha = alpha
        self._R = np.inf if alpha == 1 else _radius(1, alpha)
        self._mu = mu
        if sigma_sq is not None:
            self._sigma_sq = sigma_sq
            self._tau = self._compute_tau()
            self._a = self._compute_a()
        else:
            # Derive sigma_sq from the half-width of the support.
            self._a = support_size / 2
            self._sigma_sq = self._sigma_sq_from_a(self._a)
            self._tau = self._compute_tau()

    def _compute_tau(self):
        """Return the threshold tau in the density expression."""
        exponent = -(self._alpha - 1) / (self._alpha + 1)
        return -0.5 * self._R ** 2 * self._sigma_sq ** exponent

    def _compute_a(self):
        """Return the value a = |x-mu| where the density vanishes."""
        return np.sqrt(-2 * self._tau * self._sigma_sq)

    def _sigma_sq_from_a(self, a):
        return (a / self._R) ** (self._alpha + 1)

    def _sigma_sq_from_variance(self, variance):
        factor = (1 + 2 * self._alpha / (self._alpha - 1)) / self._R ** 2
        return (factor * variance) ** ((self._alpha + 1) / 2)

    def mean(self):
        return self._mu

    def variance(self):
        if self._alpha == np.inf:
            # Uniform on [mu - a, mu + a].
            return self._a ** 2 / 3
        # Equivalently (without tau):
        #   (R**2 / (1 + 2*alpha/(alpha-1))) * sigma_sq ** (2/(alpha+1))
        denom = 1 + 2 * self._alpha / (self._alpha - 1)
        return -2 * self._tau * self._sigma_sq / denom

    def support_size(self):
        return 2 * self._a

    def pdf(self, x):
        """Return the probability density function value for `x`."""
        if self._alpha == np.inf:
            density = np.zeros_like(x)
            inside = (x >= self._mu - self._a) & (x <= self._mu + self._a)
            density[inside] = 1 / self.support_size()
            return density
        score = -self._tau - 0.5 * (x - self._mu) ** 2 / self._sigma_sq
        return np.maximum(0, (self._alpha - 1) * score) ** (1 / (self._alpha - 1))

    def sample(self, m):
        """Generate a random sample of size `m`."""
        raise NotImplementedError

    def tsallis_entropy(self):
        """Compute Tsallis alpha-entropy."""
        raise NotImplementedError
class Gaussian1D(object):
    """1D beta-Gaussian with alpha=1, i.e. an ordinary Gaussian."""

    def __init__(self, mu=0, sigma_sq=1):
        self._alpha = 1
        self._mu = mu
        self._sigma_sq = sigma_sq

    def mean(self):
        return self._mu

    def variance(self):
        return self._sigma_sq

    def pdf(self, x):
        """Gaussian density at `x` with mean mu and variance sigma_sq."""
        z = (x - self._mu) ** 2 / self._sigma_sq
        norm_const = np.sqrt(2 * np.pi * self._sigma_sq)
        return np.exp(-0.5 * z) / norm_const

    def sample(self, m):
        """Generate a random sample of size `m`."""
        raise NotImplementedError

    def tsallis_entropy(self):
        """Compute Tsallis alpha-entropy."""
        raise NotImplementedError
class SparsemaxGaussian1D(object):
    """1D beta-Gaussian with alpha=2 (sparsemax): a zero-clipped parabola."""

    def __init__(self, mu=0, sigma_sq=None, support_size=None):
        self._alpha = 2
        self._R = (3 / 2) ** (1 / 3)
        self._mu = mu
        if sigma_sq is not None:
            self._sigma_sq = sigma_sq
            self._tau = self._compute_tau()
            self._a = self._compute_a()
        else:
            # Parametrized by the support size instead of sigma_sq.
            self._a = support_size / 2
            self._sigma_sq = self._sigma_sq_from_a(self._a)
            self._tau = self._compute_tau()

    def _compute_tau(self):
        return -0.5 * ((3 / 2) ** 2 / self._sigma_sq) ** (1 / 3)

    def _compute_a(self):
        return ((3 / 2) * self._sigma_sq) ** (1 / 3)

    def _sigma_sq_from_a(self, a):
        return (2 / 3) * a ** 3

    def _sigma_sq_from_variance(self, variance):
        return 2 / 3 * (5 * variance) ** (3 / 2)

    def mean(self):
        return self._mu

    def variance(self):
        return ((3 / 2) * self._sigma_sq) ** (2 / 3) / 5

    def support_size(self):
        return 2 * self._a

    def pdf(self, x):
        """Density: the quadratic score, clipped at zero."""
        score = -self._tau - 0.5 * (x - self._mu) ** 2 / self._sigma_sq
        return np.maximum(0, score)

    def sample(self, m):
        """Generate a random sample of size `m`."""
        raise NotImplementedError

    def tsallis_entropy(self):
        """Compute Tsallis alpha-entropy."""
        raise NotImplementedError
class BiweightGaussian1D(object):
    """1D beta-Gaussian with alpha=1.5 (biweight)."""

    def __init__(self, mu=0, sigma_sq=None, support_size=None):
        self._alpha = 1.5
        self._R = _radius(1, self._alpha)  # 15**(1/5)
        self._mu = mu
        if sigma_sq is not None:
            self._sigma_sq = sigma_sq
            self._tau = self._compute_tau()
            self._a = self._compute_a()
        else:
            # Parametrized by the support size instead of sigma_sq.
            self._a = support_size / 2
            self._sigma_sq = self._sigma_sq_from_a(self._a)
            self._tau = self._compute_tau()

    def _compute_tau(self):
        return -0.5 * (15 ** 2 / self._sigma_sq) ** (1 / 5)

    def _compute_a(self):
        return (15 * self._sigma_sq ** 2) ** (1 / 5)

    def _sigma_sq_from_a(self, a):
        return (a / self._R) ** (self._alpha + 1)

    def _sigma_sq_from_variance(self, variance):
        return (1 / 15) ** (1 / 2) * (7 * variance) ** (5 / 4)

    def mean(self):
        return self._mu

    def variance(self):
        denom = 1 + 2 * self._alpha / (self._alpha - 1)
        return -2 * self._tau * self._sigma_sq / denom

    def support_size(self):
        return 2 * self._a

    def pdf(self, x):
        """Density: half the clipped quadratic score, squared."""
        score = -self._tau - 0.5 * (x - self._mu) ** 2 / self._sigma_sq
        return np.maximum(0, 0.5 * score) ** 2

    def sample(self, m):
        """Generate a random sample of size `m`."""
        raise NotImplementedError

    def tsallis_entropy(self):
        """Compute Tsallis alpha-entropy."""
        raise NotImplementedError
class TriweightGaussian1D(object):
    """1D beta-Gaussian with alpha=4/3 (triweight)."""

    def __init__(self, mu=0, sigma_sq=None, support_size=None):
        self._alpha = 4 / 3
        self._R = _radius(1, self._alpha)  # (945/4)**(1/7)
        self._mu = mu
        if sigma_sq is not None:
            self._sigma_sq = sigma_sq
            self._tau = self._compute_tau()
            self._a = self._compute_a()
        else:
            # Parametrized by the support size instead of sigma_sq.
            self._a = support_size / 2
            self._sigma_sq = self._sigma_sq_from_a(self._a)
            self._tau = self._compute_tau()

    def _compute_tau(self):
        return -0.5 * ((945 / 4) ** 2 / self._sigma_sq) ** (1 / 7)

    def _compute_a(self):
        return ((945 / 4) * self._sigma_sq ** 3) ** (1 / 7)

    def _sigma_sq_from_a(self, a):
        return (a / self._R) ** (self._alpha + 1)

    def _sigma_sq_from_variance(self, variance):
        return (4 / 945) ** (1 / 3) * (9 * variance) ** (7 / 6)

    def mean(self):
        return self._mu

    def variance(self):
        denom = 1 + 2 * self._alpha / (self._alpha - 1)
        return -2 * self._tau * self._sigma_sq / denom

    def support_size(self):
        return 2 * self._a

    def pdf(self, x):
        """Density: one third of the clipped quadratic score, cubed."""
        score = -self._tau - 0.5 * (x - self._mu) ** 2 / self._sigma_sq
        return np.maximum(0, (1 / 3) * score) ** 3

    def sample(self, m):
        """Generate a random sample of size `m`."""
        raise NotImplementedError

    def tsallis_entropy(self):
        """Compute Tsallis alpha-entropy."""
        raise NotImplementedError
| 30.868526
| 79
| 0.564791
| 1,075
| 7,748
| 3.794419
| 0.093953
| 0.097818
| 0.088992
| 0.044128
| 0.769306
| 0.701888
| 0.691591
| 0.676391
| 0.647708
| 0.647708
| 0
| 0.033994
| 0.29001
| 7,748
| 250
| 80
| 30.992
| 0.707508
| 0.152556
| 0
| 0.72619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.297619
| false
| 0
| 0.011905
| 0.184524
| 0.559524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
cd6532fdadf709ff6b5f0cfdbc0e83c886fdf047
| 147
|
py
|
Python
|
backend/durhack/usersettings/models.py
|
OhmGeek/Durhack
|
41711e7cd449e69f072265651a3fc4b5441dddeb
|
[
"MIT"
] | null | null | null |
backend/durhack/usersettings/models.py
|
OhmGeek/Durhack
|
41711e7cd449e69f072265651a3fc4b5441dddeb
|
[
"MIT"
] | 3
|
2020-02-11T23:38:40.000Z
|
2021-03-19T21:53:23.000Z
|
backend/durhack/usersettings/models.py
|
OhmGeek/Durhack
|
41711e7cd449e69f072265651a3fc4b5441dddeb
|
[
"MIT"
] | null | null | null |
""" User settings, and the users themselves """
from django.db import models
# Create your models here.
class Settings(models.Model):
    """User settings model — currently an empty placeholder with no fields."""

    pass
| 14.7
| 47
| 0.714286
| 20
| 147
| 5.25
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 147
| 9
| 48
| 16.333333
| 0.882353
| 0.44898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
26d4a70ee1a881e988d0ff14e8b381e5beb2252f
| 30
|
py
|
Python
|
dot/__init__.py
|
seperman/dotobject
|
ca8583891d37c80e7816fd77a2a008ad8536604b
|
[
"BSD-3-Clause"
] | 8
|
2016-08-22T22:26:51.000Z
|
2021-10-31T10:08:15.000Z
|
dot/__init__.py
|
seperman/dotobject
|
ca8583891d37c80e7816fd77a2a008ad8536604b
|
[
"BSD-3-Clause"
] | 1
|
2016-12-30T07:46:04.000Z
|
2016-12-30T07:47:38.000Z
|
dot/__init__.py
|
seperman/dotobject
|
ca8583891d37c80e7816fd77a2a008ad8536604b
|
[
"BSD-3-Clause"
] | 1
|
2016-06-19T12:36:49.000Z
|
2016-06-19T12:36:49.000Z
|
from .dot import Dot, LazyDot
| 15
| 29
| 0.766667
| 5
| 30
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 1
| 30
| 30
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
26fd30c9a46f5cd0e108ef09208f1a3226e9ea5b
| 39
|
py
|
Python
|
DTL/django/__init__.py
|
rocktavious/DevToolsLib
|
117200c91a3361e04f7c8e07d2ed4999bbcfc469
|
[
"MIT"
] | 1
|
2015-03-23T18:52:12.000Z
|
2015-03-23T18:52:12.000Z
|
DTL/django/__init__.py
|
rocktavious/DevToolsLib
|
117200c91a3361e04f7c8e07d2ed4999bbcfc469
|
[
"MIT"
] | null | null | null |
DTL/django/__init__.py
|
rocktavious/DevToolsLib
|
117200c91a3361e04f7c8e07d2ed4999bbcfc469
|
[
"MIT"
] | 2
|
2017-05-21T12:50:41.000Z
|
2021-10-17T03:32:45.000Z
|
from .app import App
import djangoUtils
| 19.5
| 20
| 0.846154
| 6
| 39
| 5.5
| 0.666667
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 2
| 21
| 19.5
| 0.970588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
26fd5e8bb987d9d15422a2ce456f104156b838b0
| 14,643
|
py
|
Python
|
core/layers.py
|
csm9493/FBI-Denoiser
|
f5581326c580c9847a953577f72241dafa09218a
|
[
"MIT"
] | 31
|
2021-04-14T08:48:22.000Z
|
2022-03-30T03:13:41.000Z
|
core/layers.py
|
csm9493/FBI-Denoiser
|
f5581326c580c9847a953577f72241dafa09218a
|
[
"MIT"
] | 3
|
2021-06-22T19:20:05.000Z
|
2022-03-14T12:34:59.000Z
|
core/layers.py
|
csm9493/FBI-Denoiser
|
f5581326c580c9847a953577f72241dafa09218a
|
[
"MIT"
] | 9
|
2021-04-20T07:33:38.000Z
|
2022-03-14T12:35:47.000Z
|
import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F
class New1(nn.Module):
    """3x3 masked conv whose center weight is zeroed out."""

    def __init__(self, in_ch, out_ch):
        super(New1, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, padding=1, kernel_size=3)

    def forward(self, x):
        # Re-zero the masked positions on every call, since optimizer updates
        # may have changed them since the previous forward pass.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class New2(nn.Module):
    """5x5 masked conv keeping a sparse ring of taps plus the center."""

    def __init__(self, in_ch, out_ch):
        super(New2, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array(
            [[0, 1, 0, 1, 0],
             [1, 0, 0, 0, 1],
             [0, 0, 1, 0, 0],
             [1, 0, 0, 0, 1],
             [0, 1, 0, 1, 0]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, padding=2, kernel_size=5)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class New3(nn.Module):
    """Dilated 3x3 masked conv keeping only the diagonal taps and the center."""

    def __init__(self, in_ch, out_ch, dilated_value):
        super(New3, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[1, 0, 1], [0, 1, 0], [1, 0, 1]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, kernel_size=3,
                               padding=dilated_value, dilation=dilated_value)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class Residual_module(nn.Module):
    """Two 1x1 convs with PReLUs; output averages the input and the branch."""

    def __init__(self, in_ch, mul=1):
        super(Residual_module, self).__init__()
        self.activation1 = nn.PReLU(in_ch * mul, 0).cuda()
        self.activation2 = nn.PReLU(in_ch, 0).cuda()
        self.conv1_1by1 = nn.Conv2d(in_channels=in_ch, out_channels=in_ch * mul, kernel_size=1)
        self.conv2_1by1 = nn.Conv2d(in_channels=in_ch * mul, out_channels=in_ch, kernel_size=1)

    def forward(self, input):
        branch = self.conv1_1by1(input)
        branch = self.activation1(branch)
        branch = self.conv2_1by1(branch)
        # Average (rather than sum) the skip connection and the branch.
        return self.activation2((input + branch) / 2.)
class Gaussian(nn.Module):
    """Elementwise Gaussian activation: exp(-x^2)."""

    def forward(self, input):
        return torch.exp(-input * input)
class Receptive_attention(nn.Module):
    """Weights the stacked receptive-field branches by pooled per-branch scores.

    Scores come from a 1x1-conv bottleneck; they are normalized either with
    softmax or with gaussian-then-sigmoid, depending on `at_type`.
    """

    def __init__(self, in_ch, at_type='softmax'):
        super(Receptive_attention, self).__init__()
        self.activation1 = nn.ReLU().cuda()
        self.activation2 = nn.ReLU().cuda()
        self.activation3 = nn.PReLU(in_ch, 0).cuda()
        self.conv1_1by1 = nn.Conv2d(in_channels=in_ch, out_channels=in_ch * 4, kernel_size=1)
        self.conv2_1by1 = nn.Conv2d(in_channels=in_ch * 4, out_channels=in_ch * 4, kernel_size=1)
        self.conv3_1by1 = nn.Conv2d(in_channels=in_ch * 4, out_channels=9, kernel_size=1)
        self.at_type = at_type
        if at_type == 'softmax':
            self.softmax = nn.Softmax()
        else:
            self.gaussian = Gaussian()
            self.sigmoid = nn.Sigmoid()

    def forward(self, input, receptive):
        # Shared bottleneck: both branches of the original used the exact same
        # conv/ReLU/pool stack, so it is computed once here.
        scores = self.conv1_1by1(input)
        scores = self.activation1(scores)
        scores = self.conv2_1by1(scores)
        scores = self.activation2(scores)
        scores = self.conv3_1by1(scores)
        scores = F.adaptive_avg_pool2d(scores, (1, 1))
        if self.at_type == 'softmax':
            weights = self.softmax(scores).permute((1, 0, 2, 3)).unsqueeze(-1)
        else:
            weights = self.sigmoid(self.gaussian(scores)).permute((1, 0, 2, 3)).unsqueeze(-1)
        output = torch.sum(receptive * weights, dim=0)
        return self.activation3(output)
class New1_layer(nn.Module):
    """First layer: New1 masked conv + PReLU, optionally with a residual module."""

    # Cases whose forward pass routes through the residual module.
    _RESIDUAL_CASES = ('case1', 'case2', 'case7', 'FBI_Net')

    def __init__(self, in_ch, out_ch, case='FBI_Net', mul=1):
        super(New1_layer, self).__init__()
        self.case = case
        self.new1 = New1(in_ch, out_ch).cuda()
        if case in self._RESIDUAL_CASES:
            self.residual_module = Residual_module(out_ch, mul)
        self.activation_new1 = nn.PReLU(in_ch, 0).cuda()

    def forward(self, x):
        out = self.new1(x)
        if self.case in self._RESIDUAL_CASES:
            # Note: the *activated* tensor is also returned as the second value.
            out = self.activation_new1(out)
            return self.residual_module(out), out
        # final model: no residual module; second return value is pre-activation
        return self.activation_new1(out), out
class New2_layer(nn.Module):
    """Second layer: New2 masked conv with case-dependent skip/residual wiring."""

    def __init__(self, in_ch, out_ch, case='FBI_Net', mul=1):
        super(New2_layer, self).__init__()
        self.case = case
        self.new2 = New2(in_ch, out_ch).cuda()
        self.activation_new1 = nn.PReLU(in_ch, 0).cuda()
        if case in ('case1', 'case2', 'case7', 'FBI_Net'):
            self.residual_module = Residual_module(out_ch, mul)
        if case in ('case1', 'case3', 'case6', 'FBI_Net'):
            self.activation_new2 = nn.PReLU(in_ch, 0).cuda()

    def forward(self, x, output_new):
        case = self.case
        if case in ('case2', 'case7'):
            # Conv on x, then residual module; no skip-connection averaging.
            out = self.activation_new1(self.new2(x))
            return self.residual_module(out), out
        if case in ('case4', 'case5'):
            # Conv on x only.
            out = self.activation_new1(self.new2(x))
            return out, out
        # case1, case3, case6 and the final model: conv on the previous
        # layer's output, then average with the skip connection x.
        out_conv = self.activation_new1(self.new2(output_new))
        out = self.activation_new2((out_conv + x) / 2.)
        if case in ('case3', 'case6'):
            return out, out_conv
        return self.residual_module(out), out_conv
class New3_layer(nn.Module):
    """Third layer: dilated New3 masked conv with case-dependent wiring."""

    def __init__(self, in_ch, out_ch, dilated_value=3, case='FBI_Net', mul=1):
        super(New3_layer, self).__init__()
        self.case = case
        self.new3 = New3(in_ch, out_ch, dilated_value).cuda()
        self.activation_new1 = nn.PReLU(in_ch, 0).cuda()
        if case in ('case1', 'case2', 'case7', 'FBI_Net'):
            self.residual_module = Residual_module(out_ch, mul)
        if case in ('case1', 'case3', 'case6', 'FBI_Net'):
            self.activation_new2 = nn.PReLU(in_ch, 0).cuda()

    def forward(self, x, output_new):
        case = self.case
        if case in ('case2', 'case7'):
            # Conv on x, then residual module; no skip-connection averaging.
            out = self.activation_new1(self.new3(x))
            return self.residual_module(out), out
        if case in ('case4', 'case5'):
            # Conv on x only.
            out = self.activation_new1(self.new3(x))
            return out, out
        # case1, case3, case6 and the final model: conv on the previous
        # layer's output, then average with the skip connection x.
        out_conv = self.activation_new1(self.new3(output_new))
        out = self.activation_new2((out_conv + x) / 2.)
        if case in ('case3', 'case6'):
            return out, out_conv
        return self.residual_module(out), out_conv
class Q1(nn.Module):
    """3x3 masked conv keeping only the taps above-left of center."""

    def __init__(self, in_ch, out_ch):
        super(Q1, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[1, 1, 0], [1, 0, 0], [0, 0, 0]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, padding=1, kernel_size=3)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class Q2(nn.Module):
    """Dilated 3x3 masked conv keeping the upper-left triangle (incl. center)."""

    def __init__(self, in_ch, out_ch, dilated_value):
        super(Q2, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[1, 1, 1], [1, 1, 0], [1, 0, 0]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, kernel_size=3,
                               padding=dilated_value, dilation=dilated_value)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class E1(nn.Module):
    """3x3 masked conv keeping only the taps above-right of center."""

    def __init__(self, in_ch, out_ch):
        super(E1, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, padding=1, kernel_size=3)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class E2(nn.Module):
    """Dilated 3x3 masked conv keeping the upper-right triangle (incl. center)."""

    def __init__(self, in_ch, out_ch, dilated_value):
        super(E2, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[1, 1, 1], [0, 1, 1], [0, 0, 1]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, kernel_size=3,
                               padding=dilated_value, dilation=dilated_value)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class D1(nn.Module):
    """3x3 masked conv keeping only the bottom row of taps."""

    def __init__(self, in_ch, out_ch):
        super(D1, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[0, 0, 0], [0, 0, 0], [1, 1, 1]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, padding=1, kernel_size=3)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class D2(nn.Module):
    """Dilated 3x3 masked conv keeping the bottom two rows of taps."""

    def __init__(self, in_ch, out_ch, dilated_value):
        super(D2, self).__init__()
        # NOTE(review): mask is placed on the GPU unconditionally — assumes CUDA.
        pattern = np.array([[0, 0, 0], [1, 1, 1], [1, 1, 1]], dtype=np.float32)
        self.mask = torch.from_numpy(pattern).cuda()
        self.conv1 = nn.Conv2d(in_channels=in_ch, out_channels=out_ch, kernel_size=3,
                               padding=dilated_value, dilation=dilated_value)

    def forward(self, x):
        # Re-zero the masked positions on every call.
        self.conv1.weight.data = self.conv1.weight * self.mask
        return self.conv1(x)
class QED_first_layer(nn.Module):
    """Applies the three first-layer masked convs (Q1, E1, D1) in parallel."""

    def __init__(self, in_ch, out_ch):
        super(QED_first_layer, self).__init__()
        self.q1 = Q1(in_ch, out_ch)
        self.e1 = E1(in_ch, out_ch)
        self.d1 = D1(in_ch, out_ch)

    def forward(self, x):
        # One output per directional branch, in Q/E/D order.
        return [self.q1(x), self.e1(x), self.d1(x)]
class QED_layer(nn.Module):
    """PReLU followed by a dilated masked conv (Q2/E2/D2), applied branch-wise."""

    def __init__(self, in_ch, out_ch, dilated_value):
        super(QED_layer, self).__init__()
        self.q2_prelu = nn.PReLU(in_ch, 0).cuda()
        self.e2_prelu = nn.PReLU(in_ch, 0).cuda()
        self.d2_prelu = nn.PReLU(in_ch, 0).cuda()
        self.q2 = Q2(in_ch, out_ch, dilated_value)
        self.e2 = E2(in_ch, out_ch, dilated_value)
        self.d2 = D2(in_ch, out_ch, dilated_value)

    def forward(self, inputs):
        # inputs is the [Q, E, D] branch list from the previous QED layer.
        return [
            self.q2(self.q2_prelu(inputs[0])),
            self.e2(self.e2_prelu(inputs[1])),
            self.d2(self.d2_prelu(inputs[2])),
        ]
| 34.535377
| 172
| 0.580824
| 1,933
| 14,643
| 4.143301
| 0.056389
| 0.02697
| 0.029717
| 0.025846
| 0.838307
| 0.813585
| 0.789612
| 0.757648
| 0.742914
| 0.719316
| 0
| 0.046956
| 0.298982
| 14,643
| 423
| 173
| 34.617021
| 0.733268
| 0.008537
| 0
| 0.639286
| 0
| 0
| 0.016833
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117857
| false
| 0
| 0.014286
| 0.003571
| 0.292857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f8138072b87aff489b1a5442f999cbc6585a5b9c
| 45,137
|
py
|
Python
|
tests/test_tiff2jp2.py
|
musicinmybrain/glymur
|
8789b39ecf681e44be52b883c70175fa0c3e3413
|
[
"MIT"
] | null | null | null |
tests/test_tiff2jp2.py
|
musicinmybrain/glymur
|
8789b39ecf681e44be52b883c70175fa0c3e3413
|
[
"MIT"
] | null | null | null |
tests/test_tiff2jp2.py
|
musicinmybrain/glymur
|
8789b39ecf681e44be52b883c70175fa0c3e3413
|
[
"MIT"
] | null | null | null |
# standard library imports
import importlib.resources as ir
import logging
import pathlib
import shutil
import sys
import tempfile
import unittest
from uuid import UUID
import warnings
# 3rd party library imports
import numpy as np
# Local imports
import glymur
from glymur import Jp2k, Tiff2Jp2k, command_line
from . import fixtures
from .fixtures import OPENJPEG_NOT_AVAILABLE, OPENJPEG_NOT_AVAILABLE_MSG
from glymur.lib import tiff as libtiff
@unittest.skipIf(
not fixtures.HAVE_SCIKIT_IMAGE, fixtures.HAVE_SCIKIT_IMAGE_MSG
)
@unittest.skipIf(OPENJPEG_NOT_AVAILABLE, OPENJPEG_NOT_AVAILABLE_MSG)
class TestSuite(fixtures.TestCommon):
    @classmethod
    def setup_minisblack_spp1(cls, path):
        """
        SCENARIO: create a simple monochromatic 2x2 tiled image

        Writes the moon image as four DEFLATE-compressed tiles, then reads
        the file back so the stored reference data reflects the TIFF
        round trip.
        """
        data = fixtures.skimage.data.moon()
        h, w = data.shape
        th, tw = h // 2, w // 2
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 1)
        # .copy() hands libtiff a contiguous buffer for each tile slice.
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w].copy())
        libtiff.close(fp)
        # now read it back
        fp = libtiff.open(path)
        tile = np.zeros((th, tw), dtype=np.uint8)
        actual_data = np.zeros((h, w), dtype=np.uint8)
        libtiff.readEncodedTile(fp, 0, tile)
        actual_data[:th, :tw] = tile
        libtiff.readEncodedTile(fp, 1, tile)
        actual_data[:th, tw:w] = tile
        libtiff.readEncodedTile(fp, 2, tile)
        actual_data[th:h, :tw] = tile
        libtiff.readEncodedTile(fp, 3, tile)
        actual_data[th:h, tw:w] = tile
        libtiff.close(fp)
        cls.minisblack_spp1_data = actual_data
        cls.minisblack_spp1_path = path
    @classmethod
    def setup_minisblack_2x2_partial_tiles(cls, path):
        """
        SCENARIO: create a simple monochromatic 2x2 tiled image with partial
        tiles.

        480x480 image with 256x256 tiles, so the right/bottom tiles are
        only partially covered by image data.
        """
        data = fixtures.skimage.data.moon()
        h, w = 480, 480
        th, tw = 256, 256
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', h)
        libtiff.setField(fp, 'ImageWidth', w)
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 1)
        # The slices are clipped at h/w, so each tile buffer may be smaller
        # than the declared 256x256 tile size.
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w].copy())
        libtiff.close(fp)
        # Reference data is the cropped source image (no read-back here).
        cls.minisblack_2x2_partial_tiles_data = data[:h, :w]
        cls.minisblack_2x2_partial_tiles_path = path
    @classmethod
    def setup_minisblack_3x3(cls, path):
        """
        SCENARIO: create a simple monochromatic 3x3 tiled image

        The source image is cropped to 480x480 so the 160x160 tiles divide
        it evenly.
        """
        data = fixtures.skimage.data.moon()
        data = data[:480, :480]
        h, w = data.shape
        th, tw = h // 3, w // 3
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 1)
        # Tiles are written row-major: indices 0-2 top row, 3-5 middle,
        # 6-8 bottom.
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:tw * 2].copy())
        libtiff.writeEncodedTile(fp, 2, data[:th, tw * 2:w].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:th * 2, :tw].copy())
        libtiff.writeEncodedTile(fp, 4, data[th:th * 2, tw:tw * 2].copy())
        libtiff.writeEncodedTile(fp, 5, data[th:th * 2, tw * 2:w].copy())
        libtiff.writeEncodedTile(fp, 6, data[2 * th:h, :tw].copy())
        libtiff.writeEncodedTile(fp, 7, data[2 * th:h, tw:tw * 2].copy())
        libtiff.writeEncodedTile(fp, 8, data[2 * th:h, tw * 2:w].copy())
        libtiff.close(fp)
        cls.minisblack_3x3_data = data
        cls.minisblack_3x3_tif = path
    @classmethod
    def setup_minisblack_3strip(cls, path):
        """
        SCENARIO: create a simple monochromatic 3-strip image. The strips
        evenly divide the image.
        """
        data = fixtures.skimage.data.moon()
        data = data[:480, :480]
        h, w = data.shape
        # 480 rows / 3 = 160 rows per strip, an exact division.
        rps = h // 3
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'RowsPerStrip', rps)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 1)
        libtiff.setField(fp, 'PlanarConfig', libtiff.PlanarConfig.CONTIG)
        libtiff.writeEncodedStrip(fp, 0, data[:rps, :].copy())
        libtiff.writeEncodedStrip(fp, 1, data[rps:rps * 2, :].copy())
        libtiff.writeEncodedStrip(fp, 2, data[rps * 2:rps * 3, :].copy())
        libtiff.close(fp)
        cls.minisblack_3_full_strips_path = path
@classmethod
def setup_minisblack_3strip_partial_last_strip(cls, path):
"""
SCENARIO: create a simple monochromatic 3-strip image
"""
data = fixtures.skimage.data.moon()
data = data[:480, :480]
h, w = data.shape
# instead of 160, this will cause a partially empty last strip
rps = 170
fp = libtiff.open(path, mode='w')
libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
libtiff.setField(fp, 'ImageLength', data.shape[0])
libtiff.setField(fp, 'ImageWidth', data.shape[1])
libtiff.setField(fp, 'RowsPerStrip', rps)
libtiff.setField(fp, 'BitsPerSample', 8)
libtiff.setField(fp, 'SamplesPerPixel', 1)
libtiff.setField(fp, 'PlanarConfig', libtiff.PlanarConfig.CONTIG)
libtiff.writeEncodedStrip(fp, 0, data[:rps, :].copy())
libtiff.writeEncodedStrip(fp, 1, data[rps:, :].copy())
data2 = np.vstack((
data[340:480, :], np.zeros((30, 480), dtype=np.uint8)
))
libtiff.writeEncodedStrip(fp, 2, data2)
libtiff.close(fp)
cls.minisblack_3strip_partial_last_strip = path
    @classmethod
    def setup_rgb_uint16(cls, path):
        """
        SCENARIO: create a simple color 2x2 tiled 16bit image

        Writes the astronaut image widened to uint16, then reads it back so
        the stored reference data reflects the TIFF round trip.
        """
        data = fixtures.skimage.data.astronaut().astype(np.uint16)
        h, w, z = data.shape
        th, tw = h // 2, w // 2
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.RGB)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 16)
        libtiff.setField(fp, 'SamplesPerPixel', 3)
        libtiff.setField(fp, 'SampleFormat', libtiff.SampleFormat.UINT)
        libtiff.setField(fp, 'PlanarConfig', libtiff.PlanarConfig.CONTIG)
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w, :].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w, :].copy())
        libtiff.close(fp)
        # now read it back
        fp = libtiff.open(path)
        tile = np.zeros((th, tw, 3), dtype=np.uint16)
        actual_data = np.zeros((h, w, 3), dtype=np.uint16)
        libtiff.readEncodedTile(fp, 0, tile)
        actual_data[:th, :tw, :] = tile
        libtiff.readEncodedTile(fp, 1, tile)
        actual_data[:th, tw:w, :] = tile
        libtiff.readEncodedTile(fp, 2, tile)
        actual_data[th:h, :tw, :] = tile
        libtiff.readEncodedTile(fp, 3, tile)
        actual_data[th:h, tw:w, :] = tile
        libtiff.close(fp)
        cls.astronaut_uint16_data = actual_data
        cls.astronaut_uint16_filename = path
    @classmethod
    def setup_ycbcr_jpeg(cls, path):
        """
        SCENARIO: create a simple color 2x2 tiled image

        YCbCr/JPEG-compressed tiles; the reference data is read back via
        readRGBATile so it includes the lossy JPEG round trip.
        """
        data = fixtures.skimage.data.astronaut()
        h, w, z = data.shape
        th, tw = h // 2, w // 2
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.YCBCR)
        libtiff.setField(fp, 'Compression', libtiff.Compression.JPEG)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 3)
        libtiff.setField(fp, 'PlanarConfig', libtiff.PlanarConfig.CONTIG)
        # NOTE(review): JPEGColorMode is set from a PlanarConfig constant —
        # presumably the integer value coincides with the intended
        # JPEGCOLORMODE constant; verify against the libtiff wrapper.
        libtiff.setField(fp, 'JPEGColorMode', libtiff.PlanarConfig.CONTIG)
        libtiff.setField(fp, 'JPEGQuality', 100)
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w, :].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w, :].copy())
        libtiff.close(fp)
        # now read it back
        fp = libtiff.open(path)
        # readRGBATile yields RGBA rows bottom-up, hence the 4th channel
        # buffer and the [::-1] flip / [:3] channel slice below.
        tile = np.zeros((th, tw, 4), dtype=np.uint8)
        actual_data = np.zeros((h, w, 3), dtype=np.uint8)
        libtiff.readRGBATile(fp, 0, 0, tile)
        actual_data[:th, :tw, :] = tile[::-1, :, :3]
        libtiff.readRGBATile(fp, 256, 0, tile)
        actual_data[:th, tw:w, :] = tile[::-1, :, :3]
        libtiff.readRGBATile(fp, 0, 256, tile)
        actual_data[th:h, :tw, :] = tile[::-1, :, :3]
        libtiff.readRGBATile(fp, 256, 256, tile)
        actual_data[th:h, tw:w, :] = tile[::-1, :, :3]
        libtiff.close(fp)
        cls.astronaut_ycbcr_jpeg_data = actual_data
        cls.astronaut_ycbcr_jpeg_tif = path
    @classmethod
    def setup_rgb_bigtiff(cls, path):
        """
        SCENARIO: create a simple color 2x2 tiled image, bigtiff

        Identical to setup_rgb except the file is opened with mode='w8'
        (BigTIFF).
        """
        data = fixtures.skimage.data.astronaut()
        h, w, z = data.shape
        th, tw = h // 2, w // 2
        # 'w8' selects the BigTIFF (64-bit offset) container.
        fp = libtiff.open(path, mode='w8')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.RGB)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 3)
        libtiff.setField(fp, 'PlanarConfig', libtiff.PlanarConfig.CONTIG)
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w, :].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w, :].copy())
        libtiff.close(fp)
        # now read it back
        fp = libtiff.open(path)
        tile = np.zeros((th, tw, 3), dtype=np.uint8)
        actual_data = np.zeros((h, w, 3), dtype=np.uint8)
        libtiff.readEncodedTile(fp, 0, tile)
        actual_data[:th, :tw, :] = tile
        libtiff.readEncodedTile(fp, 1, tile)
        actual_data[:th, tw:w, :] = tile
        libtiff.readEncodedTile(fp, 2, tile)
        actual_data[th:h, :tw, :] = tile
        libtiff.readEncodedTile(fp, 3, tile)
        actual_data[th:h, tw:w, :] = tile
        libtiff.close(fp)
        cls.rgb_bigtiff_data = actual_data
        cls.rgb_bigtiff = path
    @classmethod
    def setup_rgb(cls, path):
        """
        SCENARIO: create a simple color 2x2 tiled image

        Classic TIFF counterpart of setup_rgb_bigtiff; reference data is
        read back so it reflects the TIFF round trip.
        """
        data = fixtures.skimage.data.astronaut()
        h, w, z = data.shape
        th, tw = h // 2, w // 2
        fp = libtiff.open(path, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.RGB)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 8)
        libtiff.setField(fp, 'SamplesPerPixel', 3)
        libtiff.setField(fp, 'PlanarConfig', libtiff.PlanarConfig.CONTIG)
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w, :].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw, :].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w, :].copy())
        libtiff.close(fp)
        # now read it back
        fp = libtiff.open(path)
        tile = np.zeros((th, tw, 3), dtype=np.uint8)
        actual_data = np.zeros((h, w, 3), dtype=np.uint8)
        libtiff.readEncodedTile(fp, 0, tile)
        actual_data[:th, :tw, :] = tile
        libtiff.readEncodedTile(fp, 1, tile)
        actual_data[:th, tw:w, :] = tile
        libtiff.readEncodedTile(fp, 2, tile)
        actual_data[th:h, :tw, :] = tile
        libtiff.readEncodedTile(fp, 3, tile)
        actual_data[th:h, tw:w, :] = tile
        libtiff.close(fp)
        cls.astronaut_data = actual_data
        cls.astronaut_tif = path
    @classmethod
    def setUpClass(cls):
        """Create all TIFF fixtures once, in a throwaway directory."""
        cls.test_tiff_dir = tempfile.mkdtemp()
        cls.test_tiff_path = pathlib.Path(cls.test_tiff_dir)
        cls.setup_minisblack_spp1(cls.test_tiff_path / 'moon.tif')
        cls.setup_minisblack_3x3(cls.test_tiff_path / 'minisblack_3x3.tif')
        cls.setup_minisblack_3strip(cls.test_tiff_path / 'moon3_stripped.tif')
        path = cls.test_tiff_path / 'moon3_partial_last_strip.tif'
        cls.setup_minisblack_3strip_partial_last_strip(path)
        path = cls.test_tiff_path / 'minisblack_2x2_partial_tiles.tif'
        cls.setup_minisblack_2x2_partial_tiles(path)
        cls.setup_rgb(cls.test_tiff_path / 'astronaut.tif')
        # NOTE: 'rbg' typo in the filename is harmless; tests reference the
        # file only through cls.rgb_bigtiff.
        cls.setup_rgb_bigtiff(cls.test_tiff_path / 'rbg_bigtiff.tif')
        cls.setup_ycbcr_jpeg(
            cls.test_tiff_path / 'astronaut_ycbcr_jpeg_tiled.tif'
        )
        cls.setup_rgb_uint16(cls.test_tiff_path / 'astronaut_uint16.tif')
    @classmethod
    def tearDownClass(cls):
        """Remove the fixture directory created in setUpClass."""
        shutil.rmtree(cls.test_tiff_dir)
    def test_smoke(self):
        """
        SCENARIO: Convert TIFF file to JP2
        EXPECTED RESULT: data matches, number of resolutions is the default.
        There should be just one layer. The number of resolutions should be
        the default (5). There are no PLT segments. There are no EPH
        markers. There are no SOP markers. The progression order is LRCP.
        The irreversible transform will NOT be used. PSNR cannot be tested
        if it is not applied.
        There is a UUID box appended at the end containing the metadata.
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        actual = j[:]
        self.assertEqual(actual.shape, (512, 512, 3))
        # segment[2] holds the default coding-style parameters.
        c = j.get_codestream(header_only=False)
        actual = c.segment[2].code_block_size
        expected = (64, 64)
        self.assertEqual(actual, expected)
        self.assertEqual(c.segment[2].layers, 1)
        self.assertEqual(c.segment[2].num_res, 5)
        at_least_one_eph = any(
            isinstance(seg, glymur.codestream.EPHsegment)
            for seg in c.segment
        )
        self.assertFalse(at_least_one_eph)
        at_least_one_plt = any(
            isinstance(seg, glymur.codestream.PLTsegment)
            for seg in c.segment
        )
        self.assertFalse(at_least_one_plt)
        at_least_one_sop = any(
            isinstance(seg, glymur.codestream.SOPsegment)
            for seg in c.segment
        )
        self.assertFalse(at_least_one_sop)
        self.assertEqual(c.segment[2].prog_order, glymur.core.LRCP)
        self.assertEqual(
            c.segment[2].xform, glymur.core.WAVELET_XFORM_5X3_REVERSIBLE
        )
        # The appended UUID box carries the TIFF metadata (IFD tags).
        self.assertEqual(j.box[-1].box_id, 'uuid')
        self.assertEqual(j.box[-1].data['ImageWidth'], 512)
        self.assertEqual(j.box[-1].data['ImageLength'], 512)
    def test_geotiff(self):
        """
        SCENARIO: Convert GEOTIFF file to JP2
        EXPECTED RESULT: there is a geotiff UUID.
        """
        with warnings.catch_warnings():
            # Warnings raised by the sample file are irrelevant here.
            warnings.simplefilter('ignore')
            with ir.path('tests.data', 'albers27.tif') as path:
                with Tiff2Jp2k(path, self.temp_jp2_filename) as j:
                    j.run()
        j = Jp2k(self.temp_jp2_filename)
        self.assertEqual(j.box[-1].box_id, 'uuid')
        # This fixed UUID identifies a GeoTIFF metadata box.
        self.assertEqual(
            j.box[-1].uuid, UUID('b14bf8bd-083d-4b43-a5ae-8cd7d5a6ce03')
        )
    def test_no_uuid(self):
        """
        SCENARIO: Convert TIFF file to JP2, but do not include the UUID box
        for the TIFF IFD.
        EXPECTED RESULT: data matches, no UUID box
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            create_uuid=False
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        actual = j[:]
        self.assertEqual(actual.shape, (512, 512, 3))
        # No box anywhere in the file may be a UUID box.
        at_least_one_uuid = any(
            isinstance(box, glymur.jp2box.UUIDBox) for box in j.box
        )
        self.assertFalse(at_least_one_uuid)
def test_psnr(self):
"""
SCENARIO: Convert TIFF file to JP2 with the irreversible transform.
EXPECTED RESULT: data matches, the irreversible transform is confirmed
"""
with Tiff2Jp2k(
self.minisblack_spp1_path, self.temp_jp2_filename,
psnr=(30, 35, 40, 0)
) as j:
j.run()
j = Jp2k(self.temp_jp2_filename)
d = {}
for layer in range(4):
j.layer = layer
d[layer] = j[:]
with warnings.catch_warnings():
# MSE is zero for that first image, resulting in a divide-by-zero
# warning
warnings.simplefilter('ignore')
psnr = [
fixtures.skimage.metrics.peak_signal_noise_ratio(
fixtures.skimage.data.moon(), d[j]
)
for j in range(4)
]
# That first image should be lossless.
self.assertTrue(np.isinf(psnr[0]))
# None of the subsequent images should have inf PSNR.
self.assertTrue(not np.any(np.isinf(psnr[1:])))
# PSNR should increase for the remaining images.
self.assertTrue(np.all(np.diff(psnr[1:])) > 0)
    def test_irreversible(self):
        """
        SCENARIO: Convert TIFF file to JP2 with the irreversible transform.
        EXPECTED RESULT: data matches, the irreversible transform is confirmed
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            irreversible=True
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        c = j.get_codestream(header_only=False)
        # The 9x7 wavelet is the irreversible transform.
        self.assertEqual(
            c.segment[2].xform, glymur.core.WAVELET_XFORM_9X7_IRREVERSIBLE
        )
    def test_sop(self):
        """
        SCENARIO: Convert TIFF file to JP2 with SOP markers.
        EXPECTED RESULT: data matches, sop markers confirmed
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename, sop=True
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        # header_only=False is required so packet-level segments are parsed.
        c = j.get_codestream(header_only=False)
        at_least_one_sop = any(
            isinstance(seg, glymur.codestream.SOPsegment)
            for seg in c.segment
        )
        self.assertTrue(at_least_one_sop)
    def test_progression_order(self):
        """
        SCENARIO: Convert TIFF file to JP2 with the RLCP progression order.
        EXPECTED RESULT: data matches, progression order confirmed
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            prog='rlcp'
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        c = j.get_codestream(header_only=False)
        self.assertEqual(c.segment[2].prog_order, glymur.core.RLCP)
    def test_eph(self):
        """
        SCENARIO: Convert TIFF file to JP2 with EPH markers.
        EXPECTED RESULT: data matches, eph markers confirmed
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename, eph=True
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        c = j.get_codestream(header_only=False)
        at_least_one_eph = any(
            isinstance(seg, glymur.codestream.EPHsegment)
            for seg in c.segment
        )
        self.assertTrue(at_least_one_eph)
    def test_plt(self):
        """
        SCENARIO: Convert TIFF file to JP2 with PLT markers.
        EXPECTED RESULT: data matches, plt markers confirmed
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename, plt=True
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        c = j.get_codestream(header_only=False)
        at_least_one_plt = any(
            isinstance(seg, glymur.codestream.PLTsegment)
            for seg in c.segment
        )
        self.assertTrue(at_least_one_plt)
    def test_resolutions(self):
        """
        SCENARIO: Convert TIFF file to JP2 with 4 resolution layers instead
        of the default, which is 5.
        EXPECTED RESULT: data matches, number of resolution layers is 4.
        """
        expected = 4
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            numres=expected
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        actual = j[:]
        self.assertEqual(actual.shape, (512, 512, 3))
        c = j.get_codestream()
        actual = c.segment[2].num_res
        # num_res stores decomposition levels, i.e. resolutions minus one.
        self.assertEqual(actual, expected - 1)
    def test_layers(self):
        """
        SCENARIO: Convert TIFF file to JP2 with multiple compression layers
        EXPECTED RESULT: data matches, number of layers is 3
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            cratios=[200, 50, 10]
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        actual = j[:]
        self.assertEqual(actual.shape, (512, 512, 3))
        c = j.get_codestream()
        # One quality layer per requested compression ratio.
        self.assertEqual(c.segment[2].layers, 3)
    def test_codeblock_size(self):
        """
        SCENARIO: Convert TIFF file to JP2 with a specific code block size
        EXPECTED RESULT: data matches, the code block size is honored
        """
        expected = (32, 32)
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            cbsize=expected
        ) as j:
            j.run()
        j = Jp2k(self.temp_jp2_filename)
        actual = j[:]
        self.assertEqual(actual.shape, (512, 512, 3))
        c = j.get_codestream()
        actual = c.segment[2].code_block_size
        self.assertEqual(actual, expected)
    def test_verbosity(self):
        """
        SCENARIO: Convert TIFF file to JP2, use INFO log level.
        EXPECTED RESULT: exactly one INFO message is logged
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            verbosity=logging.INFO
        ) as j:
            with self.assertLogs(logger='tiff2jp2', level=logging.INFO) as cm:
                j.run()
                # Single-tile conversion logs a single message.
                self.assertEqual(len(cm.output), 1)
    def test_partial_strip_and_partial_tiles(self):
        """
        SCENARIO: Convert monochromatic stripped TIFF file to JP2. The TIFF
        has a partial last strip. The JP2K will have partial tiles.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.minisblack_3strip_partial_last_strip, self.temp_jp2_filename,
            tilesize=(250, 250)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        # Both fixtures are derived from the same 480x480 moon crop, so the
        # 2x2-partial-tiles reference data is a valid expectation here.
        np.testing.assert_array_equal(
            actual, self.minisblack_2x2_partial_tiles_data
        )
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 480)
        self.assertEqual(c.segment[1].ysiz, 480)
        self.assertEqual(c.segment[1].xtsiz, 250)
        self.assertEqual(c.segment[1].ytsiz, 250)
    def test_partial_last_strip(self):
        """
        SCENARIO: Convert monochromatic TIFF file to JP2. The TIFF has a
        partial last strip.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.minisblack_3strip_partial_last_strip, self.temp_jp2_filename,
            tilesize=(240, 240)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        # Same 480x480 moon crop as the 3x3 fixture, so its data applies.
        np.testing.assert_array_equal(actual, self.minisblack_3x3_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 480)
        self.assertEqual(c.segment[1].ysiz, 480)
        self.assertEqual(c.segment[1].xtsiz, 240)
        self.assertEqual(c.segment[1].ytsiz, 240)
    def test_32bit(self):
        """
        SCENARIO: The sample format is 32bit integer.
        EXPECTED RESULT: RuntimeError
        """
        # Build an unsupported 32-bit integer TIFF inline.
        data = fixtures.skimage.data.moon().astype(np.uint32)
        h, w = data.shape
        th, tw = h // 2, w // 2
        fp = libtiff.open(self.temp_tiff_filename, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'SampleFormat', libtiff.SampleFormat.UINT)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 32)
        libtiff.setField(fp, 'SamplesPerPixel', 1)
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w].copy())
        libtiff.close(fp)
        with Tiff2Jp2k(self.temp_tiff_filename, self.temp_jp2_filename) as j:
            with self.assertRaises(RuntimeError):
                j.run()
    def test_floating_point(self):
        """
        SCENARIO: The sample format is 32bit floating point.
        EXPECTED RESULT: RuntimeError
        """
        # Build an unsupported IEEE floating-point TIFF inline.
        data = fixtures.skimage.data.moon().astype(np.float32)
        h, w = data.shape
        th, tw = h // 2, w // 2
        fp = libtiff.open(self.temp_tiff_filename, mode='w')
        libtiff.setField(fp, 'Photometric', libtiff.Photometric.MINISBLACK)
        libtiff.setField(fp, 'Compression', libtiff.Compression.DEFLATE)
        libtiff.setField(fp, 'SampleFormat', libtiff.SampleFormat.IEEEFP)
        libtiff.setField(fp, 'ImageLength', data.shape[0])
        libtiff.setField(fp, 'ImageWidth', data.shape[1])
        libtiff.setField(fp, 'TileLength', th)
        libtiff.setField(fp, 'TileWidth', tw)
        libtiff.setField(fp, 'BitsPerSample', 32)
        libtiff.setField(fp, 'SamplesPerPixel', 1)
        libtiff.writeEncodedTile(fp, 0, data[:th, :tw].copy())
        libtiff.writeEncodedTile(fp, 1, data[:th, tw:w].copy())
        libtiff.writeEncodedTile(fp, 2, data[th:h, :tw].copy())
        libtiff.writeEncodedTile(fp, 3, data[th:h, tw:w].copy())
        libtiff.close(fp)
        with Tiff2Jp2k(self.temp_tiff_filename, self.temp_jp2_filename) as j:
            with self.assertRaises(RuntimeError):
                j.run()
    def test_evenly_tiled(self):
        """
        SCENARIO: Convert monochromatic TIFF file to JP2. The TIFF is evenly
        tiled 2x2.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.minisblack_spp1_path,
            self.temp_jp2_filename,
            tilesize=(256, 256)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.minisblack_spp1_data)
        # c.segment[1] carries the image (xsiz/ysiz) and tile (xtsiz/ytsiz)
        # dimensions.
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 256)
        self.assertEqual(c.segment[1].ytsiz, 256)
    def test_tiled_logging(self):
        """
        SCENARIO: Convert monochromatic TIFF file to JP2. The TIFF is evenly
        tiled 2x2. Logging is turned on.
        EXPECTED RESULT: there are four messages logged, one for each tile
        """
        with Tiff2Jp2k(
            self.minisblack_spp1_path,
            self.temp_jp2_filename,
            tilesize=(256, 256)
        ) as j:
            with self.assertLogs(logger='tiff2jp2', level=logging.INFO) as cm:
                j.run()
                self.assertEqual(len(cm.output), 4)
    def test_minisblack__smaller_tilesize_specified(self):
        """
        SCENARIO: Convert monochromatic TIFF file to JP2. The TIFF is evenly
        tiled 2x2, but we want 4x4.
        EXPECTED RESULT: The data matches. The JP2 file has 16 tiles.
        """
        with Tiff2Jp2k(
            self.minisblack_spp1_path, self.temp_jp2_filename,
            tilesize=(128, 128)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.minisblack_spp1_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 128)
        self.assertEqual(c.segment[1].ytsiz, 128)
    def test_minisblack_3strip_to_2x2(self):
        """
        SCENARIO: Convert monochromatic TIFF file to JP2. The TIFF is evenly
        stripped by 3, but we want 2x2 tiles.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.minisblack_3_full_strips_path, self.temp_jp2_filename,
            tilesize=(240, 240)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        # Both fixtures are the same 480x480 moon crop.
        np.testing.assert_array_equal(actual, self.minisblack_3x3_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 480)
        self.assertEqual(c.segment[1].ysiz, 480)
        self.assertEqual(c.segment[1].xtsiz, 240)
        self.assertEqual(c.segment[1].ytsiz, 240)
    def test_minisblack_3x3__larger_tilesize_specified(self):
        """
        SCENARIO: Convert monochromatic TIFF file to JP2. The TIFF is evenly
        tiled 3x3, but we want 2x2.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.minisblack_3x3_tif, self.temp_jp2_filename,
            tilesize=(240, 240)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.minisblack_3x3_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 480)
        self.assertEqual(c.segment[1].ysiz, 480)
        self.assertEqual(c.segment[1].xtsiz, 240)
        self.assertEqual(c.segment[1].ytsiz, 240)
    def test_separated_configuration(self):
        """
        SCENARIO: The TIFF has a planar configuration of SEPARATE which is
        not supported if a tilesize is specified.
        EXPECTED RESULT: RuntimeError
        """
        with self.assertRaises(RuntimeError):
            with ir.path(
                'tests.data', 'flower-separated-planar-08.tif'
            ) as path:
                with Tiff2Jp2k(
                    path, self.temp_jp2_filename, tilesize=(64, 64)
                ) as j:
                    j.run()
    def test_bad_tile_size(self):
        """
        SCENARIO: Specify a tilesize that exceeds the image size. This will
        cause a segfault unless caught.
        EXPECTED RESULT: RuntimeError
        """
        with self.assertRaises(RuntimeError):
            with ir.path('tests.data', 'albers27-8.tif') as path:
                with Tiff2Jp2k(
                    path, self.temp_jp2_filename, tilesize=(256, 256),
                ) as j:
                    j.run()
    def test_minisblack_spp1_bigtiff(self):
        """
        SCENARIO: Convert minisblack BigTIFF file to JP2. The TIFF has tag
        XResolution.
        EXPECTED RESULT: no errors.

        NOTE(review): the docstring claims BigTIFF/XResolution but the test
        only opens tests/data/albers27-8.tif — confirm the sample file
        actually has those properties.
        """
        with ir.path('tests.data', 'albers27-8.tif') as path:
            with Tiff2Jp2k(path, self.temp_jp2_filename) as j:
                j.run()
    def test_rgb_tiled_bigtiff(self):
        """
        SCENARIO: Convert RGB BigTIFF file to JP2. The TIFF is evenly
        tiled 2x2.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.rgb_bigtiff, self.temp_jp2_filename, tilesize=(256, 256),
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        # The BigTIFF fixture holds the same astronaut data as the classic
        # TIFF fixture.
        np.testing.assert_array_equal(actual, self.astronaut_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 256)
        self.assertEqual(c.segment[1].ytsiz, 256)
    def test_rgb_tiled_tiff(self):
        """
        SCENARIO: Convert RGB TIFF file to JP2. The TIFF is evenly
        tiled 2x2.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.astronaut_tif, self.temp_jp2_filename, tilesize=(256, 256)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.astronaut_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 256)
        self.assertEqual(c.segment[1].ytsiz, 256)
    def test_ycbcr_jpeg_unevenly_tiled(self):
        """
        SCENARIO: Convert YCBCR/JPEG TIFF file to JP2. The TIFF is evenly
        tiled 2x2. The JPEG 2000 file will be tiled 75x75.
        EXPECTED RESULT: The data matches. No errors
        """
        # 75 does not divide 512, so the JP2K tile grid does not line up
        # with the TIFF tile grid.
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            tilesize=(75, 75)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.astronaut_ycbcr_jpeg_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 75)
        self.assertEqual(c.segment[1].ytsiz, 75)
    def test_ycbcr_jpeg_tiff(self):
        """
        SCENARIO: Convert YCBCR/JPEG TIFF file to JP2. The TIFF is evenly
        tiled 2x2.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
            tilesize=(256, 256)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.astronaut_ycbcr_jpeg_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 256)
        self.assertEqual(c.segment[1].ytsiz, 256)
    def test_ycbcr_jpeg_single_tile(self):
        """
        SCENARIO: Convert YCBCR/JPEG TIFF file to JP2. The TIFF is evenly
        tiled 2x2, but no tilesize is specified.
        EXPECTED RESULT: The data matches.
        """
        with Tiff2Jp2k(
            self.astronaut_ycbcr_jpeg_tif, self.temp_jp2_filename,
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.astronaut_ycbcr_jpeg_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        # Without a tilesize the whole image becomes a single tile.
        self.assertEqual(c.segment[1].xtsiz, 512)
        self.assertEqual(c.segment[1].ytsiz, 512)
    def test_tiff_file_not_there(self):
        """
        Scenario: The input TIFF file is not present.
        Expected Result: FileNotFoundError

        The error is raised at construction time, before run() is called.
        """
        with self.assertRaises(FileNotFoundError):
            Tiff2Jp2k(
                self.test_dir_path / 'not_there.tif', self.temp_jp2_filename
            )
    def test_rgb_uint16(self):
        """
        SCENARIO: Convert RGB TIFF file to JP2. The TIFF is evenly
        tiled 2x2 and uint16.
        EXPECTED RESULT: The data matches. The JP2 file has 4 tiles.
        """
        with Tiff2Jp2k(
            self.astronaut_uint16_filename, self.temp_jp2_filename,
            tilesize=(256, 256)
        ) as j:
            j.run()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.astronaut_uint16_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 256)
        self.assertEqual(c.segment[1].ytsiz, 256)
    def test_commandline_tiff2jp2(self):
        """
        Scenario: patch sys such that we can run the command line tiff2jp2
        script.
        Expected Results: Same as test_rgb_tiled_tiff.
        """
        # Fake argv: empty program name, input, output, tile-size options.
        sys.argv = [
            '', str(self.astronaut_tif), str(self.temp_jp2_filename),
            '--tilesize', '256', '256'
        ]
        command_line.tiff2jp2()
        jp2 = Jp2k(self.temp_jp2_filename)
        actual = jp2[:]
        np.testing.assert_array_equal(actual, self.astronaut_data)
        c = jp2.get_codestream()
        self.assertEqual(c.segment[1].xsiz, 512)
        self.assertEqual(c.segment[1].ysiz, 512)
        self.assertEqual(c.segment[1].xtsiz, 256)
        self.assertEqual(c.segment[1].ytsiz, 256)
def test_cmyk(self):
    """
    Scenario: CMYK (or separated) is not a supported colorspace.

    Expected result: RuntimeError
    """
    moon = fixtures.skimage.data.moon()
    data = np.dstack((moon, moon))
    _, _, spp = data.shape

    # instead of 160, this will cause a partially empty last strip
    rows_per_strip = 512

    fp = libtiff.open(self.temp_tiff_filename, mode='w')
    # Tag order matches the original hand-written sequence of setField calls.
    for tag, value in (
        ('Photometric', libtiff.Photometric.SEPARATED),
        ('Compression', libtiff.Compression.DEFLATE),
        ('ImageLength', data.shape[0]),
        ('ImageWidth', data.shape[1]),
        ('RowsPerStrip', rows_per_strip),
        ('BitsPerSample', 8),
        ('SamplesPerPixel', spp),
        ('PlanarConfig', libtiff.PlanarConfig.CONTIG),
        ('InkSet', libtiff.InkSet.MULTIINK),
    ):
        libtiff.setField(fp, tag, value)
    libtiff.writeEncodedStrip(fp, 0, data.copy())
    libtiff.close(fp)

    with Tiff2Jp2k(self.temp_tiff_filename, self.temp_jp2_filename) as converter:
        with warnings.catch_warnings():
            # weird warning about extra samples
            warnings.simplefilter('ignore')
            with self.assertRaises(RuntimeError):
                converter.run()
class TestSuiteNoScikitImage(fixtures.TestCommon):
    """Tests for stripped TIFF conversion that do not require scikit-image."""

    @classmethod
    def setUpClass(cls):
        # One scratch directory holding the stripped TIFF shared by all tests.
        cls.test_tiff_dir = tempfile.mkdtemp()
        cls.test_tiff_path = pathlib.Path(cls.test_tiff_dir)
        cls.setup_rgb_evenly_stripped(cls.test_tiff_path / 'goodstuff.tif')

    @classmethod
    def setup_rgb_evenly_stripped(cls, path):
        """
        SCENARIO: create a simple RGB stripped image, stripsize of 32
        """
        source = Jp2k(glymur.data.goodstuff())
        data = source[:]
        _, _, spp = data.shape
        rps = 32

        fp = libtiff.open(path, mode='w')
        for tag, value in (
            ('Photometric', libtiff.Photometric.RGB),
            ('Compression', libtiff.Compression.DEFLATE),
            ('ImageLength', data.shape[0]),
            ('ImageWidth', data.shape[1]),
            ('RowsPerStrip', rps),
            ('BitsPerSample', 8),
            ('SamplesPerPixel', spp),
            ('PlanarConfig', libtiff.PlanarConfig.CONTIG),
        ):
            libtiff.setField(fp, tag, value)

        # Write the image 32 rows at a time: 25 strips in total.
        for stripnum in range(25):
            top = rps * stripnum
            libtiff.writeEncodedStrip(fp, stripnum, data[top:top + rps, :, :].copy())
        libtiff.close(fp)

        cls.goodstuff_data = data
        cls.goodstuff_path = path

    def test_stripped_logging(self):
        """
        Scenario: input TIFF is organized by strips and logging is turned on.

        Expected result: there are 104 log messages, one for each tile
        """
        with Tiff2Jp2k(
            self.goodstuff_path, self.temp_jp2_filename, tilesize=(64, 64),
            verbosity=logging.INFO
        ) as converter:
            with self.assertLogs(logger='tiff2jp2', level=logging.INFO) as cm:
                converter.run()
        self.assertEqual(len(cm.output), 104)

    def test_rgb_stripped(self):
        """
        Scenario: input TIFF is evenly divided into strips, but the tile size
        does not evenly divide either dimension.

        Expected result: the converted data matches.
        """
        with Tiff2Jp2k(
            self.goodstuff_path, self.temp_jp2_filename, tilesize=(64, 64)
        ) as converter:
            converter.run()

        jp2 = Jp2k(self.temp_jp2_filename)
        np.testing.assert_array_equal(jp2[:], self.goodstuff_data)

        siz = jp2.get_codestream().segment[1]
        self.assertEqual((siz.xsiz, siz.ysiz), (480, 800))
        self.assertEqual((siz.xtsiz, siz.ytsiz), (64, 64))

    def test_rgb_stripped_bottom_of_tile_coincides_with_bottom_of_strip(self):
        """
        Scenario: input TIFF is evenly divided into strips of 32 rows, but
        the 75x75 tile size does not evenly divide either dimension, so some
        jp2k tile boundaries fall just past a TIFF strip boundary.

        Expected Result: no errors
        """
        with Tiff2Jp2k(
            self.goodstuff_path, self.temp_jp2_filename, tilesize=(75, 75)
        ) as converter:
            converter.run()

        jp2 = Jp2k(self.temp_jp2_filename)
        np.testing.assert_array_equal(jp2[:], self.goodstuff_data)

        siz = jp2.get_codestream().segment[1]
        self.assertEqual((siz.xsiz, siz.ysiz), (480, 800))
        self.assertEqual((siz.xtsiz, siz.ytsiz), (75, 75))
| 33.23785
| 79
| 0.604471
| 5,568
| 45,137
| 4.783405
| 0.074353
| 0.064204
| 0.072764
| 0.057858
| 0.823722
| 0.782721
| 0.751408
| 0.719907
| 0.703687
| 0.696103
| 0
| 0.035343
| 0.281011
| 45,137
| 1,357
| 80
| 33.262343
| 0.785351
| 0.144161
| 0
| 0.684337
| 0
| 0
| 0.047663
| 0.004217
| 0
| 0
| 0
| 0
| 0.140964
| 1
| 0.060241
| false
| 0
| 0.018072
| 0
| 0.080723
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f88c92ca65c9f385e325589f5ae8cf6b21d499b3
| 575
|
py
|
Python
|
api_features/support/http_status_codes.py
|
alexgarzao/beeweb
|
c67d024c2d43c6bb2da7ba6877c1648e8760f036
|
[
"MIT"
] | 5
|
2017-11-10T12:48:10.000Z
|
2018-02-21T21:29:52.000Z
|
api_features/support/http_status_codes.py
|
alexgarzao/beeweb
|
c67d024c2d43c6bb2da7ba6877c1648e8760f036
|
[
"MIT"
] | 1
|
2018-04-22T00:08:16.000Z
|
2018-04-22T00:08:16.000Z
|
api_features/support/http_status_codes.py
|
alexgarzao/beeweb
|
c67d024c2d43c6bb2da7ba6877c1648e8760f036
|
[
"MIT"
] | 2
|
2017-11-10T17:06:34.000Z
|
2017-11-11T03:27:46.000Z
|
class HttpStatusCodeAlias(object):
    """Associate a numeric HTTP status code with a human-readable alias."""

    def __init__(self, status_code, alias):
        # e.g. status_code=404, alias='not_found'
        self.status_code = status_code
        self.alias = alias


class HttpStatusCodeAliasList(object):
    """Registry of HttpStatusCodeAlias entries, keyed by alias string."""

    def __init__(self):
        # alias (str) -> HttpStatusCodeAlias
        self.__http_status_code_alias_list = {}

    def add(self, status_code, alias):
        """Register *alias* for *status_code*, replacing any existing entry."""
        self.__http_status_code_alias_list[alias] = HttpStatusCodeAlias(status_code, alias)

    def get(self, alias):
        """Return the HttpStatusCodeAlias for *alias*.

        Raises KeyError if the alias was never registered.
        """
        return self.__http_status_code_alias_list[alias]

    def get_code(self, alias):
        """Return the numeric status code for *alias*.

        Raises KeyError if the alias was never registered.
        """
        # Delegate to get() so the lookup logic lives in exactly one place.
        return self.get(alias).status_code
| 30.263158
| 91
| 0.723478
| 73
| 575
| 5.164384
| 0.205479
| 0.265252
| 0.278515
| 0.190981
| 0.517241
| 0.405836
| 0.334218
| 0.249337
| 0.249337
| 0.249337
| 0
| 0
| 0.191304
| 575
| 18
| 92
| 31.944444
| 0.810753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.384615
| false
| 0
| 0
| 0.153846
| 0.692308
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f8c9efa4d0a112105cde7318d5a203b67e941ab8
| 42
|
py
|
Python
|
src/epub_lib/epub/toc/__init__.py
|
ammarjai/epub_lib
|
a1cc368060eb4e34d73ac44cba2bd29794293136
|
[
"MIT"
] | null | null | null |
src/epub_lib/epub/toc/__init__.py
|
ammarjai/epub_lib
|
a1cc368060eb4e34d73ac44cba2bd29794293136
|
[
"MIT"
] | null | null | null |
src/epub_lib/epub/toc/__init__.py
|
ammarjai/epub_lib
|
a1cc368060eb4e34d73ac44cba2bd29794293136
|
[
"MIT"
] | null | null | null |
from . import parser
from .TOC import TOC
| 14
| 20
| 0.761905
| 7
| 42
| 4.571429
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e00e0bfe69927a419edd461535b480db52ff785
| 2,973
|
py
|
Python
|
40-49/49. iconv/iconv.py
|
dcragusa/PythonMorsels
|
5f75b51a68769036e4004e9ccdada6b220124ab6
|
[
"MIT"
] | 1
|
2021-11-30T05:03:24.000Z
|
2021-11-30T05:03:24.000Z
|
40-49/49. iconv/iconv.py
|
dcragusa/PythonMorsels
|
5f75b51a68769036e4004e9ccdada6b220124ab6
|
[
"MIT"
] | null | null | null |
40-49/49. iconv/iconv.py
|
dcragusa/PythonMorsels
|
5f75b51a68769036e4004e9ccdada6b220124ab6
|
[
"MIT"
] | 2
|
2021-04-18T05:26:43.000Z
|
2021-11-28T18:46:43.000Z
|
import sys
from argparse import ArgumentParser

# The commented-out sections below appear to be earlier iterations of this
# exercise, kept for reference; only the final version at the bottom runs.

# parser = ArgumentParser()
# parser.add_argument('input')
# parser.add_argument('-o', '--output')
# parser.add_argument('-f', '--from-code', default=sys.getdefaultencoding())
# parser.add_argument('-t', '--to-code', default=sys.getdefaultencoding())
# args = parser.parse_args()
#
# if __name__ == '__main__':
#     with open(args.input, 'r', encoding=args.from_code) as input_f:
#         with open(args.output, 'w', encoding=args.to_code) as output_f:
#             output_f.write(input_f.read())

# parser = ArgumentParser()
# parser.add_argument('input')
# parser.add_argument('-o', '--output')
# parser.add_argument('-f', '--from-code', default=sys.getdefaultencoding())
# parser.add_argument('-t', '--to-code', default=sys.getdefaultencoding())
# args = parser.parse_args()
#
# if __name__ == '__main__':
#     with open(args.input, 'r', encoding=args.from_code) as input_f:
#         if args.output:
#             with open(args.output, 'w', encoding=args.to_code) as output_f:
#                 output_f.write(input_f.read())
#         else:
#             sys.stdout.reconfigure(encoding=args.to_code)
#             sys.stdout.write(input_f.read())

# parser = ArgumentParser()
# parser.add_argument('input', nargs='?', default='-')
# parser.add_argument('-o', '--output')
# parser.add_argument('-f', '--from-code', default=sys.getdefaultencoding())
# parser.add_argument('-t', '--to-code', default=sys.getdefaultencoding())
# args = parser.parse_args()
#
# if __name__ == '__main__':
#     if args.input != '-':
#         with open(args.input, 'r', encoding=args.from_code) as input_f:
#             contents = input_f.read()
#     else:
#         sys.stdin.reconfigure(encoding=args.from_code)
#         contents = sys.stdin.read()
#
#     if args.output:
#         with open(args.output, 'w', encoding=args.to_code) as output_f:
#             output_f.write(contents)
#     else:
#         sys.stdout.reconfigure(encoding=args.to_code)
#         sys.stdout.write(contents)

# Final version.  NOTE(review): the parser runs at import time, so importing
# this module will parse sys.argv — presumably intentional for the exercise.
parser = ArgumentParser()
# Positional input file; '-' (the default) means "read from stdin".
parser.add_argument('input', nargs='?', default='-')
# Output file; omitted means "write to stdout".
parser.add_argument('-o', '--output')
# Source and destination text encodings, defaulting to the platform default.
parser.add_argument('-f', '--from-code', default=sys.getdefaultencoding())
parser.add_argument('-t', '--to-code', default=sys.getdefaultencoding())
# -c: silently discard characters that cannot be decoded (like GNU iconv -c).
parser.add_argument('-c', dest='skip_errors', action='store_true')
args = parser.parse_args()

if __name__ == '__main__':
    # 'ignore' drops undecodable input; None falls back to strict decoding.
    errors = 'ignore' if args.skip_errors else None
    if args.input != '-':
        with open(args.input, 'r', encoding=args.from_code, errors=errors) as input_f:
            contents = input_f.read()
    else:
        # reconfigure() changes the encoding of the already-open stdin stream.
        sys.stdin.reconfigure(encoding=args.from_code, errors=errors)
        contents = sys.stdin.read()

    if args.output:
        with open(args.output, 'w', encoding=args.to_code) as output_f:
            output_f.write(contents)
    else:
        sys.stdout.reconfigure(encoding=args.to_code)
        sys.stdout.write(contents)
| 36.703704
| 86
| 0.648503
| 377
| 2,973
| 4.885942
| 0.119363
| 0.083062
| 0.156895
| 0.138979
| 0.941911
| 0.941911
| 0.928882
| 0.903909
| 0.903909
| 0.903909
| 0
| 0
| 0.17558
| 2,973
| 80
| 87
| 37.1625
| 0.75153
| 0.635049
| 0
| 0.086957
| 0
| 0
| 0.07811
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3e5702c74eb952403abde17d58092f8fb8996576
| 110
|
py
|
Python
|
content/en/api/integrations_aws_logs/code_snippets/result.aws_logs_list.py
|
wparth/documentation
|
45246d61fe8a708e12152c7a76fea6e62730348d
|
[
"BSD-3-Clause"
] | 1
|
2020-04-09T01:40:33.000Z
|
2020-04-09T01:40:33.000Z
|
content/en/api/integrations_aws_logs/code_snippets/result.aws_logs_list.py
|
wparth/documentation
|
45246d61fe8a708e12152c7a76fea6e62730348d
|
[
"BSD-3-Clause"
] | null | null | null |
content/en/api/integrations_aws_logs/code_snippets/result.aws_logs_list.py
|
wparth/documentation
|
45246d61fe8a708e12152c7a76fea6e62730348d
|
[
"BSD-3-Clause"
] | null | null | null |
[{u'services': [u'list', u'of', u'enabled', u'services'], u'account_id': u'<AWS_ACCOUNT_ID>', u'lambdas': []}]
| 110
| 110
| 0.618182
| 19
| 110
| 3.421053
| 0.473684
| 0.276923
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 110
| 1
| 110
| 110
| 0.637255
| 0
| 0
| 0
| 0
| 0
| 0.558559
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e40d2ee0a2855241a7702ab6f5c7faf26242e8c7
| 116
|
py
|
Python
|
project_reporter/__init__.py
|
araichev/project_reporter
|
8b9b59258bd694b8aa2ddcd10c65ff97a7f1a204
|
[
"MIT"
] | null | null | null |
project_reporter/__init__.py
|
araichev/project_reporter
|
8b9b59258bd694b8aa2ddcd10c65ff97a7f1a204
|
[
"MIT"
] | null | null | null |
project_reporter/__init__.py
|
araichev/project_reporter
|
8b9b59258bd694b8aa2ddcd10c65ff97a7f1a204
|
[
"MIT"
] | null | null | null |
from .constants import *
from .utilities import *
from .project import *
from .replicon import *
from .main import *
| 23.2
| 24
| 0.75
| 15
| 116
| 5.8
| 0.466667
| 0.45977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163793
| 116
| 5
| 25
| 23.2
| 0.896907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f64328a6a6902158bf1c18d6291a28cc1626f12
| 101
|
py
|
Python
|
config.py
|
RDeep1986/World_Weather_Challenge-
|
b9e301dfee596afd14a68f928aee8c68f5281fd5
|
[
"MIT"
] | null | null | null |
config.py
|
RDeep1986/World_Weather_Challenge-
|
b9e301dfee596afd14a68f928aee8c68f5281fd5
|
[
"MIT"
] | null | null | null |
config.py
|
RDeep1986/World_Weather_Challenge-
|
b9e301dfee596afd14a68f928aee8c68f5281fd5
|
[
"MIT"
] | null | null | null |
# NOTE(review): live API credentials are hard-coded and committed to source
# control.  They should be rotated and loaded from environment variables or an
# untracked secrets file instead — TODO confirm with the repository owner.
weather_api_key = "7322a9574fecf6b96b373286224a9254"
# presumably a Google Maps API key, given the AIza prefix — verify
g_key ="AIzaSyCN5eWhpOhOHhLMIxnBFbUQGGB2Mq1FsEM"
| 50.5
| 52
| 0.90099
| 7
| 101
| 12.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.278351
| 0.039604
| 101
| 2
| 53
| 50.5
| 0.628866
| 0
| 0
| 0
| 0
| 0
| 0.696078
| 0.696078
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
39f4fd47b910f94512bb9c0042b04c5dedb904af
| 39
|
py
|
Python
|
kbdiffdi/indices/__init__.py
|
subond/kbdi-ffdi
|
f0f05afbfa43ef62dedc92a5ca1f4ce2ca17b4b3
|
[
"MIT"
] | null | null | null |
kbdiffdi/indices/__init__.py
|
subond/kbdi-ffdi
|
f0f05afbfa43ef62dedc92a5ca1f4ce2ca17b4b3
|
[
"MIT"
] | null | null | null |
kbdiffdi/indices/__init__.py
|
subond/kbdi-ffdi
|
f0f05afbfa43ef62dedc92a5ca1f4ce2ca17b4b3
|
[
"MIT"
] | 1
|
2021-12-04T15:39:30.000Z
|
2021-12-04T15:39:30.000Z
|
from .kbdi import *
from .ffdi import *
| 19.5
| 19
| 0.717949
| 6
| 39
| 4.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 39
| 2
| 20
| 19.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f2ec5b3800834f11c8f0276175a346806214bd24
| 390
|
py
|
Python
|
temboo/core/Library/FedSpending/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/FedSpending/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/FedSpending/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.FedSpending.Assistance import Assistance, AssistanceInputSet, AssistanceResultSet, AssistanceChoreographyExecution
from temboo.Library.FedSpending.Contracts import Contracts, ContractsInputSet, ContractsResultSet, ContractsChoreographyExecution
from temboo.Library.FedSpending.Recovery import Recovery, RecoveryInputSet, RecoveryResultSet, RecoveryChoreographyExecution
| 97.5
| 134
| 0.9
| 30
| 390
| 11.7
| 0.566667
| 0.08547
| 0.145299
| 0.239316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053846
| 390
| 3
| 135
| 130
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
841732c32f41138d2fde11d858c914d646c8c1d5
| 71
|
py
|
Python
|
build/lib/centralplatform/__init__.py
|
sabarikannan-ai/pydist
|
1fc69c3a2cc3ee7375cc2408f94bc867ba0a6505
|
[
"MIT"
] | null | null | null |
build/lib/centralplatform/__init__.py
|
sabarikannan-ai/pydist
|
1fc69c3a2cc3ee7375cc2408f94bc867ba0a6505
|
[
"MIT"
] | null | null | null |
build/lib/centralplatform/__init__.py
|
sabarikannan-ai/pydist
|
1fc69c3a2cc3ee7375cc2408f94bc867ba0a6505
|
[
"MIT"
] | null | null | null |
from centralplatform.File1 import *
from centralplatform.Dept import *
| 23.666667
| 35
| 0.830986
| 8
| 71
| 7.375
| 0.625
| 0.644068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.112676
| 71
| 2
| 36
| 35.5
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.