hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fadb22cc54d470339762a904fa9aac869f9e75f9
| 21
|
py
|
Python
|
roentgen/lines/__init__.py
|
ehsteve/roentgen
|
76016ad2558d20c3e87304bd4abafa906d98caa5
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 7
|
2018-06-05T20:28:44.000Z
|
2022-03-16T15:22:45.000Z
|
roentgen/lines/__init__.py
|
samaloney/roentgen
|
44467581886eaa355cb991b3778bb8de7e30a47d
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 17
|
2018-06-05T20:29:42.000Z
|
2021-06-06T18:26:22.000Z
|
roentgen/lines/__init__.py
|
samaloney/roentgen
|
44467581886eaa355cb991b3778bb8de7e30a47d
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2019-10-24T15:24:52.000Z
|
2020-08-27T22:17:05.000Z
|
from .lines import *
| 10.5
| 20
| 0.714286
| 3
| 21
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4f08475dd9a85aa62e0f69ff5968d09d1b8964a4
| 39
|
py
|
Python
|
tinycards/__init__.py
|
njam/tinycards-python-api
|
7381efbc75feeed4d7aaa25f78c192af6bf1d011
|
[
"MIT"
] | null | null | null |
tinycards/__init__.py
|
njam/tinycards-python-api
|
7381efbc75feeed4d7aaa25f78c192af6bf1d011
|
[
"MIT"
] | null | null | null |
tinycards/__init__.py
|
njam/tinycards-python-api
|
7381efbc75feeed4d7aaa25f78c192af6bf1d011
|
[
"MIT"
] | null | null | null |
from tinycards.client import Tinycards
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87c7a9d1c9ff1ddddb26909f13e6f43a2ba1cd25
| 333
|
py
|
Python
|
src/ebay_rest/api/sell_analytics/api/__init__.py
|
gbm001/ebay_rest
|
077d3478423ccd80ff35e0361821d6a11180bc54
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/sell_analytics/api/__init__.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/sell_analytics/api/__init__.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from ...sell_analytics.api.customer_service_metric_api import CustomerServiceMetricApi
from ...sell_analytics.api.seller_standards_profile_api import SellerStandardsProfileApi
from ...sell_analytics.api.traffic_report_api import TrafficReportApi
| 37
| 88
| 0.864865
| 41
| 333
| 6.634146
| 0.560976
| 0.088235
| 0.1875
| 0.220588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003268
| 0.081081
| 333
| 8
| 89
| 41.625
| 0.885621
| 0.123123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87c92c6dbfb8ba87e7c6aeee7b61fbce832f35fc
| 5,974
|
py
|
Python
|
tests/_display/test_line_dash_setting_interface.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 16
|
2021-04-16T02:01:29.000Z
|
2022-01-01T08:53:49.000Z
|
tests/_display/test_line_dash_setting_interface.py
|
ynsnf/apysc
|
b10ffaf76ec6beb187477d0a744fca00e3efc3fb
|
[
"MIT"
] | 613
|
2021-03-24T03:37:38.000Z
|
2022-03-26T10:58:37.000Z
|
tests/_display/test_line_dash_setting_interface.py
|
simon-ritchie/apyscript
|
c319f8ab2f1f5f7fad8d2a8b4fc06e7195476279
|
[
"MIT"
] | 2
|
2021-06-20T07:32:58.000Z
|
2021-12-26T08:22:11.000Z
|
import re
from random import randint
from typing import Match
from typing import Optional
from retrying import retry
from apysc._display.line_dash_setting import LineDashSetting
from apysc._display.line_dash_setting_interface import LineDashSettingInterface
from apysc._expression import expression_data_util
from tests.testing_helper import assert_raises
class TestLineDashSettingInterface:
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test__initialize_line_dash_setting_if_not_initialized(self) -> None:
interface: LineDashSettingInterface = LineDashSettingInterface()
interface._initialize_line_dash_setting_if_not_initialized()
assert interface._line_dash_setting is None
interface._line_dash_setting = LineDashSetting(
dash_size=10, space_size=5)
interface._initialize_line_dash_setting_if_not_initialized()
assert interface._line_dash_setting.dash_size == 10
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test_line_dash_setting(self) -> None:
interface: LineDashSettingInterface = LineDashSettingInterface()
interface.variable_name = 'test_line_dash_setting_interface'
line_dash_setting: Optional[LineDashSetting] = \
interface.line_dash_setting
assert line_dash_setting is None
interface._line_dash_setting = LineDashSetting(
dash_size=10, space_size=5)
line_dash_setting = interface.line_dash_setting
assert line_dash_setting.dash_size == 10 # type: ignore
interface.line_dash_setting = LineDashSetting(
dash_size=5, space_size=3)
line_dash_setting = interface.line_dash_setting
assert line_dash_setting.dash_size == 5
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test__update_line_dash_setting_and_skip_appending_exp(self) -> None:
expression_data_util.empty_expression()
interface: LineDashSettingInterface = LineDashSettingInterface()
assert_raises(
expected_error_class=TypeError,
func_or_method=interface.
_update_line_dash_setting_and_skip_appending_exp,
kwargs={'value': 'dash'},
match='Not supported line_dash_setting type specified: ')
interface._update_line_dash_setting_and_skip_appending_exp(
value=LineDashSetting(dash_size=10, space_size=5))
line_dash_setting: Optional[LineDashSetting] = \
interface.line_dash_setting
assert line_dash_setting.dash_size == 10 # type: ignore
expression: str = expression_data_util.get_current_expression()
assert '.css(' not in expression
interface._update_line_dash_setting_and_skip_appending_exp(
value=None)
line_dash_setting = interface.line_dash_setting
assert line_dash_setting is None
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test__append_line_dash_setting_update_expression(self) -> None:
expression_data_util.empty_expression()
interface: LineDashSettingInterface = LineDashSettingInterface()
interface._initialize_line_dash_setting_if_not_initialized()
interface.variable_name = 'test_line_dash_interface'
interface._append_line_dash_setting_update_expression()
expression: str = expression_data_util.get_current_expression()
match: Optional[Match] = re.search(
pattern=(
rf'{interface.variable_name}.css\("stroke-dasharray", ""\);'
),
string=expression, flags=re.MULTILINE)
assert match is not None
expression_data_util.empty_expression()
interface._line_dash_setting = LineDashSetting(
dash_size=10, space_size=5)
interface._append_line_dash_setting_update_expression()
expression = expression_data_util.get_current_expression()
match = re.search(
pattern=(
rf'{interface.variable_name}.css\("stroke-dasharray", '
rf'String\(.+?\) \+ " " \+ String\(.+\)\);'
),
string=expression, flags=re.MULTILINE)
assert match is not None
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test__make_snapshot(self) -> None:
interface: LineDashSettingInterface = LineDashSettingInterface()
interface.variable_name = 'test_line_dash_setting_interface'
interface.line_dash_setting = LineDashSetting(
dash_size=10, space_size=5)
snapshot_name: str = interface._get_next_snapshot_name()
interface._run_all_make_snapshot_methods(snapshot_name=snapshot_name)
assert isinstance(
interface._line_dash_setting_snapshots[snapshot_name],
LineDashSetting)
interface.line_dash_setting = None
interface._run_all_make_snapshot_methods(snapshot_name=snapshot_name)
assert isinstance(
interface._line_dash_setting_snapshots[snapshot_name],
LineDashSetting)
@retry(stop_max_attempt_number=15, wait_fixed=randint(10, 3000))
def test__revert(self) -> None:
interface: LineDashSettingInterface = LineDashSettingInterface()
interface.variable_name = 'test_line_dash_setting_interface'
interface.line_dash_setting = LineDashSetting(
dash_size=10, space_size=5)
snapshot_name: str = interface._get_next_snapshot_name()
interface._run_all_revert_methods(snapshot_name=snapshot_name)
interface._run_all_make_snapshot_methods(snapshot_name=snapshot_name)
interface.line_dash_setting = None
interface._run_all_revert_methods(snapshot_name=snapshot_name)
assert isinstance(interface._line_dash_setting, LineDashSetting)
| 47.03937
| 80
| 0.710412
| 660
| 5,974
| 5.971212
| 0.140909
| 0.095407
| 0.175082
| 0.115707
| 0.875666
| 0.87186
| 0.833799
| 0.792185
| 0.718346
| 0.69424
| 0
| 0.016101
| 0.220288
| 5,974
| 126
| 81
| 47.412698
| 0.82997
| 0.004185
| 0
| 0.66055
| 0
| 0
| 0.056357
| 0.037801
| 0
| 0
| 0
| 0
| 0.137615
| 1
| 0.055046
| false
| 0
| 0.082569
| 0
| 0.146789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
87d01ca8b749ecbc0fe028ab6886a2988226b905
| 23
|
py
|
Python
|
__init__.py
|
shbhuk/hpfspec
|
5023d6a090c1dd4835fccb6c0655d821d42c370a
|
[
"MIT"
] | null | null | null |
__init__.py
|
shbhuk/hpfspec
|
5023d6a090c1dd4835fccb6c0655d821d42c370a
|
[
"MIT"
] | 2
|
2022-01-23T23:07:34.000Z
|
2022-01-25T14:51:34.000Z
|
__init__.py
|
shbhuk/hpfspec
|
5023d6a090c1dd4835fccb6c0655d821d42c370a
|
[
"MIT"
] | 2
|
2021-09-06T00:31:59.000Z
|
2022-01-23T21:46:26.000Z
|
from .hpfspec import *
| 11.5
| 22
| 0.73913
| 3
| 23
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87dc25fc0d72d1bec7126fae7397039a289920b8
| 110
|
py
|
Python
|
apps/menu/context_processors.py
|
theju/confista
|
632e651f734ff88d968efb5f24608633c313ee3f
|
[
"MIT"
] | null | null | null |
apps/menu/context_processors.py
|
theju/confista
|
632e651f734ff88d968efb5f24608633c313ee3f
|
[
"MIT"
] | null | null | null |
apps/menu/context_processors.py
|
theju/confista
|
632e651f734ff88d968efb5f24608633c313ee3f
|
[
"MIT"
] | 1
|
2019-12-02T19:01:49.000Z
|
2019-12-02T19:01:49.000Z
|
from menu.models import Menu_Item
def menu(request):
return {"menu_items": Menu_Item.objects.iterator()}
| 22
| 55
| 0.754545
| 16
| 110
| 5
| 0.6875
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 110
| 4
| 56
| 27.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
e2054b2a6d28f682419b9409e29643dc5f146ae9
| 32
|
py
|
Python
|
ode4jax/base.py
|
PhilipVinc/netket_dynamics
|
6e8009098c279271cb0f289ba9e85c039bb284e4
|
[
"Apache-2.0"
] | 2
|
2021-10-02T20:29:44.000Z
|
2021-10-02T20:38:28.000Z
|
ode4jax/base.py
|
PhilipVinc/netket_dynamics
|
6e8009098c279271cb0f289ba9e85c039bb284e4
|
[
"Apache-2.0"
] | 11
|
2021-10-01T09:15:06.000Z
|
2022-03-21T09:19:23.000Z
|
ode4jax/base.py
|
PhilipVinc/netket_dynamics
|
6e8009098c279271cb0f289ba9e85c039bb284e4
|
[
"Apache-2.0"
] | null | null | null |
from ode4jax._src.base import *
| 16
| 31
| 0.78125
| 5
| 32
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.125
| 32
| 1
| 32
| 32
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
356c34625efaba16849c7fb686ce911d5e058658
| 1,077
|
py
|
Python
|
tests/test_module_check.py
|
shift-left-test/SAGE
|
da5ff53af1b83a7190f91271fad380e720bd939f
|
[
"MIT-0",
"MIT"
] | null | null | null |
tests/test_module_check.py
|
shift-left-test/SAGE
|
da5ff53af1b83a7190f91271fad380e720bd939f
|
[
"MIT-0",
"MIT"
] | null | null | null |
tests/test_module_check.py
|
shift-left-test/SAGE
|
da5ff53af1b83a7190f91271fad380e720bd939f
|
[
"MIT-0",
"MIT"
] | null | null | null |
import os
import pytest
import shutil
import subprocess
import sys
from .base_module_check import *
def test_run_check_default(basic_build_bad_content):
ctx = basic_build_bad_content.run_tools(["cppcheck", "cpplint"])
assert len(ctx.file_analysis_map) != 0
for file_name, file_analysis in ctx.file_analysis_map.items():
num_of_issues = get_num_of_issues(file_analysis)
assert num_of_issues != 0
def test_run_check_all(basic_build_bad_content):
ctx = basic_build_bad_content.run_tools(["cppcheck", "cpplint", "clang-tidy"])
assert len(ctx.file_analysis_map) != 0
for file_name, file_analysis in ctx.file_analysis_map.items():
num_of_issues = get_num_of_issues(file_analysis)
assert num_of_issues != 0
def test_run_check_default_no_issue(basic_build):
ctx = basic_build.run_tools(["cppcheck", "cpplint"])
assert len(ctx.file_analysis_map) == 0
def test_run_check_all_no_issue(basic_build):
ctx = basic_build.run_tools(["cppcheck", "cpplint", "clang-tidy"])
assert len(ctx.file_analysis_map) == 0
| 29.916667
| 82
| 0.748375
| 165
| 1,077
| 4.460606
| 0.248485
| 0.163043
| 0.122283
| 0.146739
| 0.888587
| 0.849185
| 0.820652
| 0.820652
| 0.820652
| 0.820652
| 0
| 0.006565
| 0.151346
| 1,077
| 35
| 83
| 30.771429
| 0.798687
| 0
| 0
| 0.416667
| 0
| 0
| 0.07428
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.166667
| false
| 0
| 0.25
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
356d4c172525515024dd77d202399bfd58b664bd
| 1,859
|
py
|
Python
|
tests/test_token_utils.py
|
Badger-Finance/python-keepers
|
b5b2b0b083a237dceecd161d81754512959822b1
|
[
"MIT"
] | null | null | null |
tests/test_token_utils.py
|
Badger-Finance/python-keepers
|
b5b2b0b083a237dceecd161d81754512959822b1
|
[
"MIT"
] | 22
|
2022-03-08T19:30:45.000Z
|
2022-03-28T21:14:15.000Z
|
tests/test_token_utils.py
|
Badger-Finance/python-keepers
|
b5b2b0b083a237dceecd161d81754512959822b1
|
[
"MIT"
] | null | null | null |
import pytest
import responses
from requests import HTTPError
from config.enums import Network
from src.token_utils import get_token_price
@responses.activate
def test_get_token_price_prod():
currency = "usd"
price = 8.75
responses.add(
responses.GET,
f"https://api.badger.finance/v2/prices?currency={currency}&chain={Network.Ethereum}",
json={
"0x3472a5a71965499acd81997a54bba8d852c6e53d": 8.75,
},
status=200,
)
token_price = get_token_price(
token_address="0x3472a5a71965499acd81997a54bba8d852c6e53d",
currency="usd",
chain=Network.Ethereum,
)
assert token_price == price
@responses.activate
def test_get_token_price_staging():
currency = "usd"
price = 8.75
responses.add(
responses.GET,
f"https://staging-api.badger.finance/v2/prices?currency={currency}"
f"&chain={Network.Ethereum}",
json={
"0x3472a5a71965499acd81997a54bba8d852c6e53d": 8.75,
},
status=200,
)
token_price = get_token_price(
token_address="0x3472a5a71965499acd81997a54bba8d852c6e53d",
currency="usd",
chain=Network.Ethereum,
use_staging=True,
)
assert token_price == price
@responses.activate
def test_get_token_price_raises():
currency = "usd"
responses.add(
responses.GET,
f"https://staging-api.badger.finance/v2/prices?currency={currency}"
f"&chain={Network.Ethereum}",
json={
"0x3472a5a71965499acd81997a54bba8d852c6e53d": 8.75,
},
status=403,
)
with pytest.raises(HTTPError):
get_token_price(
token_address="0x3472a5a71965499acd81997a54bba8d852c6e53d",
currency="usd",
chain=Network.Ethereum,
use_staging=True,
)
| 26.557143
| 93
| 0.642281
| 182
| 1,859
| 6.395604
| 0.241758
| 0.094502
| 0.078179
| 0.064433
| 0.854811
| 0.854811
| 0.854811
| 0.820447
| 0.784364
| 0.784364
| 0
| 0.14049
| 0.253362
| 1,859
| 69
| 94
| 26.942029
| 0.698127
| 0
| 0
| 0.66129
| 0
| 0.016129
| 0.284562
| 0.162453
| 0
| 0
| 0.135557
| 0
| 0.032258
| 1
| 0.048387
| false
| 0
| 0.080645
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
35792705e9f3be1ae6d84b497d62e8bad033c3ba
| 64
|
py
|
Python
|
handlers/funny/__init__.py
|
yunlzheng/PDFLabs
|
2b11d4d0b1ab7c2e4c474665e49935606e6b6bcd
|
[
"MIT",
"Unlicense"
] | 1
|
2015-08-21T03:08:23.000Z
|
2015-08-21T03:08:23.000Z
|
handlers/funny/__init__.py
|
yunlzheng/PDFLabs
|
2b11d4d0b1ab7c2e4c474665e49935606e6b6bcd
|
[
"MIT",
"Unlicense"
] | null | null | null |
handlers/funny/__init__.py
|
yunlzheng/PDFLabs
|
2b11d4d0b1ab7c2e4c474665e49935606e6b6bcd
|
[
"MIT",
"Unlicense"
] | null | null | null |
from .Gallery import UserGalleryHandler, UserGalleryApiHandler
| 32
| 63
| 0.875
| 5
| 64
| 11.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 64
| 1
| 64
| 64
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea3faa787513e8d02d9d1ec531826937afa7c499
| 18,859
|
py
|
Python
|
spitfire/compiler/analyzer_test.py
|
atubbs/spitfire
|
73dc7ab2f9721aa96d402887e3a080012047e3e1
|
[
"BSD-3-Clause"
] | 385
|
2016-04-07T06:21:12.000Z
|
2022-03-30T23:03:59.000Z
|
spitfire/compiler/analyzer_test.py
|
atubbs/spitfire
|
73dc7ab2f9721aa96d402887e3a080012047e3e1
|
[
"BSD-3-Clause"
] | 25
|
2016-02-03T23:43:47.000Z
|
2016-03-31T21:34:45.000Z
|
spitfire/compiler/analyzer_test.py
|
atubbs/spitfire
|
73dc7ab2f9721aa96d402887e3a080012047e3e1
|
[
"BSD-3-Clause"
] | 67
|
2016-04-29T14:49:09.000Z
|
2022-03-15T01:51:02.000Z
|
# Copyright 2014 The Spitfire Authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import unittest
from spitfire.compiler import analyzer
from spitfire.compiler import ast
from spitfire.compiler import compiler
from spitfire.compiler import options
from spitfire.compiler import util
from spitfire.compiler import walker
from spitfire import test_util
class BaseTest(unittest.TestCase):
def __init__(self, *args):
unittest.TestCase.__init__(self, *args)
self.analyzer_options = options.default_options
self.analyzer_options.update(cache_resolved_placeholders=True,
enable_warnings=True,
warnings_as_errors=True)
def setUp(self):
self.compiler = compiler.Compiler(
analyzer_options=self.analyzer_options,
xspt_mode=False,
compiler_stack_traces=True)
def _get_analyzer(self, ast_root):
semantic_analyzer = analyzer.SemanticAnalyzer(
'TestTemplate', ast_root, self.compiler.analyzer_options,
self.compiler)
semantic_analyzer.get_ast = test_util.RecordedFunction(
semantic_analyzer.get_ast)
return semantic_analyzer
def _build_function_template(self):
""" Build a simple template with a function.
file: TestTemplate
#def test_function
#end def
"""
ast_root = ast.TemplateNode('TestTemplate')
def_node = ast.DefNode('test_function')
ast_root.append(def_node)
return (ast_root, def_node)
def _build_if_template(self, condition=None):
""" Build a simple template with a function and an if statement.
file: TestTemplate
#def test_function
#if True
#end if
#end def
"""
ast_root, def_node = self._build_function_template()
condition_node = condition or ast.LiteralNode(True)
if_node = ast.IfNode(condition_node)
def_node.append(if_node)
return (ast_root, def_node, if_node)
def _compile(self, template_content):
template_node = util.parse_template(template_content)
template_node.source_path = 'test_template.spt'
return template_node
class TestEmptyIfBlockError(BaseTest):
def test_empty_if_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#if True
#end if
#end def
"""
ast_root, def_node, if_node = self._build_if_template()
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_optional_whitespace_if_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#if True
#end if
#end def
"""
ast_root, def_node, if_node = self._build_if_template()
if_node.append(ast.OptionalWhitespaceNode(' '))
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_comment_if_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#if True
## This is a comment.
#end if
#end def
"""
ast_root, def_node, if_node = self._build_if_template()
if_node.append(ast.CommentNode(' This is a comment.'))
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_empty_if_full_else_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#if True
#else
#set $foo = true
#end if
#end def
"""
ast_root, def_node, if_node = self._build_if_template()
assign_node = ast.AssignNode(
ast.IdentifierNode('foo'), ast.LiteralNode(True))
if_node.else_.append(assign_node)
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_empty_elif_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#if True
#set $foo = True
#elif False
#end if
#end def
"""
ast_root, def_node, if_node = self._build_if_template()
assign_node = ast.AssignNode(
ast.IdentifierNode('foo'), ast.LiteralNode(True))
if_node.append(assign_node)
elif_node = ast.IfNode(ast.LiteralNode(False))
if_node.else_.append(elif_node)
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_non_empty_if_ok(self):
self.ast_description = """
file: TestTemplate
#def test_function
#if True
#set $foo = True
#end if
#end def
"""
ast_root, def_node, if_node = self._build_if_template()
assign_node = ast.AssignNode(
ast.IdentifierNode('foo'), ast.LiteralNode(True))
if_node.append(assign_node)
semantic_analyzer = self._get_analyzer(ast_root)
try:
semantic_analyzer.get_ast()
except analyzer.SemanticAnalyzerError:
self.fail('get_ast raised SemanticAnalyzerError unexpectedly.')
class TestEmptyForBlockError(BaseTest):
def _build_for_template(self):
""" Build a simple template with a function and a for loop.
file: TestTemplate
#def test_function
#for $i in []
#end for
#end def
"""
ast_root, def_node = self._build_function_template()
target_list = ast.TargetListNode()
target_list.append(ast.PlaceholderNode('foo'))
expression_list = ast.ExpressionListNode()
expression_list.append(ast.LiteralNode([]))
for_node = ast.ForNode(target_list=target_list,
expression_list=expression_list)
def_node.append(for_node)
return (ast_root, def_node, for_node)
def test_empty_for_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#for $i in []
#end for
#end def
"""
ast_root, def_node, for_node = self._build_for_template()
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_optional_whitespace_for_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#for $i in []
#end for
#end def
"""
ast_root, def_node, for_node = self._build_for_template()
for_node.append(ast.OptionalWhitespaceNode(' '))
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_comment_for_fails(self):
self.ast_description = """
file: TestTemplate
#def test_function
#for $i in []
## This is a comment.
#end for
#end def
"""
ast_root, def_node, for_node = self._build_for_template()
for_node.append(ast.CommentNode(' This is a comment.'))
semantic_analyzer = self._get_analyzer(ast_root)
self.assertRaises(analyzer.SemanticAnalyzerError,
semantic_analyzer.get_ast)
def test_non_empty_for_ok(self):
self.ast_description = """
file: TestTemplate
#def test_function
#for $i in []
#set $foo = True
#end for
#end def
"""
ast_root, def_node, for_node = self._build_for_template()
assign_node = ast.AssignNode(
ast.IdentifierNode('foo'), ast.LiteralNode(True))
for_node.append(assign_node)
semantic_analyzer = self._get_analyzer(ast_root)
try:
semantic_analyzer.get_ast()
except analyzer.SemanticAnalyzerError:
self.fail('get_ast raised SemanticAnalyzerError unexpectedly.')
class TestGlobalScopeLibraryError(BaseTest):

    def _build_function_template_library(self):
        """Return (root, def_node) for a minimal library template.

        file: TestTemplate
        #implements library
        #def test_function
        #end def
        """
        root = ast.TemplateNode('TestTemplate')
        root.append(ast.ImplementsNode('library'))
        function_node = ast.DefNode('test_function')
        root.append(function_node)
        return (root, function_node)

    def test_library_ok(self):
        self.ast_description = """
        file: TestTemplate
        #implements library
        #def test_function
        #end def
        """
        root, _ = self._build_function_template_library()
        try:
            self._get_analyzer(root).get_ast()
        except analyzer.SemanticAnalyzerError:
            self.fail('get_ast raised SemanticAnalyzerError unexpectedly.')

    def test_set_error(self):
        self.ast_description = """
        file: TestTemplate
        #implements library
        #set $foo = True
        #def test_function
        #end def
        """
        root, function_node = self._build_function_template_library()
        new_node = ast.AssignNode(
            ast.IdentifierNode('foo'), ast.LiteralNode(True))
        root.insert_before(function_node, new_node)
        self.assertRaises(analyzer.SemanticAnalyzerError,
                          self._get_analyzer(root).get_ast)

    def test_attr_error(self):
        self.ast_description = """
        file: TestTemplate
        #implements library
        #attr $foo = True
        #def test_function
        #end def
        """
        root, function_node = self._build_function_template_library()
        new_node = ast.AttributeNode('foo', default=ast.LiteralNode(True))
        root.insert_before(function_node, new_node)
        self.assertRaises(analyzer.SemanticAnalyzerError,
                          self._get_analyzer(root).get_ast)

    def test_global_ok(self):
        self.ast_description = """
        file: TestTemplate
        #implements library
        #global $foo
        #def test_function
        #end def
        """
        root, function_node = self._build_function_template_library()
        root.insert_before(function_node, ast.GlobalNode('foo'))
        try:
            self._get_analyzer(root).get_ast()
        except analyzer.SemanticAnalyzerError:
            self.fail('get_ast raised SemanticAnalyzerError unexpectedly.')
class TestAssignSlice(BaseTest):

    def test_slice_non_identifier_error(self):
        self.ast_description = """
        file: TestTemplate
        #def test_function
        #set 1[1] = 1
        #end def
        """
        root, function_node = self._build_function_template()
        # Assigning into a slice of a literal is not a valid target.
        target = ast.SliceNode(ast.LiteralNode(1), ast.LiteralNode(1))
        function_node.append(ast.AssignNode(target, ast.LiteralNode(1)))
        self.assertRaises(analyzer.SemanticAnalyzerError,
                          self._get_analyzer(root).get_ast)

    def test_slice_identifier_ok(self):
        self.ast_description = """
        file: TestTemplate
        #def test_function
        #set $foo[1] = 1
        #end def
        """
        root, function_node = self._build_function_template()
        # Slices of identifiers are legal assignment targets.
        target = ast.SliceNode(ast.IdentifierNode('foo'), ast.LiteralNode(1))
        function_node.append(ast.AssignNode(target, ast.LiteralNode(1)))
        try:
            self._get_analyzer(root).get_ast()
        except analyzer.SemanticAnalyzerError:
            self.fail('get_ast raised SemanticAnalyzerError unexpectedly.')
class TestSanitizedFunction(BaseTest):
    """Checks the sanitization state the analyzer records on call nodes."""

    def setUp(self):
        self.analyzer_options = options.default_options
        self.analyzer_options.update(cache_resolved_placeholders=True,
                                     enable_warnings=True,
                                     warnings_as_errors=True,
                                     baked_mode=True,
                                     generate_unicode=False)
        self.compiler = compiler.Compiler(
            analyzer_options=self.analyzer_options,
            xspt_mode=False,
            compiler_stack_traces=True)
        self.compiler.new_registry_format = True
        self.compiler.function_name_registry['reg_f'] = ('a.reg_f',
                                                         ['skip_filter'])

    def _analyzed_call(self, code, pred, missing_msg):
        """Compile and analyze code, then return the node matching pred,
        failing the test with missing_msg when no node matches."""
        template = self._compile(code)
        analyzed = self._get_analyzer(template).get_ast()
        call = walker.find_node(analyzed, pred)
        if not call:
            self.fail(missing_msg)
        return call

    def test_template_method_direct(self):
        code = """
#def foo
Hello
#end def
#def bar
$foo()
#end def
"""
        def pred(node):
            return (type(node) == ast.CallFunctionNode and
                    type(node.expression) == ast.PlaceholderNode and
                    node.expression.name == 'foo')
        call = self._analyzed_call(code, pred, 'Expected foo() in ast')
        self.assertEqual(call.sanitization_state,
                         ast.SanitizedState.SANITIZED_STRING)

    def test_library_function_direct(self):
        code = """
#from module import library my_lib
#def bar
$my_lib.foo()
#end def
"""
        def pred(node):
            return (type(node) == ast.CallFunctionNode and
                    type(node.expression) == ast.IdentifierNode and
                    node.expression.name == 'my_lib.foo')
        call = self._analyzed_call(code, pred, 'Expected my_lib.foo() in ast')
        self.assertEqual(call.sanitization_state,
                         ast.SanitizedState.SANITIZED_STRING)

    def test_library_function_registry_yes(self):
        code = """
#def bar
$reg_f()
#end def
"""
        def pred(node):
            return (type(node) == ast.CallFunctionNode and
                    type(node.expression) == ast.PlaceholderNode and
                    node.expression.name == 'reg_f')
        call = self._analyzed_call(code, pred, 'Expected reg_f() in ast')
        self.assertEqual(call.sanitization_state,
                         ast.SanitizedState.SANITIZED)

    def test_external_function_maybe(self):
        code = """
#from module import my_lib
#def bar
$my_lib.foo()
#end def
"""
        def pred(node):
            return (type(node) == ast.CallFunctionNode and
                    type(node.expression) == ast.GetUDNNode and
                    type(node.expression.expression) == ast.PlaceholderNode and
                    node.expression.expression.name == 'my_lib' and
                    node.expression.name == 'foo')
        call = self._analyzed_call(code, pred, 'Expected my_libfoo() in ast')
        self.assertEqual(call.sanitization_state,
                         ast.SanitizedState.UNKNOWN)
class TestNoRaw(BaseTest):
    """Checks the no_raw option and the #allow_raw escape hatch."""

    def setUp(self):
        self.analyzer_options = options.default_options
        self.analyzer_options.update(enable_warnings=True,
                                     warnings_as_errors=True,
                                     no_raw=True)
        self.compiler = compiler.Compiler(
            analyzer_options=self.analyzer_options,
            xspt_mode=False,
            compiler_stack_traces=True)

    def _analyzer_for(self, code):
        """Compile code and return its semantic analyzer."""
        return self._get_analyzer(self._compile(code))

    def test_error_with_raw(self):
        code = """
#def foo
#set $a = "a"
${a|raw}
#end def
"""
        self.assertRaises(analyzer.SemanticAnalyzerError,
                          self._analyzer_for(code).get_ast)

    def test_allow_raw_no_error(self):
        code = """
#allow_raw
#def foo
#set $a = "a"
${a|raw}
#end def
"""
        try:
            self._analyzer_for(code).get_ast()
        except analyzer.SemanticAnalyzerError:
            self.fail('get_ast raised an error unexpectedly.')

    def test_allow_raw_macro_no_error(self):
        code = """
#allow_raw
#global $a
#def foo
#i18n()#
${a|raw}
#end i18n#
#end def
"""
        try:
            self._analyzer_for(code).get_ast()
        except analyzer.SemanticAnalyzerError:
            self.fail('get_ast raised an error unexpectedly.')

    def test_allow_raw_no_raw_error(self):
        code = """
#allow_raw
#def foo
#end def
"""
        self.assertRaises(analyzer.SemanticAnalyzerError,
                          self._analyzer_for(code).get_ast)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 30.665041
| 79
| 0.600827
| 2,002
| 18,859
| 5.362637
| 0.094406
| 0.077496
| 0.046013
| 0.053279
| 0.799739
| 0.78791
| 0.768722
| 0.760525
| 0.756893
| 0.723268
| 0
| 0.001394
| 0.315287
| 18,859
| 614
| 80
| 30.714984
| 0.830016
| 0.033035
| 0
| 0.735358
| 0
| 0
| 0.196829
| 0.005822
| 0
| 0
| 0
| 0
| 0.036876
| 1
| 0.08243
| false
| 0
| 0.021692
| 0.008677
| 0.140998
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ea40dcb2d20078c1240087e8476e9e3e1964d53c
| 2,587
|
py
|
Python
|
kmmoderation/role.py
|
KINGMAN1996/moderation-by-kmcode
|
b70611b4a4e534cdd44d014be5152ddf272f73d6
|
[
"MIT"
] | 1
|
2021-04-05T14:55:51.000Z
|
2021-04-05T14:55:51.000Z
|
kmmoderation/role.py
|
KINGMAN1996/moderation-by-kmcode
|
b70611b4a4e534cdd44d014be5152ddf272f73d6
|
[
"MIT"
] | null | null | null |
kmmoderation/role.py
|
KINGMAN1996/moderation-by-kmcode
|
b70611b4a4e534cdd44d014be5152ddf272f73d6
|
[
"MIT"
] | 4
|
2021-04-01T15:53:55.000Z
|
2021-04-15T22:01:09.000Z
|
from main import *
import discord
from discord.ext import commands
from discord import File
from discord import Embed
from dotenv import load_dotenv
class addrole(commands.Cog):
    """Cog with role add/remove commands that log each action to a channel.

    NOTE(review): relies on globals pulled in via ``from main import *``
    (``admine_role``, ``black_list_role``, ``rolelogchannel``) -- confirm
    those names are defined there.
    """

    def __init__(self, bot):
        # Keep the bot so the commands can resolve the log channel later.
        self.bot = bot

    @commands.command()
    @commands.has_any_role(admine_role)
    async def addrole(self, ctx, user: discord.Member, role: discord.Role):
        # Refuse the command when the invoker carries the blacklisted role.
        # NOTE: the comprehension variable deliberately shadows the `role`
        # parameter only inside the comprehension scope.
        if black_list_role in [role.id for role in ctx.author.roles]:
            await ctx.send("لايمكن اضافة رول")
        else:
            # Build the audit embed (admin, target member, role granted).
            embed=discord.Embed(title="Roles Log", url="https://github.com/KINGMAN1996", description="تم اعطاء رول", color=0x0467da)
            embed.set_author(name="</>KMCodes", url="https://github.com/KINGMAN1996")
            embed.add_field(name="اسم الاداري", value=f"{ctx.author.mention}", inline=True)
            embed.add_field(name="اي دي الاداري", value=f"{ctx.author.id}", inline=True)
            embed.add_field(name="اسم العضو", value=f"{user.mention}", inline=True)
            embed.add_field(name="اي دي العضو", value=f"{user.id}", inline=True)
            embed.add_field(name="الرول", value=f"{role.mention}", inline=True)
            embed.set_footer(text="Power By </>KMCodes & MeCodes")
            rolelog = self.bot.get_channel(rolelogchannel)
            await user.add_roles(role)
            await ctx.send(f"تم اضافة رول {role.mention} بواسطة {ctx.author.mention}")
            await rolelog.send(embed=embed)

    @commands.command()
    @commands.has_any_role(admine_role)
    async def removerole(self, ctx, user: discord.Member, role: discord.Role):
        # Also blocks removal when the invoker does not outrank the target.
        # NOTE(review): the rejection message is the "cannot add role" text
        # reused from addrole -- possibly a copy/paste leftover; confirm.
        if black_list_role in [role.id for role in ctx.author.roles] or ctx.author.top_role <= user.top_role:
            await ctx.send("لايمكن اضافة رول")
        else:
            # Audit embed mirroring addrole, with a "role removed" description.
            embed=discord.Embed(title="Roles Log", url="https://github.com/KINGMAN1996", description="تم ازالة رول", color=0x0467da)
            embed.set_author(name="</>KMCodes", url="https://github.com/KINGMAN1996")
            embed.add_field(name="اسم الاداري", value=f"{ctx.author.mention}", inline=True)
            embed.add_field(name="اي دي الاداري", value=f"{ctx.author.id}", inline=True)
            embed.add_field(name="اسم العضو", value=f"{user.mention}", inline=True)
            embed.add_field(name="اي دي العضو", value=f"{user.id}", inline=True)
            embed.add_field(name="الرول", value=f"{role.mention}", inline=True)
            embed.set_footer(text="</>KMCodes KINGMAN")
            rolelog = self.bot.get_channel(rolelogchannel)
            await user.remove_roles(role)
            await ctx.send(f"تم ازالة رول {role.mention} بواسطة {ctx.author.mention}")
            await rolelog.send(embed=embed)
def setup(bot):
    """Entry point used by discord.py's extension loader."""
    cog = addrole(bot)
    bot.add_cog(cog)
| 43.116667
| 126
| 0.695014
| 382
| 2,587
| 4.612565
| 0.222513
| 0.045403
| 0.07378
| 0.096481
| 0.807037
| 0.807037
| 0.763905
| 0.736663
| 0.736663
| 0.736663
| 0
| 0.011894
| 0.155006
| 2,587
| 60
| 127
| 43.116667
| 0.794145
| 0.004639
| 0
| 0.510638
| 0
| 0
| 0.238058
| 0
| 0
| 0
| 0.006214
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.12766
| 0
| 0.191489
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ea66d770fe0402a79e1d243e55ddc913d7febae1
| 147,841
|
py
|
Python
|
XML2MapReduce/code_gen.py
|
YSmart/YSmart
|
fdf442ed19559105cc6c8fdabca665603ec1aef6
|
[
"Apache-2.0"
] | 10
|
2015-06-28T02:09:17.000Z
|
2021-04-18T01:20:38.000Z
|
XML2MapReduce/code_gen.py
|
YSmart/YSmart
|
fdf442ed19559105cc6c8fdabca665603ec1aef6
|
[
"Apache-2.0"
] | null | null | null |
XML2MapReduce/code_gen.py
|
YSmart/YSmart
|
fdf442ed19559105cc6c8fdabca665603ec1aef6
|
[
"Apache-2.0"
] | 8
|
2015-09-17T13:01:13.000Z
|
2020-03-31T15:35:03.000Z
|
#! /usr/bin/python
"""
Copyright (c) 2012 The Ohio State University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import commands
import os.path
import copy
import ystree
import correlation
import config
##input exp should be YFuncExp
# Java operator text for the SQL arithmetic function names.
math_func_dict = {"PLUS":" + ","MINUS":" - ","DIVIDE":" / ","MULTIPLY":" * "}
# SQL aggregate function names recognized by the group-by code paths.
agg_func_list = ["SUM","AVG","COUNT","MAX","MIN","COUNT_DISTINCT"]
# Java logical connectives for SQL AND/OR.
bool_func_dict = {"AND":" && ","OR":" || "}
# Java relational operators for the SQL comparison function names.
rel_func_dict = {"EQ":" == ","GTH":" > ", "LTH":" < ","NOT_EQ":" != ","GEQ":" >= ","LEQ":" <= "}
# Directory path and package name used for the generated Java sources.
packagepath = "edu/osu/cse/ysmart/"
packagename = "edu.osu.cse.ysmart"
###__select_func_convert_to_java__ is mainly used to convert the math function in the select list into jave expression
####input: @exp: the sql expression than you want to translate. If the exp is an agg operation, it will return the jave exp of its argument
####
def __select_func_convert_to_java__(exp,buf_dict):
    """Translate a select-list function expression into a Java expression.

    @exp: a ystree.YFuncExp (arithmetic or aggregate). For an aggregate,
        the Java expression of its single argument is returned.
    @buf_dict: maps table name -> name of the String[] line-buffer variable.

    Always returns a string -- an empty one for non-YFuncExp input --
    because every caller concatenates the result into another string.
    """
    return_str = ""
    if not isinstance(exp,ystree.YFuncExp):
        # Bug fix: this used to be a bare ``return`` (i.e. None), which
        # would raise TypeError in callers doing ``s += __select_...()``.
        return return_str
    if exp.func_name in math_func_dict.keys():
        ##### +, -, *, / operations: render "(lhs OP rhs)" recursively.
        para1 = exp.parameter_list[0]
        para2 = exp.parameter_list[1]
        if isinstance(para1,ystree.YRawColExp):
            return_str += "("
            return_str += __para_to_java__(para1.column_type,para1.column_name,buf_dict[para1.table_name])
        elif isinstance(para1,ystree.YFuncExp):
            return_str += "("
            return_str += __select_func_convert_to_java__(para1,buf_dict)
        else:
            # Constant operand: emit its literal text directly.
            return_str += "("
            return_str += para1.cons_value
        # Same four operators as before, taken from the shared lookup table
        # instead of a redundant if/elif chain.
        return_str += math_func_dict[exp.func_name]
        if isinstance(para2,ystree.YRawColExp):
            return_str += __para_to_java__(para2.column_type,para2.column_name,buf_dict[para2.table_name])
            return_str += ")"
        elif isinstance(para2,ystree.YFuncExp):
            return_str += __select_func_convert_to_java__(para2,buf_dict)
            return_str += ")"
        else:
            return_str += para2.cons_value
            return_str += ")"
    elif exp.func_name in agg_func_list:
        # Aggregates take exactly one argument; emit that argument's Java.
        if len(exp.parameter_list) != 1:
            print >>sys.stderr,"Internal Error:__select_func_convert_to_java__ "
            exit(29)
        para = exp.parameter_list[0]
        if isinstance(para,ystree.YRawColExp):
            return_str += __para_to_java__(para.column_type,para.column_name,buf_dict[para.table_name])
        elif isinstance(para,ystree.YFuncExp):
            return_str += "("
            return_str += __select_func_convert_to_java__(para, buf_dict)
            return_str += ")"
        else:
            return_str += str(para.cons_value)
    else:
        print >>sys.stderr,"Internal Error:__select_func_convert_to_java__"
        exit(29)
    return return_str
##### the difference between __gb_exp_to_java__ and __select_func_convert_to_java__ is how they handle the agg exp
def __gb_exp_to_java__(exp,gb_exp_list,buf_dict,hash_key):
    """Translate a group-by expression to Java, mapping aggregate
    sub-expressions to their slot in the aggregation buffer.

    @exp: a ystree.YFuncExp to translate.
    @gb_exp_list: the ordered list of aggregate expressions; an aggregate's
        index in this list selects the AGG buffer element to read.
    @buf_dict: maps table name (and "AGG") -> buffer variable name.
    @hash_key: when not None, the AGG buffers are hash maps and the value
        is fetched with ``.get(hash_key)`` instead of array indexing.
    """
    return_str = ""
    if not isinstance(exp,ystree.YFuncExp):
        print >>sys.stderr,"Internal Error:__gb_exp_to_java__"
        exit(29)
    if exp.func_name in agg_func_list:
        # Aggregates are not recomputed: read the already-accumulated value
        # out of the AGG buffer at the matching gb_exp_list position.
        for x in gb_exp_list:
            if x.compare(exp) is True:
                return_str += "("
                if hash_key is None:
                    return_str += buf_dict["AGG"] +"[" + str(gb_exp_list.index(x)) + "]"
                else:
                    return_str += buf_dict["AGG"] + "[" + str(gb_exp_list.index(x)) + "].get("+hash_key+")"
                return_str += ")"
                break
    elif exp.func_name in math_func_dict.keys():
        # Arithmetic: render "(lhs OP rhs)", recursing through this same
        # function so nested aggregates also hit the AGG-buffer path.
        para1 = exp.parameter_list[0]
        para2 = exp.parameter_list[1]
        if isinstance(para1,ystree.YRawColExp):
            return_str += "("
            return_str += __para_to_java__(para1.column_type,para1.column_name,buf_dict[para1.table_name])
        elif isinstance(para1,ystree.YFuncExp):
            return_str += "("
            return_str += __gb_exp_to_java__(para1,gb_exp_list,buf_dict,hash_key)
        else:
            return_str += "("
            return_str += para1.cons_value
        if exp.func_name == "PLUS":
            return_str += " + "
        elif exp.func_name == "MINUS":
            return_str += " - "
        elif exp.func_name == "DIVIDE":
            return_str += " / "
        elif exp.func_name == "MULTIPLY":
            return_str += " * "
        if isinstance(para2,ystree.YRawColExp):
            return_str += __para_to_java__(para2.column_type,para2.column_name,buf_dict[para2.table_name])
            return_str += ")"
        elif isinstance(para2,ystree.YFuncExp):
            return_str += __gb_exp_to_java__(para2,gb_exp_list,buf_dict,hash_key)
            return_str += ")"
        else:
            return_str += para2.cons_value
            return_str += ")"
    else:
        print >>sys.stderr,"Internal Error:__gb_exp_to_java__"
        exit(29)
    return return_str
def __para_to_java__(para_type,value,buf_name):
    """Render one operand as Java source.

    @para_type: SQL type ("INTEGER", "DECIMAL", "TEXT" or "DATE").
    @value: column index (when buf_name is given) or a constant value.
    @buf_name: name of the String[] buffer variable, or None for constants.
    """
    if buf_name is not None:
        # Column access: read buf[i] and parse it to the matching Java type.
        cell = buf_name + "[" + str(value) + "]"
        if para_type == "INTEGER":
            return_str = "Integer.parseInt(" + cell + ")"
        elif para_type == "DECIMAL":
            return_str = "Double.parseDouble(" + cell + ")"
        elif para_type in ("TEXT", "DATE"):
            # TEXT and DATE both stay as raw strings.
            return_str = cell
        else:
            print >>sys.stderr,"Internal Error:__para_to_java__"
            exit(29)
    else:
        # Constant: all four supported types are emitted as their string form.
        if para_type in ("INTEGER", "DECIMAL", "TEXT", "DATE"):
            return_str = str(value)
        else:
            print >>sys.stderr,"Internal Error:__para_to_java__"
            exit(29)
    return return_str
def __operator_to_java__(op_type,op_name,op_list):
    """Combine already-rendered operand strings with a Java operator.

    @op_type: SQL type of the operands ("INTEGER", "DECIMAL", "TEXT",
        "DATE", or "BOOLEAN" for logical connectives).
    @op_name: SQL operator name (a key of rel_func_dict, math_func_dict
        or bool_func_dict).
    @op_list: operand fragments already translated to Java.
    Returns the combined Java expression string.
    """
    res_str = ""
    if op_name in rel_func_dict.keys():
        if op_type == "INTEGER" or op_type == "DECIMAL":
            res_str = op_list[0] + rel_func_dict[op_name] + op_list[1]
        elif op_type == "TEXT" or op_type == "DATE":
            # TEXT and DATE compare identically: via compareTo() against 0.
            res_str = op_list[0] + ".compareTo(" + op_list[1] + ")" + rel_func_dict[op_name] +"0"
        # NOTE(review): any other op_type silently yields "" here, exactly
        # as in the original relational branch -- confirm it never occurs.
    elif op_name in math_func_dict.keys():
        if op_type == "INTEGER" or op_type == "DECIMAL":
            res_str = op_list[0] + math_func_dict[op_name] + op_list[1]
        elif op_type == "TEXT" or op_type == "DATE":
            # Arithmetic on strings/dates is unsupported.
            print >>sys.stderr,"Internal Error:__operator_to_java__"
            exit(29)
    elif op_name in bool_func_dict.keys():
        # Join all operands with " && " / " || "; replaces the original
        # counter-based loop with the equivalent str.join().
        res_str = bool_func_dict[op_name].join(op_list)
    else:
        print >>sys.stderr,"Internal Error:__operator_to_java__"
        exit(29)
    return res_str
#### translate the where exp to java.
def __where_convert_to_java__(exp,buf_dict):
    """Recursively translate a WHERE-clause expression tree to Java.

    @exp: a ystree.YFuncExp (relational/arithmetic/boolean/IS node) or a
        ystree.YConsExp boolean constant.
    @buf_dict: maps table name (and "AGG") -> buffer variable name.
    Returns the Java boolean/value expression as a string.
    """
    return_str = ""
    if isinstance(exp,ystree.YFuncExp):
        if exp.func_name in rel_func_dict.keys() or exp.func_name in math_func_dict.keys():
            # Binary operator: translate both operands, remember the operand
            # type, then let __operator_to_java__ pick the Java form.
            tmp_list = []
            op_type = None
            for tmp_exp in exp.parameter_list:
                if isinstance(tmp_exp,ystree.YRawColExp):
                    if tmp_exp.table_name != "AGG":
                        tmp_str = __para_to_java__(tmp_exp.column_type,tmp_exp.column_name,buf_dict[tmp_exp.table_name])
                    else:
                        # Aggregate results are read straight from the AGG buffer.
                        tmp_str = buf_dict[tmp_exp.table_name] + "[" + str(tmp_exp.column_name) + "]"
                    tmp_list.append(tmp_str)
                    op_type = tmp_exp.column_type
                elif isinstance(tmp_exp,ystree.YFuncExp):
                    # Nested expression: its result is treated as DECIMAL.
                    tmp_str = __where_convert_to_java__(tmp_exp,buf_dict)
                    tmp_list.append(tmp_str)
                    op_type = "DECIMAL"
                else:
                    tmp_str = tmp_exp.cons_value
                    tmp_list.append(tmp_str)
                    op_type = tmp_exp.cons_type
            if len(tmp_list) != 2:
                print >>sys.stderr,"Internal Error:__where_convert_to_java__"
                exit(29)
            return_str = __operator_to_java__(op_type,exp.func_name,tmp_list)
        elif exp.func_name in agg_func_list:
            # Aggregate in a HAVING-style predicate: read its buffered value.
            tmp_exp = exp.parameter_list[0]
            if isinstance(tmp_exp,ystree.YRawColExp):
                return_str = buf_dict[tmp_exp.table_name] + "[" + str(tmp_exp.column_name) + "]"
            elif isinstance(tmp_exp,ystree.YFuncExp):
                return_str = __where_convert_to_java__(tmp_exp,buf_dict)
        elif exp.func_name in bool_func_dict.keys():
            # AND/OR: translate every operand, then join with &&/||.
            tmp_list = []
            op_type = "BOOLEAN"
            for tmp_exp in exp.parameter_list:
                tmp_str = __where_convert_to_java__(tmp_exp,buf_dict)
                tmp_list.append(tmp_str)
            return_str = __operator_to_java__(op_type,exp.func_name,tmp_list)
        else:
            # Remaining supported form: "col IS <constant>", rendered as a
            # compareTo() == 0 test against the constant's string value.
            return_str += "("
            if exp.func_name == "IS":
                para1 = exp.parameter_list[0]
                para2 = exp.parameter_list[1]
                if isinstance(para1,ystree.YRawColExp):
                    return_str += buf_dict[para1.table_name] + "[" + str(para1.column_name) + "]"
                else:
                    print >>sys.stderr,"Internal Error:__where_convert_to_java__"
                    exit(29)
                return_str += ".compareTo(\""
                if isinstance(para2,ystree.YConsExp):
                    return_str += str(para2.cons_value)
                    return_str += "\") == 0"
            else:
                print >>sys.stderr,"Internal Error:__where_convert_to_java__"
                exit(29)
            return_str += ")"
    elif isinstance(exp, ystree.YConsExp):
        # Boolean constant folds directly to a Java literal.
        if exp.cons_type == "BOOLEAN":
            if exp.cons_value == "FALSE":
                return "false"
            else:
                return "true"
    return return_str
# SQL type -> Hadoop Writable class used for map/reduce keys and values.
sql_type_to_java = {"INTEGER":"IntWritable","DECIMAL":"DoubleWritable","TEXT":"Text","DATE":"Text"}
# Writable class -> boxed Java type used when keys go into hash maps.
java_type_to_hash = {"IntWritable":"Integer","DoubleWritable":"Double","Text":"String","Date":"String"}
def __get_key_value_type__(exp_list):
    """Return the Hadoop Writable type name for a key/value expression list."""
    if len(exp_list) != 1:
        # Multiple expressions get concatenated, so the result is always Text.
        return sql_type_to_java["TEXT"]
    exp = exp_list[0]
    if isinstance(exp,ystree.YRawColExp):
        return sql_type_to_java[exp.column_type]
    if isinstance(exp,ystree.YFuncExp):
        return sql_type_to_java[exp.get_value_type()]
    # Constant expression: use its declared constant type.
    return sql_type_to_java[exp.cons_type]
def __gen_des__(fo):
    """Write the generated-by banner comment at the top of the Java file.

    The backslash continuations keep the banner lines at column 0 inside
    one string literal; do not indent them.
    """
    print >>fo,"/*\n\
The following code is automatically generated by YSmart 12.01.\n\
Author: Rubao Lee, Yuan Yuan \n\
Email: [email protected]\n\
*/\n"
def __gen_header__(fo):
    """Write the package declaration and Hadoop imports of the Java file."""
    print >>fo, "package " + packagename + ";"
    print >>fo,"import java.io.IOException;"
    print >>fo,"import java.util.*;"
    print >>fo,"import java.text.*;"
    print >>fo,"import org.apache.hadoop.fs.Path;"
    print >>fo,"import org.apache.hadoop.conf.*;"
    print >>fo,"import org.apache.hadoop.io.*;"
    print >>fo,"import org.apache.hadoop.util.Tool;"
    print >>fo,"import org.apache.hadoop.util.ToolRunner;"
    print >>fo, "import org.apache.hadoop.mapreduce.Job;"
    print >>fo, "import org.apache.hadoop.mapreduce.Mapper;"
    print >>fo, "import org.apache.hadoop.mapreduce.Reducer;"
    print >>fo, "import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;"
    print >>fo, "import org.apache.hadoop.mapreduce.lib.input.FileSplit;"
    print >>fo, "import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;"
    print >>fo, "import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;"
    print >>fo, "import org.apache.hadoop.mapreduce.lib.partition.*;"
    print >>fo,"\n"
def __gen_mr_key__(exp_list,type,buf_dict):
    """Build the Java expression producing the map/reduce key string.

    @exp_list: expressions making up the key; empty list yields '" "'.
    @type: the Writable type name; "Text" keys join parts with a '|'
        separator, numeric keys are added.  (``type`` shadows the builtin,
        kept for interface compatibility.)
    @buf_dict: maps table name -> buffer variable name.

    NOTE(review): for "Text" the trailing strip removes only the final
    '+', so the emitted key ends with a '|' separator -- this appears
    intentional (keys are '|'-terminated); confirm against the reducers.
    """
    res = ""
    if len(exp_list) == 0:
        res = "\" \""
        return res
    for exp in exp_list:
        if isinstance(exp,ystree.YRawColExp):
            res += __para_to_java__(exp.column_type,exp.column_name,buf_dict[exp.table_name])
            if type == "Text":
                res += "+ \"|\" +"
            else:
                res += "+"
        elif isinstance(exp,ystree.YFuncExp):
            res += __select_func_convert_to_java__(exp,buf_dict)
            if type == "Text":
                res += "+ \"|\" +"
            else:
                res += "+"
        else:
            res += __para_to_java__(exp.cons_type,exp.cons_value,None)
            if type == "Text":
                res += "+ \"|\" +"
            else:
                res += "+"
    # Drop the dangling '+' left by the last iteration.
    res = res[:-1]
    return res
def __gen_mr_value__(exp_list,type,buf_dict):
    """Build the Java expression producing the map/reduce value string.

    @exp_list: expressions making up the value; empty list yields '" "'.
    @type: the Writable type name ("Text" joins parts with '|').
    @buf_dict: maps table name -> buffer variable name.

    The value string is assembled exactly like the key string, so this
    delegates to __gen_mr_key__ instead of keeping a byte-for-byte
    duplicate of its loop (the two bodies had drifted into copy/paste).
    """
    return __gen_mr_key__(exp_list,type,buf_dict)
def __get_max_index__(exp_list):
    """Return 1 + the largest column index referenced by exp_list.

    Used to size the String[] line buffer in the generated mappers;
    returns 0 when no column expressions are present.
    """
    ret = -1
    for exp in exp_list:
        if isinstance(exp,ystree.YRawColExp):
            if ret < int(exp.column_name):
                ret = int(exp.column_name)
        elif isinstance(exp,ystree.YFuncExp):
            # Function expressions may reference columns in their parameters.
            col_list = []
            ystree.__get_func_para__(exp,col_list)
            for x in col_list:
                if ret< int(x.column_name):
                    ret = int(x.column_name)
    # Convert the max 0-based index into a buffer length.
    ret = ret +1
    return ret
def __get_gb_exp__(exp,tmp_list):
    """Collect the aggregate sub-expressions of *exp* into tmp_list."""
    if not isinstance(exp,ystree.YFuncExp):
        return
    if exp.func_name in agg_func_list:
        # An aggregate node is collected whole; do not descend into it.
        tmp_list.append(exp)
        return
    for child in exp.parameter_list:
        __get_gb_exp__(child,tmp_list)
def __get_gbexp_list__(exp_list,gb_exp_list):
    """Append every aggregate expression found in exp_list to gb_exp_list,
    skipping duplicates as determined by the expressions' compare()."""
    for exp in exp_list:
        if not isinstance(exp,ystree.YFuncExp):
            continue
        found = []
        __get_gb_exp__(exp,found)
        for candidate in found:
            already_known = any(
                candidate.compare(known) is True for known in gb_exp_list)
            if not already_known:
                gb_exp_list.append(candidate)
def __tablenode_gen_mr__(tree,fo):
    """Emit a map-only Java MapReduce job for a plain table scan.

    The mapper splits each '|'-delimited input line into a String[]
    buffer, optionally applies the WHERE predicate, and writes the
    projected value with a NullWritable key.
    """
    line_buffer = "line_buf"
    if tree.select_list is None:
        print >>sys.stderr,"Internal Error:__tablenode_gen_mr__"
        exit(29)
    # Both the table name and its alias resolve to the same line buffer.
    buf_dict = {}
    buf_dict[tree.table_name] = line_buffer
    buf_dict[tree.table_alias] = line_buffer
    map_key_type = "NullWritable"
    map_value_type = __get_key_value_type__(tree.select_list.tmp_exp_list)
    map_value = __gen_mr_value__(tree.select_list.tmp_exp_list,map_value_type,buf_dict)
    # The buffer must be large enough for every column referenced by the
    # select list and (when present) the where condition.
    exp_list = tree.select_list.tmp_exp_list
    if tree.where_condition is not None:
        exp_list.append(tree.where_condition.where_condition_exp)
    max_index = __get_max_index__(exp_list)
    # --- mapper: tokenize the line on '|' into line_buf --------------------
    print >>fo,"\tpublic static class Map extends Mapper<Object, Text,"+map_key_type+","+map_value_type+">{\n"
    print >>fo,"\t\tpublic void map(Object key, Text value, Context context) throws IOException,InterruptedException{\n"
    print >>fo,"\t\t\tString line = value.toString();"
    print >>fo,"\t\t\tString[] "+ line_buffer +" = new String["+ str(max_index)+"];"
    print >>fo, "\t\t\tint prev=0,i=0,n=0;"
    print >>fo, "\t\t\tfor(i=0,n=0,prev=0;i<line.length();i++){\n"
    print >>fo, "\t\t\t\tif (line.charAt(i) == \'|\'){"
    print >>fo, "\t\t\t\t\t" + line_buffer + "[n] = line.substring(prev,i);"
    print >>fo, "\t\t\t\t\tn = n+1;"
    print >>fo, "\t\t\t\t\tprev = i+1;"
    print >>fo, "\t\t\t\t}"
    print >>fo, "\t\t\t\tif(n == "+str(max_index)+")"
    print >>fo, "\t\t\t\t\tbreak;"
    print >>fo,"\t\t\t}\n"
    print >>fo,"\t\t\tif(n<" + str(max_index) + ")"
    print >>fo,"\t\t\t\t"+line_buffer+ "[n] = line.substring(prev,i);"
    if tree.where_condition is None:
        # No predicate: emit every row.
        print >>fo,"\t\t\tNullWritable key_op = NullWritable.get();"
        tmp_output = "\t\t\tcontext.write("
        tmp_output += "key_op"
        tmp_output += " , "
        tmp_output += "new " + map_value_type + "(" + map_value + ")"
        tmp_output += ");"
        print >>fo, tmp_output
    else:
        # Wrap the write in the translated WHERE predicate.
        where_str = "\t\t\tif("
        where_str +=__where_convert_to_java__(tree.where_condition.where_condition_exp,buf_dict)
        where_str += "){\n"
        print >>fo,where_str
        print >>fo,"\t\t\t\tNullWritable key_op = NullWritable.get();"
        tmp_output = "\t\t\t\tcontext.write("
        tmp_output += "key_op"
        tmp_output += " , "
        tmp_output += "new " + map_value_type + "(" + map_value + ")"
        tmp_output += ");"
        print >>fo, tmp_output
        print >>fo,"\t\t\t}" # end of if
    print >>fo,"\t\t}\n"
    print >>fo,"\t}\n"
    __gen_main__(tree,fo,map_key_type,map_value_type,map_key_type,map_value_type,False)
def __orderby_gen_mr__(tree,fo):
    """Emit a Java MapReduce job implementing ORDER BY.

    The mapper emits the order-by columns as the key; sorting is delegated
    to Hadoop's KeyFieldBasedComparator with a key spec derived from the
    order-by types and ASC/DESC indicators, and a single reducer writes
    the values back out in sorted order.
    """
    line_buffer = "line_buf"
    buf_dict = {}
    for x in tree.table_list:
        buf_dict[x] = line_buffer
    od_len = len(tree.order_by_clause.orderby_exp_list)
    if isinstance(tree.child,ystree.TableNode):
        # Child is a raw table scan: derive key/value from its select list.
        map_key_type = __get_key_value_type__(tree.order_by_clause.orderby_exp_list)
        map_value_type = __get_key_value_type__(tree.child.select_list.tmp_exp_list)
        map_value = __gen_mr_value__(tree.child.select_list.tmp_exp_list,map_value_type,buf_dict)
        map_key = __gen_mr_key__(tree.child.select_list.tmp_exp_list[:od_len],map_key_type,buf_dict)
        max_index = __get_max_index__(tree.child.select_list.tmp_exp_list)
    else:
        # Child is another MR stage: its output columns are positional, so
        # the first od_len buffer slots form the key, the rest the value.
        map_key = ""
        for i in range(0,od_len):
            map_key += line_buffer + "[" + str(i) + "] +"
            map_key += "\"|\"+"
        map_key = map_key[:-1]
        map_key_type = "Text"
        map_value = ""
        for i in range(od_len,len(tree.child.select_list.tmp_exp_list)):
            map_value += line_buffer + "[" + str(i) + "] +"
            map_value += "\"|\"+"
        map_value = map_value[:-1]
        map_value_type = "Text"
        max_index = len(tree.child.select_list.tmp_exp_list)
    # --- mapper: tokenize the '|'-delimited line into line_buf -------------
    print >>fo,"\tpublic static class Map extends Mapper<Object, Text,"+map_key_type+","+map_value_type+">{\n"
    print >>fo,"\t\tpublic void map(Object key, Text value, Context context) throws IOException,InterruptedException{\n"
    print >>fo,"\t\t\tString line = value.toString();"
    print >>fo,"\t\t\tString[] "+ line_buffer +" = new String["+ str(max_index)+"];"
    print >>fo, "\t\t\tint prev=0,i=0,n=0;"
    print >>fo, "\t\t\tfor(i=0,n=0,prev=0;i<line.length();i++){\n"
    print >>fo, "\t\t\t\tif (line.charAt(i) == \'|\'){"
    print >>fo, "\t\t\t\t\t" + line_buffer + "[n] = line.substring(prev,i);"
    print >>fo, "\t\t\t\t\tn = n+1;"
    print >>fo, "\t\t\t\t\tprev = i+1;"
    print >>fo, "\t\t\t\t}"
    print >>fo, "\t\t\t\tif(n == "+str(max_index)+")"
    print >>fo, "\t\t\t\t\tbreak;"
    print >>fo,"\t\t\t}\n"
    print >>fo,"\t\t\tif(n<" + str(max_index) + ")"
    print >>fo,"\t\t\t\t"+line_buffer+ "[n] = line.substring(prev,i);"
    if not isinstance(tree.child,ystree.TableNode) or tree.child.where_condition is None:
        tmp_output = "\t\t\tcontext.write("
        tmp_output += "new " + map_key_type + "(" + map_key +")"
        tmp_output += ","
        tmp_output += "new " + map_value_type + "(" + map_value + ")"
        tmp_output += ");"
        print >>fo, tmp_output
    else:
        # Table-scan child with a predicate: guard the write with it.
        where_str = "\t\t\tif("
        where_str +=__where_convert_to_java__(tree.child.where_condition.where_condition_exp,buf_dict)
        where_str += "){\n"
        print >>fo,where_str
        tmp_output = "\t\t\t\tcontext.write( "
        tmp_output += "new " + map_key_type + "(" + map_key +")"
        tmp_output += ","
        tmp_output += "new " + map_value_type + "(" + map_value + ")"
        tmp_output += ");"
        print >>fo, tmp_output
        print >>fo,"\t\t\t}" # end of if
    print >>fo,"\t\t}\n"
    print >>fo,"\t}\n"
    ###### orderby reduce part: identity reducer that drops the sort key
    reduce_key_type = "NullWritable"
    reduce_value_type = "Text"
    print >>fo,"\tpublic static class Reduce extends Reducer<"+ map_key_type+","+map_value_type+","+reduce_key_type+","+reduce_value_type+">{\n"
    print >>fo,"\t\tpublic void reduce("+map_key_type+" key, Iterable<"+map_value_type+"> v, Context context) throws IOException,InterruptedException{\n"
    print >>fo, "\t\t\tIterator values = v.iterator();"
    print >>fo, "\t\t\tNullWritable key_op = NullWritable.get();"
    print >>fo,"\t\t\twhile(values.hasNext()){\n"
    print >>fo, "\t\t\t\tString tmp = values.next().toString();"
    print >>fo, "\t\t\t\tcontext.write(key_op,new Text(tmp));"
    print >>fo, "\t\t\t}\n"
    print >>fo,"\t\t}\n"
    print >>fo,"\t}\n"
    #### orderby main: build a -kM,Mnr style key spec per order-by column
    key_spec = ""
    for i in range(0,od_len):
        k= i+1
        key_spec += "-k" + str(k) + "," + str(k)
        exp = tree.order_by_clause.orderby_exp_list[i]
        if exp.get_value_type() in ["INTEGER","DECIMAL"]:
            # 'n' requests numeric comparison for this field.
            key_spec += "n"
        order= tree.order_by_clause.order_indicator_list[i]
        if order == "DESC":
            # 'r' reverses the sort order for this field.
            key_spec += "r"
        key_spec += " "
    print >>fo,"\tpublic int run(String[] args) throws Exception{\n"
    # The Java class name is derived from the output file name.
    job_name = fo.name.split(".java")[0]
    print >>fo, "\t\tConfiguration conf = new Configuration();"
    print >>fo, "\t\tconf.set(\"mapreduce.partition.keycomparator.options\",\"" + key_spec + "\");"
    print >>fo, "\t\tconf.set(\"mapreduce.map.output.key.field.separator\", \"|\");"
    print >>fo, "\t\tJob job = new Job(conf, \"" + job_name + "\");"
    print >>fo, "\t\tjob.setJarByClass(" + job_name + ".class" + ");"
    print >>fo, "\t\tjob.setSortComparatorClass(KeyFieldBasedComparator.class);"
    print >>fo, "\t\tjob.setPartitionerClass(KeyFieldBasedPartitioner.class);"
    print >>fo,"\t\tjob.setMapOutputKeyClass(" + map_key_type+".class);"
    print >>fo,"\t\tjob.setMapOutputValueClass(" + map_value_type+ ".class);"
    print >>fo,"\t\tjob.setOutputKeyClass("+reduce_key_type+".class);"
    print >>fo,"\t\tjob.setOutputValueClass("+reduce_value_type+".class);"
    print >>fo,"\t\tjob.setMapperClass(Map.class);"
    print >>fo,"\t\tjob.setReducerClass(Reduce.class);"
    # A single reducer guarantees one globally-sorted output file.
    print >>fo, "\t\tjob.setNumReduceTasks(1);"
    print >>fo,"\t\tFileInputFormat.addInputPath(job, new Path(args[0]));"
    print >>fo,"\t\tFileOutputFormat.setOutputPath(job, new Path(args[1]));"
    print >>fo, "\t\treturn (job.waitForCompletion(true) ? 0 : 1);"
    print >>fo,"\t}\n"
    print >>fo, "\tpublic static void main(String[] args) throws Exception {\n "
    print >>fo, "\t\tint res = ToolRunner.run(new Configuration(), new "+job_name + "(),args);"
    print >>fo,"\t\tSystem.exit(res);"
    print >>fo,"\t}\n"
def __groupby_gen_mr__(tree,fo):
    """Generate a complete Java MapReduce job for a GROUP BY plan node.

    Writes a Mapper class, a Reducer class and (through __gen_main__) the
    job driver to the open file `fo`.  Two code shapes are produced:

      * config.advanced_agg True  -- the mapper pre-aggregates into
        per-function Hashtables and flushes partial results in cleanup(),
        so the reducer only merges partials;
      * config.advanced_agg False -- the mapper forwards raw key/value
        rows and the reducer computes every aggregate (forced whenever a
        COUNT_DISTINCT appears, since that cannot be pre-aggregated).

    tree: GROUP BY node of the query tree; must carry select_list,
          group_by_clause and a child node supplying the input schema.
    fo:   output file object receiving the generated Java source.
    """
    ##### groupby map part
    # Name of the String[] that holds one split input row in the generated Java.
    line_buffer = "line_buf"
    if tree.select_list is None or tree.child.select_list is None:
        print >>sys.stderr,"Internal Error:__groupby_gen_mr__"
        exit(29)
    buf_dict = {}
    # Every table feeding this node reads its columns from the same row buffer.
    for x in tree.child.table_list:
        buf_dict[x] = line_buffer
    if tree.group_by_clause is None:
        print >>sys.stderr,"Internal Error:__groupby_gen_mr__"
        exit(29)
    else:
        gb_len = len(tree.group_by_clause.groupby_exp_list)
    exp_list = list(tree.child.select_list.tmp_exp_list)
    if isinstance(tree.child,ystree.TableNode):
        # Child is a base table: derive key/value Java expressions from the
        # child's select list (first gb_len entries are the grouping columns).
        map_key_type = __get_key_value_type__(tree.group_by_clause.groupby_exp_list)
        map_key = __gen_mr_key__(tree.child.select_list.tmp_exp_list[:gb_len],map_key_type,buf_dict)
        map_value_type = __get_key_value_type__(tree.child.select_list.tmp_exp_list[gb_len:])
        map_value = __gen_mr_value__(tree.child.select_list.tmp_exp_list[gb_len:],map_value_type,buf_dict)
        if tree.child.where_condition is not None:
            exp_list.append(tree.child.where_condition.where_condition_exp)
        max_index = __get_max_index__(exp_list)
    else:
        # Child is an intermediate result: columns arrive positionally, so
        # key and value are just "|"-joined buffer slots.
        map_key = ""
        map_key_type = "Text"
        for i in range(0,gb_len):
            map_key += line_buffer + "[" + str(i) + "]" + "+ \"|\"+"
        map_key = map_key[:-1]    # drop the trailing '+'
        map_value_type = "Text"
        map_value = ""
        for i in range(gb_len,len(tree.child.select_list.tmp_exp_list)):
            map_value += line_buffer + "[" + str(i) + "]" + "+ \"|\"+"
        map_value = map_value[:-1]
        max_index = len(tree.child.select_list.tmp_exp_list)
    gb_exp_list = []
    __get_gbexp_list__(tree.select_list.tmp_exp_list,gb_exp_list)
    # COUNT_DISTINCT cannot be pre-aggregated on the map side: fall back to
    # reducer-only aggregation for the whole job.
    for exp in gb_exp_list:
        if ystree.__groupby_func_name__(exp) == "COUNT_DISTINCT":
            config.advanced_agg = False
            break
    if config.advanced_agg is True:
        # AVG partials travel as "sum&count" strings, so the map value must be Text.
        for exp in gb_exp_list:
            if ystree.__groupby_func_name__(exp) == "AVG":
                map_value_type = "Text"
    print >>fo,"\tpublic static class Map extends Mapper<Object, Text,"+ map_key_type+","+map_value_type+">{\n"
    # Java-side identifier names used by the generated mapper.
    adv_gb_output = "adv_gb_output"
    adv_count_output = "adv_count_output"
    adv_dc_output = "adv_dc_output"    # NOTE(review): appears unused in this function
    hash_type = java_type_to_hash[map_key_type]
    if config.advanced_agg is True:
        # Mapper-side aggregation state: one Hashtable per aggregate function
        # plus a per-key row counter; flushed to the reducer in cleanup().
        print >>fo,"\t\tHashtable<"+hash_type+",Double>[] "+adv_gb_output+"=new Hashtable[" + str(len(gb_exp_list)) + "];"
        print >>fo,"\t\tHashtable<"+hash_type+",Integer> "+adv_count_output+"=new Hashtable<"+hash_type+",Integer>();"
        print >>fo,"\t\tpublic void setup(Context context) throws IOException, InterruptedException {\n"
        print >>fo,"\t\t\tfor(int i =0;i<"+str(len(gb_exp_list)) + ";i++){"
        print >>fo,"\t\t\t\t" + adv_gb_output + "[i] = new Hashtable<"+hash_type+",Double>();"
        print >>fo,"\t\t\t}"
        print >>fo,"\t\t}\n"
        print >>fo,"\t\tpublic void cleanup(Context context) throws IOException, InterruptedException {\n"
        print >>fo,"\t\t\tfor("+hash_type+" tmp_key:"+adv_count_output+".keySet()){"
        print >>fo,"\t\t\t\tDouble count = (double) "+adv_count_output+".get(tmp_key);"
        map_value = ""
        # Emit one partial-aggregate field per function; AVG ships "sum&count".
        for i in range(0,len(gb_exp_list)):
            exp = gb_exp_list[i]
            func_name = ystree.__groupby_func_name__(exp)
            if func_name == "AVG":
                print >>fo,"\t\t\t\tDouble avg_"+str(i)+" = "+adv_gb_output+"["+str(i) + "].get(tmp_key);"
                map_value += "avg_"+str(i)+" + \"&\"+count+\"|\"+"
            elif func_name == "COUNT":
                print >>fo,"\t\t\t\t"+adv_gb_output +"["+str(i)+"].put(tmp_key.toString(),count);"
                if map_value_type == "Text":
                    map_value += "count + \"&\"+\"|\"+"
                else:
                    map_value += "count"
            else:
                print >>fo,"\t\t\t\tDouble tmp_"+str(i)+" = "+adv_gb_output+"["+str(i)+"].get(tmp_key);"
                if map_value_type == "Text":
                    map_value += "tmp_"+str(i)+" + \"&\"+\"|\"+"
                else:
                    map_value += "tmp_" + str(i)
        if map_key_type == "Text":
            if map_value_type == "Text":
                map_value = map_value[:-1]    # drop the trailing '+'
            print >>fo,"\t\t\t\tcontext.write(new Text(tmp_key.toString()),new "+map_value_type+"("+map_value+"));"
        else:
            if map_value_type == "Text":
                map_value = map_value[:-1]
            print >>fo,"\t\t\t\tcontext.write(new "+map_key_type+"(tmp_key),new "+map_value_type+"("+map_value+"));"
        print >>fo,"\t\t\t}"
        print >>fo,"\t\t}"
    # map(): split one '|'-delimited input line into line_buf[0..max_index).
    print >>fo,"\t\tpublic void map(Object key, Text value, Context context) throws IOException,InterruptedException{\n"
    print >>fo,"\t\t\tString line = value.toString();"
    print >>fo,"\t\t\tString[] "+ line_buffer +" = new String["+ str(max_index)+"];"
    print >>fo, "\t\t\tint prev=0,i=0,n=0;"
    print >>fo, "\t\t\tfor(i=0,n=0,prev=0;i<line.length();i++){\n"
    print >>fo, "\t\t\t\tif (line.charAt(i) == \'|\'){"
    print >>fo, "\t\t\t\t\t" + line_buffer + "[n] = line.substring(prev,i);"
    print >>fo, "\t\t\t\t\tn = n+1;"
    print >>fo, "\t\t\t\t\tprev = i+1;"
    print >>fo, "\t\t\t\t}"
    print >>fo, "\t\t\t\tif(n == "+str(max_index)+")"
    print >>fo, "\t\t\t\t\tbreak;"
    print >>fo,"\t\t\t}\n"
    # Last field may not be terminated by '|': copy the tail slice.
    print >>fo,"\t\t\tif(n<" + str(max_index) + ")"
    print >>fo,"\t\t\t\t"+line_buffer+ "[n] = line.substring(prev,i);"
    if config.advanced_agg is True:
        ### map part agg
        # Build the Java expression that forms the hash key for this row.
        hash_key = ""
        buf_dict = {}
        for x in tree.child.table_list:
            buf_dict[x] = line_buffer
        if isinstance(tree.child,ystree.TableNode):
            hash_key = __gen_mr_key__(tree.child.select_list.tmp_exp_list[:gb_len],map_key_type,buf_dict)
        else:
            for i in range(0,gb_len):
                hash_key += line_buffer + "[" + str(i) + "]+"
                hash_key += "\"|\"+"
            hash_key = hash_key[:-1]    # drop the trailing '+'
        print >>fo,"\t\t\t"+hash_type+" hash_key = "+hash_key + ";"
        # Optional pushed-down WHERE filter wraps the aggregation updates.
        if isinstance(tree.child,ystree.TableNode) and tree.child.where_condition is not None:
            where_str = "\t\t\tif("
            where_str +=__where_convert_to_java__(tree.child.where_condition.where_condition_exp,buf_dict)
            where_str += "){\n"
            print >>fo,where_str
        buf_dict = {}
        for tn in tree.table_list:
            buf_dict[tn] = line_buffer
        # Per-key row counter update.
        print >>fo,"\t\t\tif(" + adv_count_output +".containsKey(hash_key)){"
        print >>fo,"\t\t\t\tInteger count = "+adv_count_output+".get(hash_key)+1;"
        print >>fo,"\t\t\t\t"+adv_count_output+".put(hash_key,count);"
        print >>fo,"\t\t\t}else{"
        print >>fo,"\t\t\t\t"+adv_count_output+".put(hash_key,1);"
        print >>fo,"\t\t\t}"
        # One Hashtable update per aggregate function.
        for i in range(0,len(gb_exp_list)):
            exp = gb_exp_list[i]
            func_name = ystree.__groupby_func_name__(exp)
            tmp = ""
            if isinstance(tree.child,ystree.TableNode):
                # Remap column indices through the child's select list before
                # converting the aggregate argument to a Java expression.
                tmp_exp = copy.deepcopy(exp)
                col_list = []
                ystree.__get_func_para__(tmp_exp,col_list)
                for x in col_list:
                    x.column_name = tree.child.select_list.tmp_exp_list[x.column_name].column_name
                tmp = __select_func_convert_to_java__(tmp_exp,buf_dict)
            else:
                tmp = __select_func_convert_to_java__(exp,buf_dict)
            if func_name == "MAX":
                print >>fo,"\t\t\tif(" + adv_gb_output + "["+str(i)+"].containsKey(hash_key)){"
                print >>fo,"\t\t\t\tDouble max_tmp = (double)" + tmp + ";"
                print >>fo,"\t\t\t\tif(max_tmp > "+adv_gb_output+"["+str(i)+"].get(hash_key))"
                print >>fo,"\t\t\t\t\t"+adv_gb_output+"["+str(i)+"].put(hash_key,max_tmp);"
                print >>fo,"\t\t\t}else{"
                print >>fo,"\t\t\t\t" + adv_gb_output+"["+str(i)+"].put(hash_key,(double)" + tmp + ");"
                print >>fo,"\t\t\t}"
            elif func_name == "MIN":
                print >>fo,"\t\t\tif(" + adv_gb_output + "["+str(i)+"].containsKey(hash_key)){"
                print >>fo,"\t\t\t\tDouble min_tmp = (double)"+tmp +";"
                print >>fo,"\t\t\t\tif(min_tmp < "+adv_gb_output+"["+str(i)+"].get(hash_key))"
                print >>fo,"\t\t\t\t\t"+adv_gb_output+"["+str(i)+"].put(hash_key,min_tmp);"
                print >>fo,"\t\t\t}else{"
                print >>fo,"\t\t\t\t" + adv_gb_output+"["+str(i)+"].put(hash_key,(double)"+tmp + ");"
                print >>fo,"\t\t\t}"
            elif func_name == "SUM" or func_name == "AVG":
                # AVG accumulates a running sum here; count is kept separately.
                print >>fo,"\t\t\tif(" + adv_gb_output + "["+str(i)+"].containsKey(hash_key)){"
                print >>fo,"\t\t\t\tDouble sum_tmp = (double)"+tmp+";"
                print >>fo,"\t\t\t\tsum_tmp += " +adv_gb_output+"[" +str(i)+"].get(hash_key);"
                print >>fo,"\t\t\t\t"+adv_gb_output+"["+str(i)+"].put(hash_key, sum_tmp);"
                print >>fo,"\t\t\t}else{"
                print >>fo,"\t\t\t\t" + adv_gb_output+"["+str(i)+"].put(hash_key,(double)"+tmp+");"
                print >>fo,"\t\t\t}"
        if isinstance(tree.child,ystree.TableNode) and tree.child.where_condition is not None:
            print >>fo,"\t\t\t}\n"### end where condition
    else:
        #### no map part agg
        # Plain mapper: emit the key/value pair, optionally guarded by the
        # pushed-down WHERE filter.
        if not isinstance(tree.child,ystree.TableNode) or tree.child.where_condition is None:
            tmp_output = "\t\t\tcontext.write("
            tmp_output += "new " + map_key_type + "(" + map_key +")"
            tmp_output += ","
            tmp_output += "new " + map_value_type + "(" + map_value + ")"
            tmp_output += ");"
            print >>fo, tmp_output
        else:
            where_str = "\t\t\tif("
            where_str +=__where_convert_to_java__(tree.child.where_condition.where_condition_exp,buf_dict)
            where_str += "){\n"
            print >>fo,where_str
            tmp_output = "\t\t\t\tcontext.write( "
            tmp_output += "new " + map_key_type + "(" + map_key +")"
            tmp_output += ","
            tmp_output += "new " + map_value_type + "(" + map_value + ")"
            tmp_output += ");"
            print >>fo, tmp_output
            print >>fo,"\t\t\t}" # end of if
    print >>fo,"\t\t}\n"
    print >>fo,"\t}\n"
    ###### groupby reduce part
    # Java-side identifier names used by the generated reducer.
    line_counter = "al_line"
    agg_buffer = "result"
    d_count_buffer = "d_count_buf"
    buf_dict = {}
    for x in tree.table_list:
        buf_dict[x] = line_buffer
    reduce_key_type = "NullWritable"
    reduce_value_type = "Text"
    print >>fo,"\tpublic static class Reduce extends Reducer<"+ map_key_type+","+map_value_type+","+reduce_key_type+","+reduce_value_type+">{\n"
    print >>fo,"\t\tpublic void reduce("+map_key_type+" key, Iterable<"+map_value_type+"> v, Context context) throws IOException,InterruptedException{\n"
    print >>fo, "\t\t\tIterator values = v.iterator();"
    print >>fo, "\t\t\tDouble[] "+agg_buffer+" = new Double[" + str(len(gb_exp_list)) + "];"
    print >>fo, "\t\t\tArrayList[] "+d_count_buffer+" = new ArrayList[" + str(len(gb_exp_list)) + "];"
    print >>fo, "\t\t\tString tmp = \"\";"
    print >>fo, "\t\t\tfor(int i=0;i<"+str(len(gb_exp_list))+";i++){\n"
    print >>fo, "\t\t\t\t"+agg_buffer+"[i] = 0.0;"
    print >>fo, "\t\t\t\t" + d_count_buffer + "[i] = new ArrayList();"
    print >>fo, "\t\t\t}\n"
    if config.advanced_agg is False:
        ### no map agg
        # Reducer does all aggregation from the raw forwarded rows.
        print >>fo, "\t\t\tint " + line_counter + " = 0;"
        print >>fo,"\t\t\twhile(values.hasNext()){\n"
        print >>fo, "\t\t\t\ttmp = values.next().toString();"
        # Re-join key and value so positional column indices line up again.
        if map_key_type == "Text":
            print >>fo, "\t\t\t\ttmp = key.toString().concat(tmp);"
        else:
            print >>fo, "\t\t\t\ttmp = key.toString().concat(\"|\" + tmp);"
        print >>fo, "\t\t\t\tString[] " + line_buffer + " = tmp.split(\"\\\|\");"
        for i in range(0,len(gb_exp_list)):
            exp = gb_exp_list[i]
            tmp_output = __select_func_convert_to_java__(exp,buf_dict)
            tmp_name = ystree.__groupby_func_name__(exp)
            if tmp_name == "SUM" or tmp_name == "AVG":
                print >>fo, "\t\t\t\t"+agg_buffer+"[" + str(i) + "] = "+agg_buffer+"[" +str(i) + "] + " + tmp_output + ";"
            elif tmp_name == "COUNT_DISTINCT":
                # Track distinct values in an ArrayList; size() is taken later.
                print >>fo, "\t\t\t\tif("+d_count_buffer+"[" + str(i) + "].contains(" +tmp_output+ ") == false)"
                print >>fo, "\t\t\t\t\t"+d_count_buffer+"[" + str(i) + "].add(" + tmp_output + ");"
            elif tmp_name == "MAX":
                print >>fo,"\t\t\t\tif("+line_counter+"==0)"
                print >>fo,"\t\t\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)" + tmp_output + ";"
                print >>fo,"\t\t\t\telse{"
                print >>fo, "\t\t\t\t\tif("+agg_buffer+"[" + str(i) + "] < " + tmp_output + ")"
                print >>fo, "\t\t\t\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)" + tmp_output + ";"
                print >>fo, "\t\t\t\t}"
            elif tmp_name == "MIN":
                print >>fo,"\t\t\t\tif("+line_counter+"==0)"
                print >>fo,"\t\t\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)" + tmp_output + ";"
                print >>fo,"\t\t\t\telse{"
                print >>fo, "\t\t\t\t\tif("+agg_buffer+"[" + str(i) + "] > " + tmp_output + ")"
                print >>fo, "\t\t\t\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)" + tmp_output + ";"
                print >>fo, "\t\t\t\t}"
        print >>fo, "\t\t\t\t" + line_counter + "++;"
        print >>fo, "\t\t\t}\n" ### end of while
    else:
        ## map part agg
        # Reducer merges mapper partials; "&"-separated fields carry sum/count.
        print >>fo, "\t\t\tint[] " + line_counter + " = new int["+str(len(gb_exp_list)) + "];"
        print >>fo,"\t\t\tfor(int i=0;i<"+str(len(gb_exp_list)) + ";i++){"
        print >>fo,"\t\t\t\t"+line_counter+"["+str(i)+"] = 0;"
        print >>fo,"\t\t\t}"
        print >>fo,"\t\t\tint tmp_count = 0;"
        print >>fo,"\t\t\twhile(values.hasNext()){\n"
        print >>fo,"\t\t\t\tString[] tmp_buf = values.next().toString().split(\"\\\|\");"
        print >>fo,"\t\t\t\ttmp = key.toString();"
        print >>fo,"\t\t\t\tString[] agg_tmp;"
        for i in range(0,len(gb_exp_list)):
            exp = gb_exp_list[i]
            func_name = ystree.__groupby_func_name__(exp)
            print >>fo,"\t\t\t\tagg_tmp = tmp_buf["+str(i)+"].split(\"&\");"
            if func_name == "SUM":
                print >>fo, "\t\t\t\t"+agg_buffer+"[" + str(i) + "] += Double.parseDouble(agg_tmp[0]);"
            elif func_name == "MIN":
                print >>fo,"\t\t\t\tif(tmp_count==0)"
                print >>fo,"\t\t\t\t\t"+agg_buffer+"[" + str(i) + "]= Double.parseDouble(agg_tmp[0]);"
                print >>fo,"\t\t\t\telse if("+agg_buffer+"["+str(i)+"]>Double.parseDouble(agg_tmp[0]))"
                print >>fo,"\t\t\t\t\t"+agg_buffer+"[" + str(i) + "]= Double.parseDouble(agg_tmp[0]);"
            elif func_name == "MAX":
                print >>fo,"\t\t\t\tif(tmp_count==0)"
                print >>fo,"\t\t\t\t\t"+agg_buffer+"[" + str(i) + "]= Double.parseDouble(agg_tmp[0]);"
                print >>fo,"\t\t\t\telse if("+agg_buffer+"["+str(i)+"]<Double.parseDouble(agg_tmp[0]))"
                print >>fo,"\t\t\t\t\t"+agg_buffer+"[" + str(i) + "]= Double.parseDouble(agg_tmp[0]);"
            elif func_name == "COUNT":
                print >>fo, "\t\t\t\t"+line_counter+"["+str(i)+"]+= Double.parseDouble(agg_tmp[0]);"
            elif func_name == "AVG":
                # agg_tmp[0] is the partial sum, agg_tmp[1] the partial count.
                print >>fo, "\t\t\t\t"+agg_buffer+"["+str(i)+"] += Double.parseDouble(agg_tmp[0]);"
                print >>fo, "\t\t\t\t"+line_counter+"["+str(i)+"]+= Double.parseDouble(agg_tmp[1]);"
        print >>fo,"\t\t\t\ttmp_count++;"
        print >>fo,"\t\t\t}" #### end of while
    print >>fo, "\t\t\tString[] " + line_buffer + " = tmp.split(\"\\\|\");"
    # Finalize AVG/COUNT(_DISTINCT) results into the Double[] result buffer.
    if config.advanced_agg is True:
        for i in range(0,len(gb_exp_list)):
            exp = gb_exp_list[i]
            if not isinstance(exp,ystree.YFuncExp):
                print >>sys.stderr,"Internal Error:__groupby_gen_mr__"
                exit(29)
            tmp_name = ystree.__groupby_func_name__(exp)
            if tmp_name == "AVG":
                print >>fo, "\t\t\t"+agg_buffer+"[" + str(i) + "] = "+agg_buffer+"[" + str(i) + "] /"+line_counter+"["+str(i)+"];"
            elif tmp_name == "COUNT":
                print >>fo, "\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)"+ line_counter + "["+str(i)+"];"
    else:
        for i in range(0,len(gb_exp_list)):
            exp = gb_exp_list[i]
            if not isinstance(exp,ystree.YFuncExp):
                print >>sys.stderr,"Internal Error:__groupby_gen_mr__"
                exit(29)
            tmp_name = ystree.__groupby_func_name__(exp)
            if tmp_name == "AVG":
                print >>fo, "\t\t\t"+agg_buffer+"[" + str(i) + "] = "+agg_buffer+"[" + str(i) + "] /"+line_counter+";"
            elif tmp_name == "COUNT":
                print >>fo, "\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)"+ line_counter+";"
            elif tmp_name == "COUNT_DISTINCT":
                print >>fo, "\t\t\t"+agg_buffer+"[" + str(i) + "] = (double)"+d_count_buffer+"["+str(i)+"].size();"
    # HAVING columns sit at the tail of the select list; they are computed but
    # not emitted in the reduce output.
    col_list = []
    if tree.having_clause is not None:
        ystree.__get_gb_list__(tree.having_clause.where_condition_exp,col_list)
    having_len = len(col_list)
    buf_dict = {}
    for x in tree.table_list:
        buf_dict[x] = line_buffer
    buf_dict["AGG"] = agg_buffer
    # Build the "|"-joined Java expression for the reduce output value.
    reduce_value = ""
    for i in range(0,len(tree.select_list.tmp_exp_list)-having_len):
        exp = tree.select_list.tmp_exp_list[i]
        if isinstance(exp,ystree.YFuncExp):
            tmp_list = []
            __get_gb_exp__(exp,tmp_list)
            if len(tmp_list) >0:
                # Expression contains aggregates: read from the result buffer.
                reduce_value += __gb_exp_to_java__(exp,gb_exp_list,buf_dict,None)
                if reduce_value_type == "Text":
                    reduce_value += " + \"|\""
                reduce_value += "+"
            else:
                reduce_value += __select_func_convert_to_java__(exp,buf_dict)
                if reduce_value_type == "Text":
                    reduce_value += " + \"|\""
                reduce_value += "+"
        elif isinstance(exp,ystree.YRawColExp):
            reduce_value += __para_to_java__(exp.column_type,exp.column_name,line_buffer)
            if reduce_value_type == "Text":
                reduce_value += " + \"|\""
            reduce_value += "+"
        else:
            # Constant expression.
            reduce_value += __para_to_java__(exp.cons_type,exp.cons_value,None)
            if reduce_value_type == "Text":
                reduce_value += " + \"|\""
            reduce_value += "+"
    reduce_value = reduce_value[:-1]    # drop the trailing '+'
    if reduce_value == "":
        reduce_value = "\" \""
    print >>fo, "\t\t\tNullWritable key_op = NullWritable.get();"
    if tree.where_condition is not None:
        # HAVING predicate: rewrite aggregate references inside it to point at
        # the AGG result buffer, then guard the context.write with it.
        tmp_list = []
        __get_gb_exp__(tree.where_condition.where_condition_exp,tmp_list)
        for tmp in tmp_list:
            for exp in gb_exp_list:
                if tmp.compare(exp) is True:
                    func_obj = tmp.func_obj
                    exp_index = gb_exp_list.index(exp)
                    new_exp = ystree.YRawColExp("AGG",exp_index)
                    new_exp.column_name = int(new_exp.column_name)
                    new_exp.column_type = tmp.get_value_type()
                    func_obj.replace(tmp,new_exp)
                    break
        buf_dict = {}
        buf_dict["AGG"] = agg_buffer
        for x in tree.table_list:
            buf_dict[x] = line_buffer
        tmp_output = "\t\t\tif("+ __where_convert_to_java__(tree.where_condition.where_condition_exp,buf_dict) + "){\n"
        tmp_output += "\t\t\t\tcontext.write(key_op"
        tmp_output += ","
        tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
        tmp_output += ");"
        tmp_output += "\t\t\t}\n"
    else:
        tmp_output = "\t\t\tcontext.write(key_op"
        tmp_output += ","
        tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
        tmp_output += ");"
    print >>fo, tmp_output
    print >>fo,"\t\t}\n" ### end of reduce func
    print >>fo,"\t}\n"
    __gen_main__(tree,fo,map_key_type,map_value_type,reduce_key_type,reduce_value_type,True)
def __get_join_key__(exp,col_list,table):
    """Collect into `col_list` the join-key column(s) for one join side.

    Walks the condition expression recursively; a comparison between two
    raw columns of *different* tables is treated as a join predicate, and
    the column belonging to `table` ("LEFT"/"RIGHT") is recorded (the
    other operand is recorded when neither side matches).
    """
    if exp is None or not isinstance(exp,ystree.YFuncExp):
        return
    if len(exp.parameter_list) == 2:
        lhs = exp.parameter_list[0]
        rhs = exp.parameter_list[1]
        both_are_columns = (isinstance(lhs,ystree.YRawColExp)
                            and isinstance(rhs,ystree.YRawColExp))
        if both_are_columns and lhs.table_name != rhs.table_name:
            # Cross-table column comparison: this is a join predicate.
            col_list.append(lhs if lhs.table_name == table else rhs)
            return
    # Not a join predicate itself -- recurse into nested function expressions.
    for child in exp.parameter_list:
        if isinstance(child,ystree.YFuncExp):
            __get_join_key__(child,col_list,table)
### replace the exp with NULL if its table name is not the specified one.
def __gen_func_exp__(exp,table_name):
    """Clone `exp`, masking out columns that do not belong to `table_name`.

    Every raw column reference from another table is replaced by the
    constant "NULL"; nested function expressions are rewritten recursively.
    Returns the rewritten YFuncExp, or None when `exp` is not a YFuncExp.
    """
    if not isinstance(exp,ystree.YFuncExp):
        return None
    rewritten = []
    for para in exp.parameter_list:
        if isinstance(para,ystree.YRawColExp):
            if para.table_name == table_name:
                rewritten.append(para)
            else:
                # Column from the other side of the join: mask with NULL.
                rewritten.append(ystree.YConsExp("\"NULL\"","TEXT"))
        elif isinstance(para,ystree.YFuncExp):
            sub_exp = __gen_func_exp__(para,table_name)
            if sub_exp is None:
                print >>sys.stderr,"Internal Error:__gen_func_exp__"
                exit(29)
            rewritten.append(sub_exp)
        else:
            # Constants pass through untouched.
            rewritten.append(para)
    return ystree.YFuncExp(exp.func_name,rewritten)
##generate a new list, which contains only the exps that belong to the specified table name.
def __gen_join_list__(cur_list,new_list,table_name):
    """Append to `new_list` a copy of `cur_list` projected onto one table.

    Raw columns from another table are replaced by the constant "NULL";
    function expressions are rewritten via __gen_func_exp__ so their
    foreign column references are masked as well.  Expressions that are
    neither raw columns nor functions are skipped, matching the original
    behavior.

    Bug fix: the original reused one variable (`tmp_exp`) both for the
    shared NULL placeholder and for each rewritten function expression,
    so after the first YFuncExp entry every later foreign-table column
    appended that stale function expression instead of NULL.  A dedicated
    `null_exp` keeps the placeholder stable.  (Also dropped the unused
    local `count`.)
    """
    null_exp = ystree.YConsExp("\"NULL\"","TEXT")
    for exp in cur_list:
        if isinstance(exp,ystree.YRawColExp):
            if exp.table_name != table_name:
                new_list.append(null_exp)
            else:
                new_list.append(exp)
        elif isinstance(exp,ystree.YFuncExp):
            new_list.append(__gen_func_exp__(exp,table_name))
def __gen_join_where__(cur_exp,table_name):
    """Project a join WHERE expression onto one side of the join.

    Returns a new expression in which predicates referencing the other
    table collapse to the constant FALSE, while predicates that only
    touch `table_name` ("LEFT"/"RIGHT") are deep-copied unchanged.
    Boolean connectives (AND/OR, per bool_func_dict) are rebuilt
    recursively from their projected operands.  Returns None when
    `cur_exp` is not a function expression.
    """
    ret_exp = None
    if not isinstance(cur_exp,ystree.YFuncExp):
        return None
    if cur_exp.func_name in bool_func_dict.keys():
        # Boolean connective: project each operand and left-fold them back
        # together under the same connective.
        for x in cur_exp.parameter_list:
            if not isinstance(x,ystree.YFuncExp):
                print >>sys.stderr,"Internal Error:__gen_join_where__"
                exit(29)
            tmp_exp = __gen_join_where__(x,table_name)
            if ret_exp == None:
                ret_exp = tmp_exp
            else:
                para_list = []
                para_list.append(ret_exp)
                para_list.append(tmp_exp)
                ret_exp = ystree.YFuncExp(cur_exp.func_name,para_list)
        return ret_exp
    else:
        ###fix me here: how to handle the first para if the func is IS
        if cur_exp.func_name == "IS":
            # IS tests only its first operand, so only para1's table matters.
            tmp_bool = True
            para1 = cur_exp.parameter_list[0]
            if isinstance(para1,ystree.YRawColExp):
                if para1.table_name != table_name:
                    tmp_bool = False
            if tmp_bool == True:
                ret_exp = copy.deepcopy(cur_exp)
            else:
                ret_exp = ystree.YConsExp("FALSE","BOOLEAN")
        else:
            # Binary comparison: keep it only when every referenced column
            # belongs to `table_name`; otherwise it degenerates to FALSE.
            tmp_bool = True
            para1 = cur_exp.parameter_list[0]
            para2 = cur_exp.parameter_list[1]
            if isinstance(para1,ystree.YRawColExp):
                if para1.table_name != table_name:
                    tmp_bool = False
            if isinstance(para2,ystree.YRawColExp):
                if para2.table_name != table_name:
                    tmp_bool = False
            if tmp_bool == True:
                ret_exp = copy.deepcopy(cur_exp)
            else:
                ret_exp = ystree.YConsExp("FALSE","BOOLEAN")
        return ret_exp
###### whether it is a self join
def __self_join__(tree):
    """Return True when the join reads the same base table on both sides.

    A self join requires both children to be TableNodes and the node's
    two table aliases to resolve to one identical table name.
    """
    if not (isinstance(tree.left_child,ystree.TableNode)
            and isinstance(tree.right_child,ystree.TableNode)):
        return False
    alias_targets = tree.table_alias_dict.values()
    if len(alias_targets) != 2:
        return False
    return alias_targets[0] == alias_targets[1]
def __join_gen_mr__(tree,left_name,fo):
### join map part
line_buffer = "line_buf"
if tree.select_list is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
self_join_bool = False
self_join_bool = __self_join__(tree)
### get map output key
left_key_list = []
right_key_list = []
if tree.join_explicit is True:
__get_join_key__(tree.join_condition.on_condition_exp,left_key_list,"LEFT")
__get_join_key__(tree.join_condition.on_condition_exp,right_key_list,"RIGHT")
elif tree.join_condition is not None:
__get_join_key__(tree.join_condition.where_condition_exp,left_key_list,"LEFT")
__get_join_key__(tree.join_condition.where_condition_exp,right_key_list,"RIGHT")
if len(left_key_list) == 0:
new_exp = ystree.YConsExp(1,"INTEGER")
left_key_list.append(new_exp)
if len(right_key_list) == 0:
new_exp = ystree.YConsExp(1,"INTEGER")
right_key_list.append(new_exp)
left_key_type = __get_key_value_type__(left_key_list)
right_key_type = __get_key_value_type__(right_key_list)
if left_key_type != right_key_type:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
map_key_type = left_key_type
map_value_type = "Text" ## we need to add tag to differentiate the data from left table and right table
print >>fo,"\tpublic static class Map extends Mapper<Object, Text,"+map_key_type+","+map_value_type+">{\n"
print >>fo, "\t\tprivate int left = 0;"
print >>fo, "\t\tpublic void setup(Context context) throws IOException, InterruptedException {\n"
print >>fo, "\t\t\tint last_index = -1, start_index = -1;"
print >>fo, "\t\t\tString path = ((FileSplit)context.getInputSplit()).getPath().toString();"
print >>fo, "\t\t\tlast_index = path.lastIndexOf(\'/\');"
print >>fo,"\t\t\tlast_index = last_index - 1;"
print >>fo, "\t\t\tstart_index = path.lastIndexOf(\'/\',last_index);"
print >>fo, "\t\t\tString f_name = path.substring(start_index+1,last_index+1);"
print >>fo, "\t\t\tif(f_name.compareTo(\"" + left_name + "\") == 0 )"
print >>fo, "\t\t\t\tleft = 1;"
print >>fo,"\t\t}"
print >>fo,"\t\tpublic void map(Object key, Text value,Context context) throws IOException,InterruptedException{\n"
print >>fo,"\t\t\tString line = value.toString();"
print >>fo, "\t\t\tint prev=0,i=0,n=0;"
if self_join_bool is False:
print >>fo,"\t\t\tif(this.left == 1){\n"
else:
if isinstance(tree.left_child,ystree.TableNode):
exp_list = list(tree.left_child.select_list.tmp_exp_list)
if tree.left_child.where_condition is not None:
exp_list.append(tree.left_child.where_condition.where_condition_exp)
tmp1 = __get_max_index__(exp_list)
else:
tmp1 = len(tree.left_child.select_list.tmp_exp_list)
if isinstance(tree.right_child,ystree.TableNode):
exp_list = list(tree.right_child.select_list.tmp_exp_list)
if tree.right_child.where_condition is not None:
exp_list.append(tree.right_child.where_condition.where_condition_exp)
tmp2 = __get_max_index__(exp_list)
else:
tmp2 = len(tree.right_child.select_list.tmp_exp_list)
if tmp1>tmp2:
max_index = tmp1
else:
max_index = tmp2
print >>fo,"\t\t\t\tString[] "+ line_buffer +" = new String["+ str(max_index)+"];"
print >>fo, "\t\t\t\tfor(i=0,n=0,prev=0;i<line.length();i++){\n"
print >>fo, "\t\t\t\t\tif (line.charAt(i) == \'|\'){"
print >>fo, "\t\t\t\t\t\t" + line_buffer + "[n] = line.substring(prev,i);"
print >>fo, "\t\t\t\t\t\tn = n+1;"
print >>fo, "\t\t\t\t\t\tprev = i+1;"
print >>fo, "\t\t\t\t\t}"
print >>fo, "\t\t\t\t\tif(n == "+str(max_index)+")"
print >>fo, "\t\t\t\t\t\tbreak;"
print >>fo,"\t\t\t\t}\n"
print >>fo,"\t\t\tif(n<" + str(max_index) + ")"
print >>fo,"\t\t\t\t"+line_buffer+ "[n] = line.substring(prev,i);"
buf_dict = {}
buf_dict["LEFT"] = line_buffer
left_key = __gen_mr_value__(left_key_list,left_key_type,buf_dict)
buf_dict = {}
for x in tree.left_child.table_list:
if x not in buf_dict.keys():
buf_dict[x] = line_buffer
if isinstance(tree.left_child,ystree.TableNode):
if tree.left_child.table_name not in buf_dict.keys():
buf_dict[tree.left_child.table_name] = line_buffer
left_value_type = map_value_type
right_value_type = map_value_type
### scan the input of the left child
if tree.left_child.select_list is not None:
if isinstance(tree.left_child,ystree.TableNode):
left_value = __gen_mr_value__(tree.left_child.select_list.tmp_exp_list,left_value_type,buf_dict)
exp_list = list(tree.left_child.select_list.tmp_exp_list)
if tree.left_child.where_condition is not None:
exp_list.append(tree.left_child.where_condition.where_condition_exp)
max_index = __get_max_index__(exp_list)
else:
left_value = ""
for i in range(0,len(tree.left_child.select_list.tmp_exp_list)):
left_value += line_buffer + "[" + str(i) + "]"
left_value += "+ \"|\"+"
left_value = left_value[:-1]
max_index = len(tree.left_child.select_list.tmp_exp_list)
if self_join_bool is False:
print >>fo,"\t\t\t\tString[] "+ line_buffer +" = new String["+ str(max_index)+"];"
print >>fo, "\t\t\t\tfor(i=0,n=0,prev=0;i<line.length();i++){\n"
print >>fo, "\t\t\t\t\tif (line.charAt(i) == \'|\'){"
print >>fo, "\t\t\t\t\t\t" + line_buffer + "[n] = line.substring(prev,i);"
print >>fo, "\t\t\t\t\t\tn = n+1;"
print >>fo, "\t\t\t\t\t\tprev = i+1;"
print >>fo, "\t\t\t\t\t}"
print >>fo, "\t\t\t\t\tif(n == "+str(max_index)+")"
print >>fo, "\t\t\t\t\t\tbreak;"
print >>fo,"\t\t\t\t}\n"
print >>fo,"\t\t\tif(n<" + str(max_index) + ")"
print >>fo,"\t\t\t\t"+line_buffer+ "[n] = line.substring(prev,i);"
if not isinstance(tree.left_child,ystree.TableNode) or tree.left_child.where_condition is None:
tmp_output = "\t\t\t\tcontext.write(new " + left_key_type + "(" + left_key + ")"
tmp_output += ", "
tmp_output += "new " + left_value_type + "(\"L\"+\"|\" +"+ left_value +")"
tmp_output += ");"
print >>fo,tmp_output
else:
where_str = "\t\t\t\tif("
where_str +=__where_convert_to_java__(tree.left_child.where_condition.where_condition_exp,buf_dict)
where_str += "){\n"
print >>fo,where_str
tmp_output = "\t\t\t\t\tcontext.write(new " + left_key_type + "(" + left_key + ")"
tmp_output += ", "
tmp_output += "new " + left_value_type + "(\"L\"+\"|\"+"+ left_value +")"
tmp_output += ");"
print >>fo,tmp_output
print >>fo,"\t\t\t\t}" # end of if
if self_join_bool is False:
print >>fo,"\t\t\t}else{\n" ##end of left child
### scan the input of the right child
buf_dict = {}
buf_dict["RIGHT"] = line_buffer
right_key = __gen_mr_value__(right_key_list,right_key_type,buf_dict)
buf_dict = {}
for x in tree.right_child.table_list:
if x not in buf_dict.keys():
buf_dict[x] = line_buffer
if isinstance(tree.right_child,ystree.TableNode):
if tree.right_child.table_name not in buf_dict.keys():
buf_dict[tree.right_child.table_name] = line_buffer
if tree.right_child.select_list is not None:
if isinstance(tree.right_child,ystree.TableNode):
right_value = __gen_mr_value__(tree.right_child.select_list.tmp_exp_list,right_value_type,buf_dict)
exp_list = tree.right_child.select_list.tmp_exp_list
if tree.right_child.where_condition is not None:
exp_list.append(tree.right_child.where_condition.where_condition_exp)
max_index = __get_max_index__(exp_list)
else:
right_value = ""
for i in range(0,len(tree.right_child.select_list.tmp_exp_list)):
right_value += line_buffer + "[" + str(i) + "]"
right_value += "+ \"|\"+"
right_value = right_value[:-1]
max_index = len(tree.right_child.select_list.tmp_exp_list)
if self_join_bool is False:
print >>fo,"\t\t\t\tString[] "+ line_buffer +" = new String["+ str(max_index)+"];"
print >>fo, "\t\t\t\tfor(i=0,n=0,prev=0;i<line.length();i++){\n"
print >>fo, "\t\t\t\t\tif (line.charAt(i) == \'|\'){"
print >>fo, "\t\t\t\t\t\t" + line_buffer + "[n] = line.substring(prev,i);"
print >>fo, "\t\t\t\t\t\tn = n+1;"
print >>fo, "\t\t\t\t\t\tprev = i+1;"
print >>fo, "\t\t\t\t\t}"
print >>fo, "\t\t\t\t\tif(n == "+str(max_index)+")"
print >>fo, "\t\t\t\t\t\tbreak;"
print >>fo,"\t\t\t\t}\n"
print >>fo,"\t\t\tif(n<" + str(max_index) + ")"
print >>fo,"\t\t\t\t"+line_buffer+ "[n] = line.substring(prev,i);"
if not isinstance(tree.right_child,ystree.TableNode) or tree.right_child.where_condition is None:
tmp_output = "\t\t\t\tcontext.write(new " + right_key_type + "(" + right_key + ")"
tmp_output += ", "
tmp_output += "new " + right_value_type + "(\"R\"+\"|\" +"+ right_value +")"
tmp_output += ");"
print >>fo, tmp_output
else:
where_str = "\t\t\t\tif("
where_str +=__where_convert_to_java__(tree.right_child.where_condition.where_condition_exp,buf_dict)
where_str += "){\n"
print >>fo,where_str
tmp_output = "\t\t\t\t\tcontext.write(new " + right_key_type + "(" + right_key + ")"
tmp_output += ", "
tmp_output += "new " + right_value_type + "(\"R\"+\"|\" +"+ right_value +")"
tmp_output += ");"
print >>fo, tmp_output
print >>fo,"\t\t\t\t}" # end of if
if self_join_bool is False:
print >>fo,"\t\t\t}\n"
print >>fo,"\t\t}\n" ### end of map func
print >>fo,"\t}\n" ## end of map class
### join reduce part
left_array = "al_left"
right_array = "al_right"
reduce_key_type = "NullWritable"
reduce_value_type = "Text"
print >>fo,"\tpublic static class Reduce extends Reducer<"+ map_key_type+","+map_value_type+","+reduce_key_type+","+reduce_value_type+">{\n"
print >>fo, "\t\tpublic void reduce("+map_key_type+" key, Iterable<"+map_value_type+"> v, Context context) throws IOException,InterruptedException{\n"
print >>fo, "\t\t\tIterator values = v.iterator();"
print >>fo,"\t\t\tArrayList "+ left_array +" = new ArrayList();"
print >>fo,"\t\t\tArrayList "+ right_array +" = new ArrayList();"
print >>fo,"\t\t\twhile(values.hasNext()){\n"
print >>fo,"\t\t\t\tString tmp = values.next().toString();"
print >>fo,"\t\t\t\tif(tmp.charAt(0) == \'L\'){\n"
print >>fo,"\t\t\t\t\t"+ left_array + ".add(tmp.substring(2));"
print >>fo,"\t\t\t\t}else{\n"
print >>fo,"\t\t\t\t\t" + right_array +".add(tmp.substring(2));"
print >>fo,"\t\t\t\t}\n"
print >>fo,"\t\t\t}\n" ### end of while
print >>fo,"\t\t\tNullWritable key_op = NullWritable.get();"
buf_dict = {}
left_line_buffer = "left_buf"
right_line_buffer = "right_buf"
buf_dict["LEFT"] = "left_buf"
buf_dict["RIGHT"] = "right_buf"
if tree.join_explicit is True:
join_type = tree.join_type.upper()
if join_type == "LEFT":
reduce_value = __gen_mr_value__(tree.select_list.tmp_exp_list,reduce_value_type,buf_dict)
print >>fo,"\t\t\tfor(int i=0;i<" + left_array + ".size();i++){\n"
print >>fo,"\t\t\t\tString[] " + left_line_buffer + " = ((String)" + left_array + ".get(i)).split(\"\\\|\");"
print >>fo, "\t\t\t\tif(" + right_array + ".size()>0){\n"
print >>fo,"\t\t\t\t\tfor(int j=0;j<" +right_array + ".size();j++){\n"
print >>fo,"\t\t\t\t\t\tString[] " + right_line_buffer + " = ((String)" + right_array + ".get(j)).split(\"\\\|\");"
if tree.where_condition is not None:
exp = tree.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif(" + __where_convert_to_java__(exp,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t\t",tmp_output
print >>fo,"\t\t\t\t\t\t}\n" #### end of where condtion
else:
if tree.select_list is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}\n"
print >>fo, "\t\t\t\t}else{\n"
##### generate new select_list and where_condition.
new_list = []
__gen_join_list__(tree.select_list.tmp_exp_list,new_list,"LEFT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if tree.where_condition is not None:
new_where = None
new_where = __gen_join_where__(tree.where_condition.where_condition_exp,"LEFT")
if new_where is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
print >>fo,"\t\t\t\t\tif(" + __where_convert_to_java__(new_where,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}"
else:
tmp_output = "context.write("
tmp_output += "key_op"
tmp_output += ","
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t}\n" ### end of else
print >>fo, "\t\t\t}\n" ## end of for
elif join_type == "RIGHT":
reduce_value = __gen_mr_value__(tree.select_list.tmp_exp_list,reduce_value_type,buf_dict)
print >>fo,"\t\t\tfor(int i=0;i<" +right_array + ".size();i++){\n"
print >>fo,"\t\t\t\tString[] " + right_line_buffer + " = ((String)" + right_array + ".get(i)).split(\"\\\|\");"
print >>fo, "\t\t\t\tif(" + left_array + ".size()>0){\n"
print >>fo,"\t\t\t\t\tfor(int j=0;j<" +left_array + ".size();j++){\n"
print >>fo,"\t\t\t\t\t\tString[] " + left_line_buffer + " = ((String)" + left_array + ".get(j)).split(\"\\\|\");"
if tree.where_condition is not None:
exp = tree.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif(" + __where_convert_to_java__(exp,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t\t",tmp_output
print >>fo,"\t\t\t\t\t\t}\n" #### end of where condtion
else:
if tree.select_list is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}\n"
print >>fo, "\t\t\t\t}else{\n"
new_list = []
__gen_join_list__(tree.select_list.tmp_exp_list,new_list,"RIGHT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if tree.where_condition is not None:
new_where = None
new_where = __gen_join_where__(tree.where_condition.where_condition_exp,"RIGHT")
if new_where is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
print >>fo,"\t\t\t\t\tif(" + __where_convert_to_java__(new_where,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}"
else:
tmp_output = "context.write("
tmp_output += "key_op"
tmp_output += ","
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t}\n" ### end of else
print >>fo, "\t\t\t}\n" ## end of for
elif join_type == "FULL":
reduce_value = __gen_mr_value__(tree.select_list.tmp_exp_list,reduce_value_type,buf_dict)
print >>fo, "\t\t\tif(" + left_array + ".size()>0 && "+ right_array + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<" + left_array + ".size();i++){\n"
print >>fo, "\t\t\t\t\tString[] " + left_line_buffer + " = ((String)" + left_array + ".get(i)).split(\"\\\|\");"
print >>fo, "\t\t\t\t\tfor(int j=0;j<" +right_array + ".size();j++){\n"
print >>fo, "\t\t\t\t\t\tString[] " + right_line_buffer + " = ((String)" + right_array + ".get(j)).split(\"\\\|\");"
if tree.where_condition is not None:
exp = tree.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif(" + __where_convert_to_java__(exp,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t\t",tmp_output
print >>fo,"\t\t\t\t\t\t}\n" #### end of where condtion
else:
if tree.select_list is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}" # end of right_array for
print >>fo, "\t\t\t\t}" # end of left_array for
print >>fo, "\t\t\t}else if(" + left_array + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<" + left_array + ".size();i++){\n"
print >>fo, "\t\t\t\t\tString[] " + left_line_buffer + " = ((String)" + left_array + ".get(i)).split(\"\\\|\");"
new_list = []
__gen_join_list__(tree.select_list.tmp_exp_list,new_list,"LEFT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if tree.where_condition is not None:
new_where = None
new_where = __gen_join_where__(tree.where_condition.where_condition_exp,"LEFT")
if new_where is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
print >>fo,"\t\t\t\t\tif(" + __where_convert_to_java__(new_where,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}"
else:
tmp_output = "context.write("
tmp_output += "key_op"
tmp_output += ","
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}else if(" + right_array + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<" + right_array + ".size();i++){\n"
print >>fo, "\t\t\t\t\tString[] " + right_line_buffer + " = ((String)" + right_array + ".get(i)).split(\"\\\|\");"
new_list = []
__gen_join_list__(tree.select_list.tmp_exp_list,new_list,"RIGHT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if tree.where_condition is not None:
new_where = None
new_where = __gen_join_where__(tree.where_condition.where_condition_exp,"RIGHT")
if new_where is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
print >>fo,"\t\t\t\t\tif(" + __where_convert_to_java__(new_where,buf_dict) + "){\n"
tmp_output = "context.write("
#tmp_output += "new " + reduce_key_type + "(" + reduce_key + ")"
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t\t}"
else:
tmp_output = "context.write("
tmp_output += "key_op"
tmp_output += ","
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo,"\t\t\t\t\t",tmp_output
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}"
else:
print >>fo,"\t\t\tfor(int i=0;i<" + left_array + ".size();i++){\n"
print >>fo,"\t\t\t\tString[] " + left_line_buffer + " = ((String)" + left_array + ".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\tfor(int j=0;j<" +right_array + ".size();j++){\n"
print >>fo,"\t\t\t\t\tString[] " + right_line_buffer + " = ((String)" + right_array + ".get(j)).split(\"\\\|\");"
reduce_key = __gen_mr_value__(tree.select_list.tmp_exp_list[:1],reduce_key_type,buf_dict)
reduce_value = __gen_mr_value__(tree.select_list.tmp_exp_list,reduce_value_type,buf_dict)
if tree.where_condition is not None:
exp = tree.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\tif(" + __where_convert_to_java__(exp,buf_dict) + "){\n"
tmp_output = "context.write("
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t\t",tmp_output
print >>fo,"\t\t\t\t\t}"
else:
if tree.select_list is None:
print >>sys.stderr,"Internal Error:__join_gen_mr__"
exit(29)
tmp_output = "context.write("
tmp_output += "key_op"
tmp_output += ", "
tmp_output += "new " + reduce_value_type + "(" + reduce_value + ")"
tmp_output += ");"
print >>fo, "\t\t\t\t\t",tmp_output
print >>fo,"\t\t\t\t}\n"
print >>fo,"\t\t\t}\n"
print >>fo,"\t\t}\n" #### end of reduce func
print >>fo,"\t}\n" ##### end of reduce class
__gen_main__(tree,fo,map_key_type,map_value_type,reduce_key_type,reduce_value_type,True)
def __composite_gen_mr__(tree,fo):
line_buffer = "line_buf"
map_key_type = ""
tn_to_tag = {}
i = 1
index_to_name = {}
filename = fo.name.split(".java")[0]
for node in tree.child_list:
index = tree.child_list.index(node)
tn = filename[:-1] + str(int(filename[-1]) + index +1)
index_to_name[index] = tn
tree.pk_dict[tn] = copy.deepcopy(tree.pk_dict[index])
del tree.pk_dict[index]
for exp in tree.pk_dict[tn][0]:
exp.table_name = tn
exp_list = None
if isinstance(node,ystree.CompositeNode):
exp_list = node.jfc_node_list[-1].select_list.tmp_exp_list
else:
exp_list = node.select_list.tmp_exp_list
tmp_exp_list = []
for tmp in exp_list:
tmp_in = exp_list.index(tmp)
new_exp = ystree.YRawColExp(tn,tmp_in)
new_exp.table_name = tn
new_exp.column_name = int(new_exp.column_name)
new_exp.column_type = tmp.column_type
tmp_exp_list.append(new_exp)
tree.mapoutput[tn] = tmp_exp_list
buf_dict = {}
for x in tree.mapoutput.keys():
tn_to_tag[x] = i
buf_dict[x] = line_buffer
map_key_type = __get_key_value_type__(tree.pk_dict[x][0])
i = i +1
map_value_type = "Text"
print >>fo,"\tpublic static class Map extends Mapper<Object, Text,"+map_key_type+","+map_value_type+">{\n"
print >>fo, "\t\tprivate String filename;"
print >>fo, "\t\tprivate int filetag = -1;"
print >>fo, "\t\tpublic void setup(Context context) throws IOException, InterruptedException {\n"
print >>fo, "\t\t\tint last_index = -1, start_index = -1;"
print >>fo, "\t\t\tString path = ((FileSplit)context.getInputSplit()).getPath().toString();"
print >>fo, "\t\t\tlast_index = path.lastIndexOf(\'/\');"
print >>fo,"\t\t\tlast_index = last_index - 1;"
print >>fo, "\t\t\tstart_index = path.lastIndexOf(\'/\',last_index);"
print >>fo, "\t\t\tfilename = path.substring(start_index+1,last_index+1);"
for tn in tn_to_tag.keys():
print >>fo, "\t\t\tif(filename.compareTo(\""+tn+"\")==0){"
print >>fo, "\t\t\t\tfiletag = " + str(tn_to_tag[tn]) + ";"
print >>fo, "\t\t\t}"
print >>fo,"\t\t}\n"
print >>fo,"\t\tpublic void map(Object key, Text value,Context context) throws IOException,InterruptedException{\n"
print >>fo,"\t\t\tString line = value.toString();"
print >>fo,"\t\t\tString[] " + line_buffer + "= line.split(\"\\\|\");"
print >>fo,"\t\t\tBitSet dispatch = new BitSet(32);"
for table_name in tree.mapoutput.keys():
map_key = __gen_mr_key__(tree.pk_dict[table_name][0],map_key_type,buf_dict)
print >>fo,"\t\t\tif(filetag =="+str(tn_to_tag[table_name])+"){\n"
map_value = __gen_mr_value__(tree.mapoutput[table_name],map_value_type,buf_dict)
#### generate map where_exp
mapfilter = {}
for x in tree.mapfilter.keys():
where_exp = None
for y in tree.mapfilter[x]:
if where_exp is None:
where_exp = y
else:
para_list = []
para_list.append(where_exp)
para_list.append(y)
where_exp = ystree.YFuncExp("OR",para_list)
mapfilter[x] = where_exp
if table_name in tree.mapfilter.keys():
print >>fo, "\t\t\t\tif(" + __where_convert_to_java__(mapfilter[table_name],buf_dict) + "){\n"
#### dispatch #####
for x in tree.it_node_list:
if isinstance(x,ystree.GroupByNode):
if table_name in x.table_list and x.child.where_condition is not None:
where_exp = x.child.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\tif(!("+__where_convert_to_java__(where_exp,buf_dict) + "))"
print >>fo,"\t\t\t\t\t\tdispatch.set(",tree.it_node_list.index(x),");"
elif isinstance(x,ystree.TwoJoinNode):
self_join_bool = __self_join__(x)
if isinstance(x.left_child,ystree.TableNode):
if table_name == x.left_child.table_name and x.left_child.where_condition is not None:
where_exp = x.left_child.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\tif(!("+__where_convert_to_java__(where_exp,buf_dict) + "))"
if self_join_bool is False:
print >>fo,"\t\t\t\t\t\tdispatch.set(",tree.it_node_list.index(x),");"
else:
print >>fo,"\t\t\t\t\t\tdispatch.set(",16+tree.it_node_list.index(x),");"
if isinstance(x.right_child,ystree.TableNode):
if table_name == x.right_child.table_name and x.right_child.where_condition is not None:
where_exp = x.right_child.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\tif(!("+__where_convert_to_java__(where_exp,buf_dict) + "))"
if self_join_bool is False:
print >>fo,"\t\t\t\t\t\tdispatch.set(",tree.it_node_list.index(x),");"
else:
print >>fo,"\t\t\t\t\t\tdispatch.set(",17+tree.it_node_list.index(x),");"
print >>fo,"\t\t\t\t\tif(dispatch.isEmpty())"
output = "\t\t\t\t\t\tcontext.write("
output += "new " + map_key_type + "(" + map_key + ")"
output += ","
output += "new " + map_value_type + "(" + str(tn_to_tag[table_name]) + "+\"||\"+" + map_value + ")"
output += ");"
print >>fo, output
print >>fo,"\t\t\t\t\telse"
output = "\t\t\t\t\t\tcontext.write("
output += "new " + map_key_type + "(" + map_key + ")"
output += ","
output += "new " + map_value_type + "(" + str(tn_to_tag[table_name]) + "+\"|\"+dispatch.toString()+\"|\"+" + map_value + ")"
output += ");"
print >>fo,output
print >>fo,"\t\t\t\t}"
else:
output = "\t\t\t\tcontext.write("
output += "new " + map_key_type + "(" + map_key + ")"
output += ","
output += "new " + map_value_type + "(" + str(tn_to_tag[table_name]) + "+\"||\"+" + map_value + ")"
output += ");"
print >>fo,output
print >>fo, "\t\t\t}\n"
print >>fo, "\t\t}\n"
print >>fo,"\t}\n"
reduce_key_type = "NullWritable"
reduce_value_type = "Text"
print >>fo,"\tpublic static class Reduce extends Reducer<"+ map_key_type+","+map_value_type+","+reduce_key_type+","+reduce_value_type+">{\n"
if len(tree.jfc_node_list) == 0:
print >>fo, "\t\tprivate MultipleOutputs mos;"
print >>fo, "\t\tpublic void setup(Context context) throws IOException, InterruptedException{\n"
print >>fo, "\t\t\tmos = new MultipleOutputs(context);"
print >>fo, "\t\t}\n"
print >>fo, "\t\tpublic void reduce("+map_key_type+" key, Iterable<"+map_value_type+"> v, Context context) throws IOException,InterruptedException{\n"
##########reduce part variable declaration
print >>fo,"\t\t\tIterator values = v.iterator();"
print >>fo,"\t\t\tArrayList[] it_output = new ArrayList[" +str(len(tree.it_node_list)) + "];"
print >>fo,"\t\t\tfor(int i=0;i<"+str(len(tree.it_node_list))+";i++){"
print >>fo,"\t\t\t\tit_output[i]=new ArrayList();"
print >>fo,"\t\t\t}"
agg_buffer = "result"
d_count_buf = "d_count_buf"
line_counter = "al_line"
left_array = "al_left"
right_array = "al_right"
print >>fo,"\t\t\tString tmp = \"\";"
for x in tree.it_node_list:
if isinstance(x,ystree.GroupByNode):
gb_exp_list = []
__get_gbexp_list__(x.select_list.tmp_exp_list,gb_exp_list)
tmp_agg_buffer = agg_buffer + "_" +str(tree.it_node_list.index(x))
tmp_count_buf = d_count_buf + "_" +str(tree.it_node_list.index(x))
tmp_line_counter = line_counter + "_"+str(tree.it_node_list.index(x))
print >>fo,"\t\t\tDouble[] " + tmp_agg_buffer+"=new Double["+str(len(gb_exp_list))+"];"
print >>fo,"\t\t\tArrayList[] " + tmp_count_buf+"=new ArrayList["+str(len(gb_exp_list)) + "];"
print >>fo,"\t\t\tint "+tmp_line_counter + " = 0;"
print >>fo,"\t\t\tfor(int i=0;i<"+str(len(gb_exp_list))+";i++){"
print >>fo,"\t\t\t\t"+tmp_agg_buffer + "[i] = 0.0;"
print >>fo,"\t\t\t\t"+tmp_count_buf+"[i] = new ArrayList();"
print >>fo,"\t\t\t}\n"
elif isinstance(x,ystree.TwoJoinNode):
tmp_left_array = left_array + "_" + str(tree.it_node_list.index(x))
tmp_right_array = right_array + "_" + str(tree.it_node_list.index(x))
print >>fo,"\t\t\tArrayList " + tmp_left_array + "= new ArrayList();"
print >>fo,"\t\t\tArrayList " + tmp_right_array + "= new ArrayList();"
############## go through each value
print >>fo,"\t\t\twhile(values.hasNext()){"
print >>fo,"\t\t\t\tString line = values.next().toString();"
print >>fo,"\t\t\t\tString dispatch = line.split(\"\\\|\")[1];"
print >>fo,"\t\t\t\ttmp = line.substring(2+dispatch.length()+1);"
print >>fo,"\t\t\t\tString[] "+line_buffer + "= tmp.split(\"\\\|\");"
for x in tree.it_node_list:
if isinstance(x,ystree.GroupByNode):
tmp_agg_buffer = agg_buffer + "_" + str(tree.it_node_list.index(x))
tmp_d_count_buf = d_count_buf + "_" +str(tree.it_node_list.index(x))
tmp_line_counter = line_counter + "_"+str(tree.it_node_list.index(x))
gb_exp_list = []
__get_gbexp_list__(x.select_list.tmp_exp_list,gb_exp_list)
tn = x.child.table_name
index = tree.it_node_list.index(x)
if_stat = "\t\t\t\tif(line.charAt(0) =='" + str(tn_to_tag[tn]) + "'"
if_stat += "&& ("
if_stat += "dispatch.length()==0 ||"
if_stat += "dispatch.indexOf('" + str(index) + "')==-1"
if_stat += ")){"
print >>fo, if_stat
for i in range(0,len(gb_exp_list)):
exp = gb_exp_list[i]
if not isinstance(exp,ystree.YFuncExp):
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
tmp_output = __select_func_convert_to_java__(exp,buf_dict)
tmp_name = ystree.__groupby_func_name__(exp)
if tmp_name == "SUM" or tmp_name == "AVG":
print >>fo,"\t\t\t\t\t"+tmp_agg_buffer+"["+str(i)+"] +="+tmp_output+";"
elif tmp_name == "COUNT_DISTINCT":
print >>fo,"\t\t\t\t\tif("+tmp_d_count_buf+"["+str(i)+"].contains("+tmp_output+")==false)"
print >>fo,"\t\t\t\t\t\t"+tmp_d_count_buf+"["+str(i)+"].add("+tmp_output+");"
elif tmp_name == "MAX":
print >>fo,"\t\t\t\t\tif("+tmp_line_counter+"==0)"
print >>fo,"\t\t\t\t\t\t"+tmp_agg_buffer+"["+str(i)+"]=(double) "+tmp_output + ";"
print >>fo,"\t\t\t\t\telse if("+tmp_agg_buffer+"["+str(i)+"]>"+tmp_output+")"
print >>fo,"\t\t\t\t\t\t"+tmp_agg_buffer+"["+str(i)+"]= (double) "+tmp_output+";"
elif tmp_name == "MIN":
print >>fo,"\t\t\t\t\tif("+tmp-line_counter+"==0)"
print >>fo,"\t\t\t\t\t\t"+tmp_agg_buffer+"["+str(i)+"]=(double) "+tmp_output + ";"
print >>fo,"\t\t\t\t\telse if("+tmp_agg_buffer+"["+str(i)+"]<"+tmp_output+")"
print >>fo,"\t\t\t\t\t\t"+tmp_agg_buffer+"["+str(i)+"]= (double) "+tmp_output,+";"
print >>fo, "\t\t\t\t\t" + tmp_line_counter+ "++;"
print >>fo,"\t\t\t\t}"
elif isinstance(x,ystree.TwoJoinNode):
self_join_bool = __self_join__(x)
index = tree.it_node_list.index(x)
tmp_left_array = left_array + "_" + str(tree.it_node_list.index(x))
tmp_right_array = right_array + "_" +str(tree.it_node_list.index(x))
if isinstance(x.left_child,ystree.TableNode):
left_tn = x.left_child.table_name
else:
if x.left_composite is not None:
left_tn = index_to_name[tree.child_list.index(x.left_composite)]
else:
if x.left_child in tree.it_node_list or x.left_child in tree.jfc_node_list:
continue
else:
left_tn = index_to_name[tree.child_list.index(x.left_child)]
if_stat = "\t\t\t\tif(line.charAt(0)=='" + str(tn_to_tag[left_tn]) + "'"
if_stat += "&&("
if_stat += "dispatch.length()==0 ||"
if self_join_bool is False:
if_stat += "dispatch.indexOf(\"" + str(index) + "\")==-1"
else:
if_stat += "dispatch.indexOf(\"" + str(16+index) + "\")==-1"
if_stat += "))"
print >>fo,if_stat
print >>fo,"\t\t\t\t\t"+tmp_left_array + ".add(tmp);"
if isinstance(x.right_child,ystree.TableNode):
right_tn = x.right_child.table_name
else:
if x.right_composite is not None:
right_tn = index_to_name[tree.child_list.index(x.right_composite)]
else:
if x.right_child in tree.it_node_list or x.right_child in tree.jfc_node_list:
continue
else:
right_tn = index_to_name[tree.child_list.index(x.right_child)]
if_stat = "\t\t\t\tif(line.charAt(0)=='" + str(tn_to_tag[right_tn]) + "'"
if_stat += "&&("
if_stat += "dispatch.length()==0 ||"
if self_join_bool is False:
if_stat += "dispatch.indexOf(\"" + str(index) + "\")==-1"
else:
if_stat += "dispatch.indexOf(\"" + str(17+index) + "\")==-1"
if_stat += "))"
print >>fo,if_stat
print >>fo,"\t\t\t\t\t"+tmp_right_array + ".add(tmp);"
print >>fo,"\t\t\t}" #######end of while(values.hasNext())
#### sum up the value
print >>fo,"\t\t\tString[] "+line_buffer+" = tmp.split(\"\\\|\");"
for x in tree.it_node_list:
if isinstance(x,ystree.GroupByNode):
tmp_agg_buffer = agg_buffer + "_"+str(tree.it_node_list.index(x))
tmp_d_count_buf = d_count_buf + "_" +str(tree.it_node_list.index(x))
tmp_line_counter = line_counter+"_"+str(tree.it_node_list.index(x))
gb_exp_list = []
__get_gbexp_list__(x.select_list.tmp_exp_list,gb_exp_list)
for i in range(0,len(gb_exp_list)):
exp = gb_exp_list[i]
if not isinstance(exp,ystree.YFuncExp):
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
tmp_name = ystree.__groupby_func_name__(exp)
if tmp_name == "AVG":
print >>fo,"\t\t\t"+tmp_agg_buffer+"["+str(i)+"]="+tmp_agg_buffer+"["+str(i)+"]/"+tmp_line_counter+";"
elif tmp_name == "COUNT":
print >>fo,"\t\t\t"+tmp_agg_buffer+"["+str(i)+"]=(double)" + tmp_line_counter+";"
elif tmp_name == "COUNT_DISTINCT":
print >>fo,"\t\t\t",tmp_agg_buffer+"["+str(i) +"]=(double)"+tmp_d_count_buf+"["+str(i) + "].size();"
col_list = []
if x.having_clause is not None:
ystree.__get_gb_list__(tree.having_clause.where_condition_exp,col_list)
having_len = len(col_list)
buf_dict = {}
for tn in x.table_list:
buf_dict[tn] = line_buffer
buf_dict["AGG"] = tmp_agg_buffer
reduce_value = ""
for j in range(0,len(x.select_list.tmp_exp_list)-having_len):
exp = x.select_list.tmp_exp_list[j]
if isinstance(exp,ystree.YFuncExp):
tmp_list = []
__get_gb_exp__(exp,tmp_list)
if len(tmp_list) >0:
reduce_value += __gb_exp_to_java__(exp,gb_exp_list,buf_dict,None)
if reduce_value_type == "Text":
reduce_value += " + \"|\""
reduce_value += "+"
else:
reduce_value += __select_func_convert_to_java__(exp,buf_dict)
if reduce_value_type == "Text":
reduce_value += " + \"|\""
reduce_value += "+"
elif isinstance(exp,ystree.YRawColExp):
reduce_value += __para_to_java__(exp.column_type,exp.column_name,line_buffer)
if reduce_value_type == "Text":
reduce_value += " + \"|\""
reduce_value += "+"
else:
reduce_value += __para_to_java__(exp.cons_type,exp.cons_value,None)
if reduce_value_type == "Text":
reduce_value += " + \"|\""
reduce_value += "+"
reduce_value = reduce_value[:-1]
if reduce_value == "":
reduce_value = "\" \""
if x.where_condition is not None:
buf_dict = {}
buf_dict["AGG"] = tmp_agg_buffer
for tn in x.table_list:
buf_dict[tn] = line_buffer
tmp_list = []
__get_gb_exp__(x.where_condition.where_condition_exp,tmp_list)
for tmp in tmp_list:
for exp in gb_exp_list:
if tmp.compare(exp) is True:
func_obj = tmp.func_obj
exp_index = gb_exp_list.index(exp)
new_exp = ystree.YRawColExp("AGG",exp_index)
new_exp.column_name = int(new_exp.column_name)
new_exp.column_type = tmp.get_value_type()
func_obj.replace(tmp,new_exp)
break
print >>fo,"\t\t\tif("+ __where_convert_to_java__(x.where_condition.where_condition_exp,buf_dict) + ")"
print >>fo,"\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add(" + reduce_value + ");"
else:
print >>fo,"\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add(" + reduce_value + ");"
elif isinstance(x,ystree.TwoJoinNode):
if x.left_child in tree.it_node_list or x.right_child in tree.it_node_list:
if x.parent in tree.jfc_node_list:
index = tree.jfc_node_list.index(x.parent)
tree.jfc_node_list.insert(index,x)
else:
tree.jfc_node_list.append(x)
continue
elif x.left_child in tree.jfc_node_list or x.right_child in tree.jfc_node_list:
if x.parent in tree.jfc_node_list:
index = tree.jfc_node_list.index(x.parent)
tree.jfc_node_list.insert(index,x)
else:
tree.jfc_node_list.append(x)
continue
tmp_left_array = left_array+"_"+str(tree.it_node_list.index(x))
tmp_right_array = right_array+"_"+str(tree.it_node_list.index(x))
buf_dict = {}
left_line_buffer = "left_buf_"+str(tree.it_node_list.index(x))
right_line_buffer = "right_buf_"+str(tree.it_node_list.index(x))
buf_dict["LEFT"] = left_line_buffer
buf_dict["RIGHT"] = right_line_buffer
reduce_value_type = "Text"
if x.join_explicit is True:
join_type = x.join_type.upper()
if join_type == "LEFT":
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
print >>fo,"\t\t\tfor(int i=0;i<" + tmp_left_array + ".size();i++){"
print >>fo,"\t\t\t\tString[] "+left_line_buffer+"=((String)"+tmp_left_array+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\tif("+tmp_right_array+".size()>0){"
print >>fo,"\t\t\t\t\tfor(int j=0;j<"+tmp_right_array+".size();j++){"
print >>fo,"\t\t\t\t\t\tString[] "+right_line_buffer+" = ((String)"+tmp_right_array+".get(j)).split(\"\\\|\");"
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(exp,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
print >>fo,"\t\t\t\t}else{"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"LEFT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
new_where = __gen_join_where__(x.where_condition.where_condition_exp,"LEFT")
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}"
elif join_type == "RIGHT":
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
print >>fo,"\t\t\tfor(int i=0;i<" + tmp_right_array + ".size();i++){"
print >>fo,"\t\t\t\tString[] "+right_line_buffer+"=((String)"+tmp_right_array+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\tif("+tmp_left_array+".size()>0){"
print >>fo,"\t\t\t\t\tfor(int j=0;j<"+tmp_left_array+".size();j++){"
print >>fo,"\t\t\t\t\t\tString[] "+left_line_buffer+" = ((String)"+tmp_left_array+".get(j)).split(\"\\\|\");"
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(exp,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
print >>fo,"\t\t\t\t}else{"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"RIGHT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
new_where = __gen_join_where__(x.where_condition.where_condition_exp,"RIGHT")
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}"
elif join_type == "FULL":
print >>fo, "\t\t\tif(" + tmp_left_array + ".size()>0 && "+ tmp_right_array + ".size()>0{"
print >>fo, "\t\t\t\tfor(int i=0;i<" + tmp_left_array + ".size();i++){"
print >>fo, "\t\t\t\t\tString[] "+left_line_buffer+"=((String)"+tmp_left_array+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\t\tfor(int j=0;j<"+tmp_right_array+".size();j++){"
print >>fo,"\t\t\t\t\t\tString[] "+right_line_buffer+" = ((String)"+tmp_right_array+".get(j)).split(\"\\\|\");"
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(exp,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo, "\t\t\t\t\t}"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}else if (" + tmp_left_array + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<" + tmp_left_array + ".size();i++){"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"LEFT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
new_where = __gen_join_where__(x.where_condition.where_condition_exp,"LEFT")
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}else if (" + tmp_right_array + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<" + tmp_right_array + ".size();i++){"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"RIGHT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
new_where = __gen_join_where__(x.where_condition.where_condition_exp,"RIGHT")
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}"
else:
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
else:
print >>fo,"\t\t\tfor(int i=0;i<"+tmp_left_array+".size();i++){"
print >>fo,"\t\t\t\tString[] "+left_line_buffer+"=((String)"+tmp_left_array+".get(i)).split(\"\\\|\");"
print >>fo, "\t\t\t\tfor(int j=0;j<"+tmp_right_array+".size();j++){"
print >>fo,"\t\t\t\t\tString[] "+right_line_buffer+"=((String)"+tmp_right_array+".get(j)).split(\"\\\|\");"
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\tif(" + __where_convert_to_java__(exp,buf_dict) + "){"
print >>fo,"\t\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\tit_output["+str(tree.it_node_list.index(x)) + "].add("+reduce_value+");"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}"
### handle jfc node
if len(tree.jfc_node_list) ==0:
print >>fo,"\t\t\tNullWritable key_op=NullWritable.get();"
print >>fo, "\t\t\tfor(int i=0;i<"+str(len(tree.it_node_list)) + ";i++){"
print >>fo, "\t\t\t\tfor(int j=0;j<it_output[i].size();j++){"
print >>fo, "\t\t\t\t\tmos.write(key_op,new Text(it_output[i].get(j).toString()),Integer.toString(i)+\"/Mul\");"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}\n"
print >>fo, "\t\t}"
print >>fo, "\t}\n"
__gen_main__(tree,fo,map_key_type,map_value_type,reduce_key_type,reduce_value_type,True)
return
print >>fo,"\t\t\tArrayList[] jfc_output = new ArrayList[" + str(len(tree.jfc_node_list)) + "];"
print >>fo,"\t\t\tfor(int i=0;i<"+str(len(tree.jfc_node_list))+";i++){"
print >>fo,"\t\t\t\tjfc_output[i]=new ArrayList();"
print >>fo,"\t\t\t}"
for x in tree.jfc_node_list:
if isinstance(x,ystree.GroupByNode):
tmp_gb_input = ""
if x.child in tree.jfc_node_list:
index = tree.jfc_node_list.index(x.child)
tmp_gb_input = "jfc_output["+str(index) + "]"
elif x.child in tree.it_node_list:
index = tree.it_node_list.index(x.child)
tmp_gb_input = "it_output[" + str(index) + "]"
else:
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
gb_exp_list = []
__get_gbexp_list__(x.select_list.tmp_exp_list,gb_exp_list)
key_len = len(x.group_by_clause.groupby_exp_list)
tmp_output_len = len(gb_exp_list)
tmp_gb_output = "jfc_gb_output_" + str(tree.jfc_node_list.index(x))
tmp_dc_output = "jfc_dc_output_" + str(tree.jfc_node_list.index(x))
tmp_count_output = "jfc_count_output_" + str(tree.jfc_node_list.index(x))
print >>fo,"\t\t\tHashtable<String,Double>[] " +tmp_gb_output + "=new Hashtable["+str(tmp_output_len) + "];"
print >>fo,"\t\t\tHashtable<String,ArrayList>[] " +tmp_dc_output + "=new Hashtable["+str(tmp_output_len) + "];"
print >>fo,"\t\t\tHashtable<String,Integer>[] " +tmp_count_output + "=new Hashtable["+str(tmp_output_len) + "];"
print >>fo,"\t\t\tfor(int i=0;i<"+str(tmp_output_len) + ";i++){"
print >>fo,"\t\t\t\t"+tmp_gb_output+"[i]=new Hashtable<String,Double>();"
print >>fo,"\t\t\t\t"+tmp_dc_output+"[i]=new Hashtable<String,ArrayList>();"
print >>fo,"\t\t\t\t"+tmp_count_output+"[i]=new Hashtable<String,Integer>();"
print >>fo,"\t\t\t}"
print >>fo,"\t\t\tfor(int i=0;i<"+tmp_gb_input + ".size();i++){"
print >>fo,"\t\t\t\tString[] tmp_buf = ((String)"+ tmp_gb_input + ".get(i)).split(\"\\\|\");"
tmp_key = ""
buf_dict={}
for tn in x.table_list:
buf_dict[tn] = "tmp_buf"
for i in range(0,len(x.group_by_clause.groupby_exp_list)):
tmp_key += "tmp_buf[" + str(i) + "]+"
tmp_key += "\"|\"+"
tmp_key = tmp_key[:-1]
for i in range(0,len(gb_exp_list)):
exp = gb_exp_list[i]
func_name = ystree.__groupby_func_name__(exp)
if func_name == "MAX":
tmp = __select_func_convert_to_java__(exp,buf_dict)
print >>fo,"\t\t\t\tif(" + tmp_gb_output + "["+str(i)+"].containsKey("+tmp_key + ")){"
print >>fo,"\t\t\t\t\tDouble max_tmp = (double)" + tmp + ";"
print >>fo,"\t\t\t\t\tif(max_tmp > "+tmp_gb_output+"["+str(i)+"].get("+tmp_key+"))"
print >>fo,"\t\t\t\t\t\t"+tmp_gb_output+"["+str(i)+"].put("+tmp_key+",max_tmp);"
print >>fo,"\t\t\t\t}else{"
print >>fo,"\t\t\t\t\t" + tmp_gb_output+"["+str(i)+"].put("+tmp_key+",(double)" + tmp + ");"
print >>fo,"\t\t\t\t}"
elif func_name == "MIN":
tmp = __select_func_convert_to_java__(exp,buf_dict)
print >>fo,"\t\t\t\tif(" + tmp_gb_output + "["+str(i)+"].containsKey("+tmp_key + ")){"
print >>fo,"\t\t\t\t\tDouble min_tmp = (double)"+tmp +";"
print >>fo,"\t\t\t\t\tif(min_tmp < "+tmp_gb_output+"["+str(i)+"].get("+tmp_key+"))"
print >>fo,"\t\t\t\t\t\t"+tmp_gb_output+"["+str(i)+"].put("+tmp_key+",min_tmp);"
print >>fo,"\t\t\t\t}else{"
print >>fo,"\t\t\t\t\t" + tmp_gb_output+"["+str(i)+"].put("+tmp_key+",(double)"+tmp + ");"
print >>fo,"\t\t\t\t}"
elif func_name == "SUM":
tmp = __select_func_convert_to_java__(exp,buf_dict)
print >>fo,"\t\t\t\tif(" + tmp_gb_output + "["+str(i)+"].containsKey("+tmp_key + ")){"
print >>fo,"\t\t\t\t\tDouble sum_tmp = (double)"+tmp+";"
print >>fo,"\t\t\t\t\t sum_tmp += " +tmp_gb_output+"[" +str(i)+"].get("+tmp_key+");"
print >>fo,"\t\t\t\t\t"+tmp_gb_output+"["+str(i)+"].put("+tmp_key+", sum_tmp);"
print >>fo,"\t\t\t\t}else{"
print >>fo,"\t\t\t\t\t" + tmp_gb_output+"["+str(i)+"].put("+tmp_key+",(double)"+tmp+");";
print >>fo,"\t\t\t\t}"
elif func_name == "AVG":
tmp = __select_func_convert_to_java__(exp,buf_dict)
print >>fo,"\t\t\t\tif(" + tmp_gb_output + "["+str(i)+"].containsKey("+tmp_key + ")){"
print >>fo,"\t\t\t\t\tDouble sum_tmp = (double)"+tmp+";"
print >>fo,"\t\t\t\t\tsum_tmp += " +tmp_gb_output+"[" +str(i)+"].get("+tmp_key+");"
print >>fo,"\t\t\t\t\tInteger count = "+tmp_count_output+"["+str(i)+"].get("+tmp_key+")+1;"
print >>fo,"\t\t\t\t\t"+tmp_count_output+"["+str(i)+"].put("+tmp_key+",count);"
print >>fo,"\t\t\t\t\t"+tmp_gb_output+"["+str(i)+"].put("+tmp_key+", sum_tmp);"
print >>fo,"\t\t\t\t}else{"
print >>fo,"\t\t\t\t\t" + tmp_gb_output+"["+str(i)+"].put("+tmp_key+",(double)"+tmp+");"
print >>fo,"\t\t\t\t\t"+tmp_count_output+"["+str(i)+"].put("+tmp_key+",1);"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t\t\t"+tmp_count_output+"["+tmp_key+"] += 1;"
print >>fo,"\t\t\t\t\t"+tmp_count_output+"["+tmp_key+"] = 1;"
elif func_name == "COUNT_DISTINCT":
tmp = __select_func_convert_to_java__(exp,buf_dict)
print >>fo,"\t\t\t\tif(" + tmp_dc_output +"["+str(i)+"].containsKey(" + tmp_key + ")){"
print >>fo,"\t\t\t\t\tif(!"+tmp_dc_output+"["+str(i)+"].get("+tmp_key+").contains("+tmp+")){"
print >>fo,"\t\t\t\t\t\tArrayList tmp_al = "+tmp_dc_output + "["+str(i)+"].get("+tmp_key+").add("+tmp+");"
print >>fo,"\t\t\t\t\t\t"+tmp_dc_output+"["+str(i)+"].put("+tmp_key+",tmp_al);"
print >>fo,"\t\t\t\t\t}"
print >>fo,"\t\t\t\t}else{"
print >>fo,"\t\t\t\t\t\t"+tmp_dc_output+"["+str(i)+"].put("+tmp_key+","+tmp + ");"
print >>fo,"\t\t\t\t}"
elif func_name == "COUNT":
print >>fo,"\t\t\t\tif(" + tmp_count_output +"["+str(i)+ "].containsKey("+tmp_key + ")){"
print >>fo,"\t\t\t\t\tInteger count = "+tmp_count_output+"["+str(i)+"].get("+tmp_key+")+1;"
print >>fo,"\t\t\t\t\t"+tmp_count_output+"["+str(i)+"].put("+tmp_key+",count);"
print >>fo,"\t\t\t\t}else{"
print >>fo,"\t\t\t\t\t"+tmp_count_output+"["+str(i)+"].put("+tmp_key+",1);"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}" ########## end of for
for i in range(0,len(gb_exp_list)):
exp = gb_exp_list[i]
func_name = ystree.__groupby_func_name__(exp)
if func_name == "AVG":
print >>fo,"\t\t\tfor(Object tmp_key:"+tmp_gb_output+"["+str(i) + "].keySet()){"
print >>fo,"\t\t\t\tDouble count = (double) "+tmp_count_output+"["+str(i)+"].get(tmp_key);"
print >>fo,"\t\t\t\tDouble avg = "+tmp_gb_output+"["+str(i) + "].get(tmp_key)/count;"
print >>fo,"\t\t\t\t"+tmp_gb_output+"["+str(i)+"].put(tmp_key.toString(),avg);"
print >>fo,"\t\t\t}"
elif func_name == "COUNT_DISTINCT":
print >>fo,"\t\t\tfor(Object tmp_key:" + tmp_dc_output+"["+str(i) +"].keySet()){"
print >>fo,"\t\t\t\tDouble count = (double)"+tmp_dc_output+"["+str(i)+"].get(tmp_key).size();"
print >>fo,"\t\t\t\t"+tmp_gb_output +"["+str(i)+"].put(tmp_key.toString(),count);"
print >>fo,"\t\t\t}"
elif func_name == "COUNT":
print >>fo,"\t\t\tfor(Object tmp_key:" + tmp_count_output+"["+str(i) +"].keySet()){"
print >>fo,"\t\t\t\tDouble count = (double)"+tmp_count_output+"["+str(i)+"].get(tmp_key);"
print >>fo,"\t\t\t\t"+tmp_gb_output +"["+str(i)+"].put(tmp_key.toString(),count);"
print >>fo,"\t\t\t}"
print >>fo,"\t\t\tfor(Object tmp_key:"+tmp_gb_output+"[0].keySet()){"
print >>fo,"\t\t\t\tString[] tmp_buf =((String) "+tmp_gb_input + ".get(0)).split(\"\\\|\");"
print >>fo,"\t\t\t\tfor(int i=0;i<"+tmp_gb_input+".size();i++){"
print >>fo,"\t\t\t\t\ttmp_buf =((String) "+tmp_gb_input + ".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\t\tif(((String)tmp_key).compareTo("+tmp_key+")==0)"
print >>fo,"\t\t\t\t\t\tbreak;"
print >>fo,"\t\t\t\t}"
buf_dict={}
for tn in x.table_list:
buf_dict[tn] = "tmp_buf"
print >>fo,"\t\t\t\tString tmp_result = \"\";"
print >>fo,"\t\t\t\tString gb_key = (String)tmp_key;"
col_list = []
if x.having_clause is not None:
ystree.__get_gb_list__(tree.having_clause.where_condition_exp,col_list)
having_len = len(col_list)
for i in range(0,len(x.select_list.tmp_exp_list)-having_len):
exp = x.select_list.tmp_exp_list[i]
if ystree.__groupby_func_name__(exp) not in agg_func_list:
if isinstance(exp,ystree.YRawColExp):
tmp_exp = "tmp_buf["+str(exp.column_name) + "]"
elif isinstance(exp,ystree.YFuncExp):
tmp_exp = __select_func_convert_to_java__(exp,buf_dict)
tmp_exp += ".toString()"
elif isinstance(exp,ystree.YConsExp):
tmp_exp = "\""
tmp_exp += __para_to_java__(exp.cons_type,exp.cons_value,None)
tmp_exp += "\""
print >>fo,"\t\t\t\ttmp_result = tmp_result.concat("+tmp_exp+");"
print >>fo,"\t\t\t\ttmp_result = tmp_result.concat(\"|\");"
else :### groupby exp
buf_dict["AGG"] = tmp_gb_output
tmp_result = __gb_exp_to_java__(exp,gb_exp_list,buf_dict,"gb_key")
print >>fo,"\t\t\t\ttmp_result = tmp_result.concat("+tmp_result+"+\"\");"
print >>fo,"\t\t\t\ttmp_result = tmp_result.concat(\"|\");"
if x.where_condition is not None:
tmp_list = []
__get_gb_exp__(x.where_condition.where_condition_exp,tmp_list)
for tmp in tmp_list:
for exp in gb_exp_list:
if tmp.compare(exp) is True:
func_obj = tmp.func_obj
exp_index = gb_exp_list.index(exp)
new_exp = ystree.YRawColExp("AGG",exp_index)
new_exp.column_name = int(new_exp.column_name)
new_exp.column_type = tmp.get_value_type()
func_obj.replace(tmp,new_exp)
break
buf_dict = {}
buf_dict["AGG"] = tmp_gb_output
for x in tree.table_list:
buf_dict[x] = "tmp_buf"
print >>fo,"\t\t\t\t" + __where_convert_to_java__(x.where_condition.where_condition_exp,buf_dict)+ "){\n"
print >>fo,"\t\t\t\tjfc_output[" + str(tree.jfc_node_list.index(x)) + "].add(tmp_result);"
print >>fo,"\t\t\t\t}"
else:
print >>fo,"\t\t\t\tjfc_output[" + str(tree.jfc_node_list.index(x)) + "].add(tmp_result);"
print >>fo,"\t\t\t}"
elif isinstance(x,ystree.TwoJoinNode):
jfc_left_buf = "jfc_left_buf"
jfc_right_buf = "jfc_right_buf"
tmp_left_buf = jfc_left_buf+"_"+str(tree.jfc_node_list.index(x))
tmp_right_buf = jfc_right_buf+"_"+str(tree.jfc_node_list.index(x))
buf_dict["LEFT"] = tmp_left_buf
buf_dict["RIGHT"] = tmp_right_buf
reduce_value_type = "Text"
if x.left_child in tree.it_node_list:
left_index = tree.it_node_list.index(x.left_child)
left_input = "it_output[" + str(left_index) + "]"
elif x.left_child in tree.jfc_node_list:
left_index = tree.jfc_node_list.index(x.left_child)
left_input = "jfc_output[" + str(left_index) + "]"
elif isinstance(x.left_child,ystree.TableNode):
left_index = tree.it_node_list.index(x)
left_input = left_array + "_" + str(left_index)
else:
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
if x.right_child in tree.it_node_list:
right_index = tree.it_node_list.index(x.right_child)
right_input ="it_output[" + str(right_index) + "]"
elif x.right_child in tree.jfc_node_list:
right_index = tree.jfc_node_list.index(x.right_child)
right_input ="jfc_output[" + str(right_index) + "]"
elif isinstance(x.right_child,ystree.TableNode):
right_index = tree.it_node_list.index(x)
right_input = right_array + "_" + str(right_index)
else:
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
if x.join_explicit is True:
join_type = x.join_type.upper()
if join_type == "LEFT":
print >>fo,"\t\t\tfor(int i=0;i<"+left_input+".size();i++){"
print >>fo,"\t\t\t\tString[] "+tmp_left_buf+"=((String)"+left_input+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\tif("+right_input+".size()>0){"
print >>fo,"\t\t\t\t\tfor(int j=0;j<"+right_input+".size();j++){"
print >>fo,"\t\t\t\t\t\tString[] "+tmp_right_buf+"=((String)"+right_input+".get(j)).split(\"\\\|\");"
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(exp,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t}else{"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"LEFT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
new_where = __gen_join_where__(exp,"LEFT")
print >>fo,"\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}"
elif join_type == "RIGHT":
print >>fo,"\t\t\tfor(int i=0;i<"+right_input+".size();i++){"
print >>fo,"\t\t\t\tString[] "+tmp_right_buf+"=((String)"+right_input+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\tif("+left_input+".size()>0){"
print >>fo,"\t\t\t\t\tfor(int j=0;j<"+left_input+".size();j++){"
print >>fo,"\t\t\t\t\t\tString[] "+tmp_left_buf+"=((String)"+left_input+".get(j)).split(\"\\\|\");"
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(exp,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t}else{"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"RIGHT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
new_where = __gen_join_where__(exp,"RIGHT")
print >>fo,"\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}"
elif join_type == "FULL":
print >>fo, "\t\t\tif(" + left_input+".size()>0&&"+right_input+".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<"+left_input+".size();i++){"
print >>fo, "\t\t\t\t\tString[] "+tmp_left_buf+"=((String)"+left_input+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\t\tfor(int j=0;j<"+right_input+".size();j++){"
print >>fo,"\t\t\t\t\t\tString[] "+tmp_right_buf+"=((String)"+right_input+".get(j)).split(\"\\\|\");"
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\t\tif("+__where_convert_to_java__(exp,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t\t}"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}else if (" + left_input + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<"+left_input+".size();i++){"
print >>fo, "\t\t\t\t\tString[] "+tmp_left_buf+"=((String)"+left_input+".get(i)).split(\"\\\|\");"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"LEFT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
new_where = __gen_join_where__(exp,"LEFT")
print >>fo,"\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}else if (" + right_input + ".size()>0){"
print >>fo, "\t\t\t\tfor(int i=0;i<"+right_input+".size();i++){"
print >>fo, "\t\t\t\t\tString[] "+tmp_right_buf+"=((String)"+right_input+".get(i)).split(\"\\\|\");"
new_list = []
__gen_join_list__(x.select_list.tmp_exp_list,new_list,"RIGHT")
reduce_value = __gen_mr_value__(new_list,reduce_value_type,buf_dict)
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
new_where = __gen_join_where__(exp,"RIGHT")
print >>fo,"\t\t\t\t\tif("+__where_convert_to_java__(new_where,buf_dict)+"){"
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\t\tjfc_output["+str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo, "\t\t\t\t}"
print >>fo, "\t\t\t}"
else:
print >>sys.stderr,"Internal Error: gen_composite"
exit(29)
else:
reduce_value = __gen_mr_value__(x.select_list.tmp_exp_list,reduce_value_type,buf_dict)
print >>fo,"\t\t\tfor(int i=0;i<"+left_input+".size();i++){"
print >>fo,"\t\t\t\tString[] "+tmp_left_buf+" = ((String)"+left_input+".get(i)).split(\"\\\|\");"
print >>fo,"\t\t\t\tfor(int j=0;j<"+right_input+".size();j++){"
print >>fo,"\t\t\t\t\tString[] "+tmp_right_buf+" = ((String)"+right_input+".get(j)).split(\"\\\|\");"
if x.where_condition is not None:
exp = x.where_condition.where_condition_exp
print >>fo,"\t\t\t\t\tif(" +__where_convert_to_java__(exp,buf_dict) + "){"
print >>fo,"\t\t\t\t\t\tjfc_output[" +str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t\t}"
else:
print >>fo,"\t\t\t\t\tjfc_output[" +str(tree.jfc_node_list.index(x))+"].add("+reduce_value+");"
print >>fo,"\t\t\t\t}"
print >>fo,"\t\t\t}"
### generate final output
print >>fo,"\t\t\tNullWritable key_op=NullWritable.get();"
for x in tree.jfc_node_list:
#####fix me here: right now only one output is supported here
if x.parent not in tree.jfc_node_list:
print >>fo,"\t\t\tfor(int i=0;i<jfc_output["+str(tree.jfc_node_list.index(x))+"].size();i++){"
print >>fo,"\t\t\t\tString jfc_result = (String)jfc_output["+str(tree.jfc_node_list.index(x))+"].get(i);"
print >>fo,"\t\t\t\tcontext.write(key_op, new Text(jfc_result));"
print >>fo,"\t\t\t}"
print >>fo, "\t\t}\n"
print >>fo, "\t}\n"
__gen_main__(tree,fo,map_key_type,map_value_type,reduce_key_type,reduce_value_type,True)
def __gen_main__(tree,fo,map_key_type,map_value_type,reduce_key_type,reduce_value_type,reduce_bool):
    """Write the Java run()/main() driver methods of the generated job class.

    The key/value class registrations come straight from the *_type
    arguments; reduce_bool controls whether a Reduce class is registered.
    Input/output path wiring depends on the plan-node type: a TwoJoinNode
    reads args[0] and args[1] and writes args[2]; a CompositeNode reads one
    path per map-output source and writes the path after them; any other
    node reads args[0] and writes args[1].
    """
    def emit(line):
        # print >>fo, s  ==  fo.write(s + "\n")
        fo.write(line + "\n")
    emit("\tpublic int run(String[] args) throws Exception{\n")
    # The generated class is named after the output .java file.
    job_class = fo.name.split(".java")[0]
    emit("\t\tConfiguration conf = new Configuration();")
    emit("\t\tJob job = new Job(conf,\"" + job_class + "\");")
    emit("\t\tjob.setJarByClass(" + job_class + ".class);")
    emit("\t\tjob.setMapOutputKeyClass(" + map_key_type + ".class);")
    emit("\t\tjob.setMapOutputValueClass(" + map_value_type + ".class);")
    emit("\t\tjob.setOutputKeyClass(" + reduce_key_type + ".class);")
    emit("\t\tjob.setOutputValueClass(" + reduce_value_type + ".class);")
    emit("\t\tjob.setMapperClass(Map.class);")
    if reduce_bool is True:
        emit("\t\tjob.setReducerClass(Reduce.class);")
    if isinstance(tree,ystree.TwoJoinNode):
        # A join job consumes two input paths and one output path.
        emit("\t\tFileInputFormat.addInputPath(job,new Path(args[0]));")
        emit("\t\tFileInputFormat.addInputPath(job,new Path(args[1]));")
        emit("\t\tFileOutputFormat.setOutputPath(job, new Path(args[2]));")
    elif isinstance(tree,ystree.CompositeNode):
        # One input path per map-output source; the output path comes last.
        source_count = len(tree.mapoutput.keys())
        for idx in range(0, source_count):
            emit("\t\tFileInputFormat.addInputPath(job,new Path(args[" + str(idx) + "]));")
        emit("\t\tFileOutputFormat.setOutputPath(job, new Path(args[" + str(source_count) + "]));")
    else:
        emit("\t\tFileInputFormat.addInputPath(job, new Path(args[0]));")
        emit("\t\tFileOutputFormat.setOutputPath(job, new Path(args[1]));")
    emit("\t\treturn (job.waitForCompletion(true) ? 0 : 1);")
    emit("\t}\n")
    emit("\tpublic static void main(String[] args) throws Exception {\n")
    emit("\t\t\tint res = ToolRunner.run(new Configuration(), new " + job_class + "(), args);")
    emit("\t\t\tSystem.exit(res);")
    emit("\t}\n")
def __tablenode_code_gen__(tree,fo):
    """Write a complete Java job source file for a TableNode plan node."""
    __gen_des__(fo)
    __gen_header__(fo)
    class_name = fo.name.split(".java")[0]
    fo.write("public class " + class_name + " extends Configured implements Tool{\n\n")
    __tablenode_gen_mr__(tree,fo)
    fo.write("}\n\n")
def __orderby_code_gen__(tree,fo):
    """Write a complete Java job source file for an OrderByNode plan node."""
    __gen_des__(fo)
    __gen_header__(fo)
    class_name = fo.name.split(".java")[0]
    fo.write("public class " + class_name + " extends Configured implements Tool{\n\n")
    __orderby_gen_mr__(tree,fo)
    fo.write("}\n\n")
def __groupby_code_gen__(tree,fo):
    """Write a complete Java job source file for a GroupByNode plan node."""
    __gen_des__(fo)
    __gen_header__(fo)
    class_name = fo.name.split(".java")[0]
    fo.write("public class " + class_name + " extends Configured implements Tool{\n\n")
    __groupby_gen_mr__(tree,fo)
    fo.write("}\n\n")
def __join_code_gen__(tree,left_name,fo):
    """Write a complete Java job source file for a TwoJoinNode plan node.

    left_name identifies the join's left-side input and is forwarded to
    the MR-body generator.
    """
    __gen_des__(fo)
    __gen_header__(fo)
    class_name = fo.name.split(".java")[0]
    fo.write("public class " + class_name + " extends Configured implements Tool{\n\n")
    __join_gen_mr__(tree,left_name,fo)
    fo.write("}\n\n")
def __composite_code_gen__(tree,fo):
    """Write a complete Java job source file for a CompositeNode plan node."""
    __gen_des__(fo)
    __gen_header__(fo)
    class_name = fo.name.split(".java")[0]
    fo.write("public class " + class_name + " extends Configured implements Tool{\n\n")
    __composite_gen_mr__(tree,fo)
    fo.write("}\n\n")
def generate_code(tree,filename):
"""Recursively emit one Java source file per executable plan node.

Returns the name of the last job file generated.  `filename` is expected
to end in a digit: names for descendant jobs are derived by incrementing
that trailing digit (filename[:-1] + str(int(filename[-1])+1)).
"""
op_name = filename + ".java"
ret_name = filename
# --- Leaf table scan: one job file, no recursion.
if isinstance(tree,ystree.TableNode):
tree.output = filename
fo = open(op_name,"w")
__tablenode_code_gen__(tree,fo)
return ret_name
# --- Order-by: emit this job, then recurse into the child or, when a
# composite plan was attached, into the composite instead.
elif isinstance(tree,ystree.OrderByNode):
tree.output = filename
fo = open(op_name,"w")
__orderby_code_gen__(tree,fo)
if tree.composite is None:
if not isinstance(tree.child,ystree.TableNode):
filename = filename[:-1] + str(int(filename[-1])+1)
ret_name = generate_code(tree.child,filename)
else:
filename = filename[:-1] + str(int(filename[-1])+1)
ret_name = generate_code(tree.composite,filename)
return ret_name
# --- Select/project: opens the file but writes nothing itself; all code
# comes from the child (which reuses the same filename).
elif isinstance(tree,ystree.SelectProjectNode):
tree.output = filename
fo = open(op_name,"w")
ret_name = generate_code(tree.child,filename)
return ret_name
# --- Group-by: same shape as the order-by branch.
elif isinstance(tree,ystree.GroupByNode):
tree.output = filename
fo = open(op_name,"w")
__groupby_code_gen__(tree,fo)
if tree.composite is None:
if not isinstance(tree.child,ystree.TableNode):
filename = filename[:-1] + str(int(filename[-1])+1)
ret_name = generate_code(tree.child,filename)
else:
filename = filename[:-1] + str(int(filename[-1])+1)
ret_name = generate_code(tree.composite,filename)
return ret_name
# --- Join: the second argument to __join_code_gen__ names the left-side
# input (either an it_node index within a composite, a derived child job
# name, or a raw table name).
elif isinstance(tree,ystree.TwoJoinNode):
tree.output = filename
fo = open(op_name,"w")
if tree.left_composite is not None and tree.right_composite is not None:
new_name = filename[:-1] + str(int(filename[-1])+1)
if len(tree.left_composite.jfc_node_list) == 0:
index = -1
for node in tree.left_composite.it_node_list:
if tree == node.parent:
index = tree.left_composite.it_node_list.index(node)
break
__join_code_gen__(tree,str(index),fo)
else:
__join_code_gen__(tree,new_name,fo)
new_name = generate_code(tree.left_composite,new_name)
new_name = new_name[:-1] +str(int(new_name[-1])+1)
ret_name = generate_code(tree.right_composite,new_name)
elif tree.left_composite is not None:
new_name = filename[:-1] +str(int(filename[-1])+1)
if len(tree.left_composite.jfc_node_list) == 0:
# NOTE(review): unlike the branch above, `index` has no -1 default
# here; if no it_node matches, __join_code_gen__ raises NameError --
# TODO confirm whether a match is guaranteed on this path.
for node in tree.left_composite.it_node_list:
if tree == node.parent:
index = tree.left_composite.it_node_list.index(node)
break
__join_code_gen__(tree,str(index),fo)
else:
__join_code_gen__(tree,new_name,fo)
new_name = generate_code(tree.left_composite,new_name)
ret_name = new_name
if not isinstance(tree.right_child,ystree.TableNode):
new_name = new_name[:-1] + str(int(new_name[-1])+1)
ret_name = generate_code(tree.right_child,new_name)
elif tree.right_composite is not None:
if not isinstance(tree.left_child,ystree.TableNode):
new_name = filename[:-1] +str(int(filename[-1])+1)
__join_code_gen__(tree,new_name,fo)
new_name = generate_code(tree.left_child,new_name)
else:
new_name = filename
new_name = new_name[:-1] +str(int(new_name[-1])+1)
ret_name = generate_code(tree.right_composite,new_name)
else:
# No composite on either side: plain two-child join.
if not isinstance(tree.left_child,ystree.TableNode):
new_name = filename[:-1] +str(int(filename[-1])+1)
__join_code_gen__(tree,new_name,fo)
ret_name = generate_code(tree.left_child,new_name)
else:
ret_name = filename
__join_code_gen__(tree,tree.left_child.table_name,fo)
if not isinstance(tree.right_child,ystree.TableNode):
new_name = ret_name[:-1] + str(int(ret_name[-1])+1)
ret_name = generate_code(tree.right_child,new_name)
return ret_name
elif isinstance(tree,ystree.CompositeNode):
### to make sure that the CompositeNode is only visited once
# dep == -1 is the "already generated" sentinel set below.
if tree.dep == -1:
return ret_name
tree.output = filename
fo = open(op_name,"w")
tree.dep = -1
__composite_code_gen__(tree,fo)
if len(tree.child_list) > 0:
# Child jobs are numbered consecutively after this node's suffix.
i=int(filename[-1]) +1
for x in tree.child_list:
if isinstance(x,ystree.TableNode):
continue
new_name = filename[:-1]+str(i)
new_name = generate_code(x,new_name)
ret_name = new_name
i = i+1
return ret_name
# NOTE(review): every isinstance branch above returns before reaching
# this line, so the close() appears unreachable (and `fo` would be
# unbound for an unrecognized node type) -- TODO confirm intended
# placement; the file objects opened above are otherwise never closed
# explicitly.
fo.close()
def compile_class(tree,codedir,package_path,filename,fo):
    """Write (and, when config.compile_jar is set, run) the javac command
    that compiles every generated .java file under `codedir` into the
    current directory.

    The Hadoop jar version is taken from `hadoop version` when
    HADOOP_HOME is set, and defaults to 0.21.0 otherwise.
    """
    if "HADOOP_HOME" in os.environ:
        # First token after the command name on the first output line,
        # e.g. "Hadoop 0.21.0" -> "0.21.0".
        first_line = commands.getoutput("$HADOOP_HOME/bin/hadoop version").split("\n")[0]
        hadoop_version = first_line.split(" ")[1]
    else:
        hadoop_version = "0.21.0"
    jars = ["$HADOOP_HOME/hadoop-%s-%s.jar" % (component, hadoop_version)
            for component in ("common", "hdfs", "mapred")]
    cmd = "javac -classpath " + ":".join(jars) + " " + codedir + "/*.java -d ."
    fo.write(cmd + "\n")
    if config.compile_jar is True:
        os.system(cmd)
def generate_jar(jardir,path,filename,fo):
    """Write (and, when config.compile_jar is set, run) the jar command
    that packages the compiled classes under `path` into
    `jardir`/`filename`.jar.
    """
    cmd = "jar -cf %s/%s.jar %s" % (jardir, filename, path)
    fo.write(cmd + "\n")
    if config.compile_jar is True:
        os.system(cmd)
def execute_jar(tree,jardir,jarname,classname,input_path,output_path,fo):
"""Write into `fo` (and, when both config.compile_jar and config.exec_jar
are set, run) the `hadoop jar` command lines that execute the generated
jobs for this plan tree.

Children are scheduled before their parent's command is emitted, so a
producer job's output directory exists as a consumer's input.  Returns
the class name of the last job scheduled.
"""
ret_name = classname
# --- Table scan: read input_path/<table>/, write output_path/<table>/.
if isinstance(tree,ystree.TableNode):
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath + classname + " " + input_path + "/" + tree.table_name + "/"
cmd += " " + output_path + "/" + tree.table_name + "/"
print >>fo, cmd
if config.compile_jar is True and config.exec_jar is True:
os.system(cmd)
# --- Order-by: schedule the child (or its composite) first, then read
# its output; a bare table child is read from input_path directly.
elif isinstance(tree,ystree.OrderByNode):
if not isinstance(tree.child,ystree.TableNode):
new_name = classname[:-1] + str(int(classname[-1])+1)
if tree.composite is not None:
# dep == -2 marks a composite that was already scheduled.
if tree.composite.dep != -2:
ret_name = execute_jar(tree.composite,jardir,jarname,new_name,input_path,output_path,fo)
if len(tree.composite.jfc_node_list) == 0:
index = -1
for node in tree.composite.it_node_list:
if node.parent == tree:
index = tree.composite.it_node_list.index(node)
break
# The composite writes each it_node's rows to a numbered subdir.
new_name = tree.composite.output + "/" + str(index)
else:
ret_name = execute_jar(tree.child,jardir,jarname,new_name,input_path,output_path,fo)
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath +classname + " " + output_path + "/" + new_name
cmd += " " + output_path + "/" + classname + "/"
else:
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath +classname + " " + input_path + "/" + tree.child.table_name + "/"
cmd += " " + output_path + "/" + classname + "/"
print >>fo,cmd
if config.compile_jar is True and config.exec_jar is True:
os.system(cmd)
# --- Select/project has no job of its own: delegate to the child.
elif isinstance(tree,ystree.SelectProjectNode):
ret_name = execute_jar(tree.child,jardir,jarname,classname,input_path,output_path,fo)
# --- Group-by: same scheduling shape as the order-by branch.
elif isinstance(tree,ystree.GroupByNode):
if not isinstance(tree.child,ystree.TableNode):
new_name = classname[:-1] + str(int(classname[-1])+1)
if tree.composite is not None:
if tree.composite.dep != -2:
ret_name = execute_jar(tree.composite,jardir,jarname,new_name,input_path,output_path,fo)
if len(tree.composite.jfc_node_list) == 0:
index = -1
for node in tree.composite.it_node_list:
if node.parent == tree:
index = tree.composite.it_node_list.index(node)
break
new_name = tree.composite.output + "/" + str(index)
else:
ret_name = execute_jar(tree.child,jardir,jarname,new_name,input_path,output_path,fo)
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath + classname + " " + output_path + "/" + new_name
cmd += " " + output_path + "/" + classname + "/"
else:
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath + classname + " " + input_path + "/" + tree.child.table_name + "/"
cmd += " " + output_path + "/" + classname + "/"
print >>fo,cmd
if config.compile_jar is True and config.exec_jar is True:
os.system(cmd)
# --- Join: the command accumulates left input, right input, then the
# output path, with each side resolved like the single-child branches.
elif isinstance(tree,ystree.TwoJoinNode):
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath +classname + " " + input_path + "/"
if not isinstance(tree.left_child,ystree.TableNode):
new_name = classname[:-1] + str(int(classname[-1])+1)
if tree.left_composite is not None:
if tree.left_composite.dep != -2:
ret_name = execute_jar(tree.left_composite,jardir,jarname,new_name,input_path,output_path,fo)
if len(tree.left_composite.jfc_node_list) == 0:
index = -1
for node in tree.left_composite.it_node_list:
if node.parent == tree:
index = tree.left_composite.it_node_list.index(node)
##### Make sure the node in the it_node_list is counted only once
tree.left_composite.it_node_list[index].parent = None
break
new_name = tree.left_composite.output + "/" + str(index)
else:
ret_name = execute_jar(tree.left_child,jardir,jarname,new_name,input_path,output_path,fo)
# Rebuild the command prefix: the left input lives under output_path.
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath +classname + " " + output_path + "/"
cmd += new_name + "/"
else:
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath +classname + " " + input_path + "/"
cmd += tree.left_child.table_name + "/"
ret_name = classname
if not isinstance(tree.right_child,ystree.TableNode):
new_name = ret_name[:-1] + str(int(ret_name[-1])+1)
if tree.right_composite is not None:
if tree.right_composite.dep != -2:
ret_name = execute_jar(tree.right_composite,jardir,jarname,new_name,input_path,output_path,fo)
if len(tree.right_composite.jfc_node_list) == 0:
index = -1
for node in tree.right_composite.it_node_list:
if node.parent == tree:
index = tree.right_composite.it_node_list.index(node)
# Same visited-once marking as on the left side.
tree.right_composite.it_node_list[index].parent = None
break
new_name = tree.right_composite.output + "/" + str(index)
else:
ret_name = execute_jar(tree.right_child,jardir,jarname,new_name,input_path,output_path,fo)
cmd += " " + output_path + "/"
cmd += new_name + "/"
else:
cmd += " " + input_path + "/"
cmd += tree.right_child.table_name + "/"
cmd += " " + output_path + "/" + classname + "/"
print >>fo,cmd
if config.compile_jar is True and config.exec_jar is True:
os.system(cmd)
# --- Composite: schedule all non-table children, then emit one command
# whose inputs are either child-job outputs or raw tables.
elif isinstance(tree,ystree.CompositeNode):
### make sure the Composite Node is visited only once
if tree.dep == -2:
return ret_name
tree.dep = -2
child_list = []
new_name = classname[:-1] + str(int(classname[-1])+1)
child_list.append(new_name)
cmd = "$HADOOP_HOME/bin/hadoop jar " + jardir + "/" + jarname + ".jar " + packagepath + classname + " "
for node in tree.child_list:
ret_name = execute_jar(node,jardir,jarname,new_name,input_path,output_path,fo)
new_name = ret_name[:-1] + str(int(new_name[-1])+1)
child_list.append(new_name)
# Map sources whose name matches a scheduled child job are read from
# output_path; everything else is assumed to be a table in input_path.
for tn in tree.mapoutput.keys():
if tn in child_list:
cmd += output_path + "/" + tn + " "
else:
cmd += input_path + "/" + tn + " "
cmd += output_path + "/" + classname
print >>fo,cmd
if config.compile_jar is True and config.exec_jar is True:
os.system(cmd)
return ret_name
def ysmart_code_gen(argv,input_path,output_path):
"""Top-level driver: parse the query (argv[1]) and schema (argv[2]) into
a plan tree, optionally apply correlation optimization, generate the
Java sources under ./result/YSmartCode, and write a shell script
(config.scriptname) that compiles, packages, and executes the jobs.
"""
pwd = os.getcwd()
resultdir = "./result"
codedir = "./YSmartCode"
jardir = "./YSmartJar"
# Module-level package path/name are extended below with the query name.
global packagepath
global packagename
tree_node = ystree.ysmart_tree_gen(argv[1],argv[2])
if config.turn_on_correlation is True:
tree_node = correlation.ysmart_correlation(tree_node)
if tree_node is None:
exit(-1)
if os.path.exists(resultdir) is False:
os.makedirs(resultdir)
os.chdir(resultdir)
# Create both work dirs only when neither exists yet.
if os.path.exists(codedir) or os.path.exists(jardir):
pass
else:
os.makedirs(codedir)
os.makedirs(jardir)
packagepath += config.queryname + "/"
packagename += "." + config.queryname
# Job class names start at "<queryname>1" and count up from there.
config.queryname += "1"
os.chdir(codedir)
generate_code(tree_node,config.queryname)
# Return to the original cwd, then write the build/run script in ./result.
os.chdir(pwd)
os.chdir(resultdir)
fo = open(config.scriptname,'w')
compile_class(tree_node,codedir,packagepath,config.queryname,fo)
generate_jar(jardir,packagepath,config.queryname,fo)
execute_jar(tree_node,jardir,config.queryname,config.queryname,input_path,output_path,fo)
os.chdir(pwd)
| 43.304335
| 165
| 0.538044
| 19,971
| 147,841
| 3.68289
| 0.024736
| 0.046471
| 0.043235
| 0.076233
| 0.879594
| 0.838806
| 0.804571
| 0.765387
| 0.722206
| 0.683934
| 0
| 0.005164
| 0.29529
| 147,841
| 3,413
| 166
| 43.317023
| 0.700801
| 0.022822
| 0
| 0.697825
| 0
| 0.005341
| 0.184405
| 0.04822
| 0.002671
| 0
| 0
| 0
| 0
| 1
| 0.012972
| false
| 0.000382
| 0.008775
| 0
| 0.035864
| 0.305609
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ea75190cdebd04db1056d5c240547c4dd25d681d
| 180
|
py
|
Python
|
sh_app/admin.py
|
allenkim/skill-huddle
|
5f2fab44f40df7dec889270ea18308b40b80478e
|
[
"MIT"
] | null | null | null |
sh_app/admin.py
|
allenkim/skill-huddle
|
5f2fab44f40df7dec889270ea18308b40b80478e
|
[
"MIT"
] | null | null | null |
sh_app/admin.py
|
allenkim/skill-huddle
|
5f2fab44f40df7dec889270ea18308b40b80478e
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from sh_app.models import *
admin.site.register(SH_User)
admin.site.register(League)
admin.site.register(Huddle)
admin.site.register(Suggestion)
| 20
| 32
| 0.816667
| 27
| 180
| 5.37037
| 0.518519
| 0.248276
| 0.468966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 180
| 8
| 33
| 22.5
| 0.873494
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
57b46548fb2a549210f8e3224d33e87a72c539a7
| 59
|
py
|
Python
|
algoneer/algorithm/__init__.py
|
algoneer/algoneer-py
|
5f300543116278c91a9cf8c9ef5a1375e3f1e75d
|
[
"MIT"
] | 10
|
2019-08-05T16:06:12.000Z
|
2020-12-19T16:40:48.000Z
|
algoneer/algorithm/__init__.py
|
algoneer/algoneer-py
|
5f300543116278c91a9cf8c9ef5a1375e3f1e75d
|
[
"MIT"
] | null | null | null |
algoneer/algorithm/__init__.py
|
algoneer/algoneer-py
|
5f300543116278c91a9cf8c9ef5a1375e3f1e75d
|
[
"MIT"
] | 1
|
2020-04-27T08:50:14.000Z
|
2020-04-27T08:50:14.000Z
|
from .algorithm import Algorithm, AlgorithmAlgorithmSchema
| 29.5
| 58
| 0.881356
| 5
| 59
| 10.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 59
| 1
| 59
| 59
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
57de01c328bdb868da32a09b6d817db928111e98
| 55
|
py
|
Python
|
backend/app/views/sketch_constructs/__init__.py
|
Edinburgh-Genome-Foundry/CUBA
|
d57565951ead619ef9263e8b356b451001fb910f
|
[
"MIT"
] | 15
|
2018-02-12T13:12:13.000Z
|
2021-08-15T11:37:59.000Z
|
backend/app/views/sketch_constructs/__init__.py
|
Edinburgh-Genome-Foundry/CUBA
|
d57565951ead619ef9263e8b356b451001fb910f
|
[
"MIT"
] | 9
|
2020-06-05T17:54:54.000Z
|
2022-02-12T12:03:19.000Z
|
backend/app/views/sketch_constructs/__init__.py
|
Edinburgh-Genome-Foundry/CUBA
|
d57565951ead619ef9263e8b356b451001fb910f
|
[
"MIT"
] | 3
|
2018-10-18T13:08:50.000Z
|
2020-08-17T14:09:46.000Z
|
from .SketchConstructsView import SketchConstructsView
| 27.5
| 54
| 0.909091
| 4
| 55
| 12.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 55
| 1
| 55
| 55
| 0.980392
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
57e0e561d076302aeaa6f7ea3bb9341fa119c7dd
| 27
|
py
|
Python
|
sources/__init__.py
|
AB9IL/stream-sources
|
ede8bd3ad7d51723d489192d0a6c5b2ea31ffe56
|
[
"Unlicense"
] | 40
|
2018-03-06T03:57:51.000Z
|
2021-12-10T02:53:51.000Z
|
sources/__init__.py
|
AB9IL/stream-sources
|
ede8bd3ad7d51723d489192d0a6c5b2ea31ffe56
|
[
"Unlicense"
] | 18
|
2022-03-16T21:06:49.000Z
|
2022-03-30T20:45:48.000Z
|
sources/__init__.py
|
AB9IL/stream-sources
|
ede8bd3ad7d51723d489192d0a6c5b2ea31ffe56
|
[
"Unlicense"
] | 5
|
2018-04-01T18:14:40.000Z
|
2020-12-07T13:47:06.000Z
|
from .source import Source
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
57f9453d39b340c8bb70f685bedc3c17bf23cf05
| 34
|
py
|
Python
|
src/infrastructure/langtools.py
|
PeachyPrinter/peachyprinter
|
6d82b9eaaa03129870aa637eabdc0cb66e90b626
|
[
"Apache-2.0"
] | 12
|
2016-05-12T14:05:30.000Z
|
2021-04-03T06:03:37.000Z
|
src/infrastructure/langtools.py
|
PeachyPrinter/peachyprinter
|
6d82b9eaaa03129870aa637eabdc0cb66e90b626
|
[
"Apache-2.0"
] | 1
|
2016-02-03T21:46:19.000Z
|
2016-02-04T01:48:31.000Z
|
src/infrastructure/langtools.py
|
PeachyPrinter/peachyprinter
|
6d82b9eaaa03129870aa637eabdc0cb66e90b626
|
[
"Apache-2.0"
] | 12
|
2016-01-27T15:14:25.000Z
|
2020-08-21T00:44:43.000Z
|
def _(string):
return string
| 8.5
| 17
| 0.647059
| 4
| 34
| 5.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.264706
| 34
| 3
| 18
| 11.333333
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
17ae4d8f1eea0a417f9c6a5f0a5a0193dee1d15e
| 226
|
py
|
Python
|
tests/test_all_challenges_load.py
|
burrowsa/thebutton
|
b7a25cdf2ef7bef961f355d64a123090e8ccbe49
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_all_challenges_load.py
|
burrowsa/thebutton
|
b7a25cdf2ef7bef961f355d64a123090e8ccbe49
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_all_challenges_load.py
|
burrowsa/thebutton
|
b7a25cdf2ef7bef961f355d64a123090e8ccbe49
|
[
"BSD-2-Clause"
] | 1
|
2020-09-03T03:39:26.000Z
|
2020-09-03T03:39:26.000Z
|
from thebutton.loader import load_all_challenges
import os
def test_all_challenges_load():
for challenge in load_all_challenges(os.path.join(os.path.dirname(os.path.dirname(__file__)), "challenges"), True):
pass
| 28.25
| 119
| 0.769912
| 33
| 226
| 4.939394
| 0.575758
| 0.239264
| 0.208589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128319
| 226
| 7
| 120
| 32.285714
| 0.827411
| 0
| 0
| 0
| 0
| 0
| 0.044248
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
17d0435f718ec1fdd2416a035c5db3f9bef411e5
| 43
|
py
|
Python
|
src/admin/__init__.py
|
isaachenrion/jets
|
59aeba81788d0741af448192d9dfb764fb97cf8d
|
[
"BSD-3-Clause"
] | 9
|
2017-10-09T17:01:52.000Z
|
2018-06-12T18:06:05.000Z
|
src/admin/__init__.py
|
isaachenrion/jets
|
59aeba81788d0741af448192d9dfb764fb97cf8d
|
[
"BSD-3-Clause"
] | 31
|
2017-11-01T14:39:02.000Z
|
2018-04-18T15:34:24.000Z
|
src/admin/__init__.py
|
isaachenrion/jets
|
59aeba81788d0741af448192d9dfb764fb97cf8d
|
[
"BSD-3-Clause"
] | 10
|
2017-10-17T19:23:14.000Z
|
2020-07-05T04:44:45.000Z
|
from ._Administrator import _Administrator
| 21.5
| 42
| 0.883721
| 4
| 43
| 9
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aa02a7e7d7328fe309a06df4737b4092ec061b04
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/setuptools/command/py36compat.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/setuptools/command/py36compat.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/setuptools/command/py36compat.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/ef/22/d6/cd08f5efd127c77a49f15d5c0c30b378b30531df5725794afa2653ab96
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 0
| 96
| 1
| 96
| 96
| 0.479167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
aa03f1b6f2a754f3f2674968cc162af99c2b8e5c
| 27
|
py
|
Python
|
nu/v3/Membranes/Leak/__init__.py
|
bullgom/pysnn2
|
dad5ae26b029afd5c5bf76fe141249b0f7b7a36c
|
[
"MIT"
] | 9
|
2021-06-08T13:29:26.000Z
|
2022-03-29T17:29:46.000Z
|
nu/v3/Membranes/Leak/__init__.py
|
bullgom/pysnn2
|
dad5ae26b029afd5c5bf76fe141249b0f7b7a36c
|
[
"MIT"
] | null | null | null |
nu/v3/Membranes/Leak/__init__.py
|
bullgom/pysnn2
|
dad5ae26b029afd5c5bf76fe141249b0f7b7a36c
|
[
"MIT"
] | null | null | null |
from .Linear import Linear
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aa12231dc22534888bd56b7c16987be0afdbf1c9
| 43,067
|
py
|
Python
|
src/py-common/nightjar_common/envoy_transform/tests/service_test.py
|
groboclown/nightjar-mesh
|
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
|
[
"MIT"
] | 3
|
2019-12-23T23:46:02.000Z
|
2020-08-07T23:10:20.000Z
|
src/py-common/nightjar_common/envoy_transform/tests/service_test.py
|
groboclown/nightjar-mesh
|
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
|
[
"MIT"
] | 2
|
2020-02-07T15:59:15.000Z
|
2020-08-05T21:55:27.000Z
|
src/py-common/nightjar_common/envoy_transform/tests/service_test.py
|
groboclown/nightjar-mesh
|
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
|
[
"MIT"
] | 1
|
2020-05-28T00:46:05.000Z
|
2020-05-28T00:46:05.000Z
|
"""Test the service module"""
# pylint: disable=C0302
from typing import Dict, Any
import unittest
from .. import service, common
from ...validation import validate_discovery_map
class ServiceTest(unittest.TestCase): # pylint: disable=R0904
"""Test the service functions."""
def test_create_service_color_proxy_input__no_namespace(self) -> None:
"""Test create_service_color_proxy_input with no matching namespace."""
res = service.create_service_color_proxy_input(
_mk_doc({}),
'n1', 's', 'c', 160, 170,
)
self.assertEqual(1, res)
def test_create_service_color_proxy_input__no_clusters(self) -> None:
"""Test create_service_color_proxy_input with no matching namespace."""
res = service.create_service_color_proxy_input(
_mk_doc({'namespaces': [_mk_namespace({})]}),
'n1', 's', 'c', 160, 170,
)
self.assertEqual(2, res)
def test_create_service_color_proxy_input__minimal(self) -> None:
"""Test create_service_color_proxy_input with no matching namespace."""
res = service.create_service_color_proxy_input(
_mk_doc({'namespaces': [_mk_namespace({'service-colors': [_mk_service_color({})]})]}),
'n1', 's', 'c', 160, 170,
)
self.assertEqual({
'schema-version': 'v1',
'network_name': 'nk1',
'service_member': 's-c',
'has_admin_port': True,
'admin_port': 170,
'has_clusters': False,
'clusters': [],
'listeners': [{'has_mesh_port': True, 'mesh_port': 160, 'routes': []}],
}, res)
def test_create_clusters__no_local_services(self) -> None:
"""Test create_clusters with no local services."""
res = service.create_clusters('n1', 's1', 'c1', _mk_doc({}))
self.assertIsNone(res)
def test_create_clusters__no_matching_services(self) -> None:
"""Test create_clusters with no mathing services."""
res = service.create_clusters('n1', 's1', 'c1', _mk_doc({
'namespaces': [_mk_namespace({'service-colors': [_mk_service_color({})]})],
}))
self.assertIsNone(res)
def test_create_clusters__local_and_nonlocal(self) -> None:
"""Test create_nonlocal_namespace_clusters with no non-local namespaces."""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'namespace': 'n1',
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/c/1'},
})],
'instances': [{'ipv4': '1.2.3.4', 'port': 12}],
'namespace-egress': [{
'namespace': 'n2',
'interface': {'ipv4': '127.0.0.1', 'port': 100},
}],
})],
}),
_mk_namespace({
'namespace': 'n2',
'gateways': {
'prefer-gateway': False,
'protocol': 'HTTP2',
'instances': [{'ipv6': '::3', 'port': 90}],
},
'service-colors': [_mk_service_color({
'service': 'rs', 'color': 'rc',
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/r/1'},
'default-access': True,
})],
'instances': [{'ipv4': '123.45.67.89', 'port': 990}],
})],
}),
]})
res = service.create_clusters('n1', 's', 'c', discovery_map)
self.assertIsNotNone(res)
assert res is not None # mypy requirement
self.assertEqual([
{
'name': 'local-s-c-1',
'endpoints': [{'host': '1.2.3.4', 'port': 12}],
'hosts_are_hostname': False,
'hosts_are_ipv4': True,
'hosts_are_ipv6': False,
'uses_http2': False,
},
{
'name': 'remote-n2-rs-rc-1',
'endpoints': [{'host': '123.45.67.89', 'port': 990}],
'hosts_are_hostname': False,
'hosts_are_ipv4': True,
'hosts_are_ipv6': False,
'uses_http2': False,
},
], [ec.get_context() for ec in res])
def test_create_local_namespace_clusters__no_services(self) -> None:
"""Test create_local_namespace_clusters with no services"""
res = service.create_local_namespace_clusters([])
self.assertEqual([], res)
def test_create_local_namespace_clusters__no_service_instances(self) -> None:
"""Test create_local_namespace_clusters with no instances"""
res = service.create_local_namespace_clusters([_mk_service_color({})])
self.assertEqual([], res)
def test_create_local_namespace_clusters__one_instance(self) -> None:
"""Test create_local_namespace_clusters with no instances"""
res = service.create_local_namespace_clusters([_mk_service_color({
'instances': [{'ipv4': '1.2.3.4', 'port': 123}],
})])
self.assertEqual(
[{
'name': 'local-s-c-1',
'endpoints': [{'host': '1.2.3.4', 'port': 123}],
'hosts_are_hostname': False,
'hosts_are_ipv4': True,
'hosts_are_ipv6': False,
'uses_http2': False,
}],
[cl.get_context() for cl in res],
)
def test_create_nonlocal_namespace_clusters__no_nonlocal(self) -> None:
"""Test create_nonlocal_namespace_clusters with no non-local namespaces."""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({
'namespace': 'n1', 'service-colors': [_mk_service_color({})],
})]})
res = service.create_nonlocal_namespace_clusters(
'n1', discovery_map['namespaces'][0]['service-colors'][0], discovery_map,
)
self.assertEqual([], res)
def test_create_nonlocal_namespace_clusters__nonlocal_no_gateways(self) -> None:
"""Test create_nonlocal_namespace_clusters with no non-local namespaces."""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'namespace': 'n1',
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/c/1'},
})],
'namespace-egress': [{
'namespace': 'n2',
'interface': {'ipv4': '127.0.0.1', 'port': 100},
}],
})],
}),
_mk_namespace({
'namespace': 'n2',
'gateways': {
'prefer-gateway': False,
'protocol': 'HTTP2',
'instances': [{'ipv6': '::3', 'port': 90}],
},
'service-colors': [_mk_service_color({
'service': 'rs', 'color': 'rc',
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/r/1'},
'default-access': True,
})],
'instances': [{'ipv4': '123.45.67.89', 'port': 990}],
})],
}),
]})
res = service.create_nonlocal_namespace_clusters(
'n1', discovery_map['namespaces'][0]['service-colors'][0], discovery_map,
)
self.assertEqual([{
'name': 'remote-n2-rs-rc-1',
'endpoints': [{'host': '123.45.67.89', 'port': 990}],
'hosts_are_hostname': False,
'hosts_are_ipv4': True,
'hosts_are_ipv6': False,
'uses_http2': False,
}], [ec.get_context() for ec in res])
def test_create_nonlocal_namespace_clusters__nonlocal_gateways(self) -> None:
"""Test create_nonlocal_namespace_clusters with no non-local namespaces."""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'namespace': 'n1',
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/c/1'},
})],
'namespace-egress': [{
'namespace': 'n2',
'interface': {'ipv4': '127.0.0.1', 'port': 100},
}],
})],
}),
_mk_namespace({
'namespace': 'n2',
'gateways': {
'prefer-gateway': True,
'protocol': 'HTTP2',
'instances': [{'ipv6': '::3', 'port': 90}],
},
'service-colors': [_mk_service_color({
'service': 'rs', 'color': 'rc',
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/r/1'},
'default-access': True,
})],
'instances': [{'ipv4': '123.45.67.89', 'port': 990}],
})],
}),
]})
res = service.create_nonlocal_namespace_clusters(
'n1', discovery_map['namespaces'][0]['service-colors'][0], discovery_map,
)
self.assertEqual([{
'name': 'remote-n2-gateway',
'endpoints': [{'host': '::3', 'port': 90}],
'hosts_are_hostname': False,
'hosts_are_ipv4': False,
'hosts_are_ipv6': True,
'uses_http2': True,
}], [ec.get_context() for ec in res])
def test_create_service_color_cluster__no_instances(self) -> None:
"""Test create_service_color_cluster with no instances"""
res = service.create_service_color_cluster('cs', _mk_service_color({}))
self.assertIsNone(res)
def test_create_service_color_cluster__two_instances(self) -> None:
"""Test create_service_color_cluster with no instances"""
res = service.create_service_color_cluster('cs', _mk_service_color({
'instances': [
{'hostname': 'xyz', 'port': 99},
{'hostname': 'abc', 'port': 98},
],
}))
self.assertIsNotNone(res)
assert res is not None # mypy requirement
self.assertEqual(
{
'name': 'cs',
'endpoints': [{'host': 'xyz', 'port': 99}, {'host': 'abc', 'port': 98}],
'hosts_are_hostname': True,
'hosts_are_ipv4': False,
'hosts_are_ipv6': False,
'uses_http2': False,
},
res.get_context(),
)
def test_create_gateway_cluster__no_instances(self) -> None:
"""Test create_gateway_cluster with no instances"""
res = service.create_gateway_cluster('c', {
'protocol': 'HTTP2',
'prefer-gateway': False,
'instances': [],
})
self.assertIsNone(res)
def test_create_gateway_cluster__two_instances(self) -> None:
"""Test create_gateway_cluster with no instances"""
res = service.create_gateway_cluster('c', {
'protocol': 'HTTP2',
'prefer-gateway': False,
'instances': [
{'ipv4': '127.0.0.1', 'port': 12},
{'ipv4': '99.0.0.9', 'port': 12},
],
})
self.assertIsNotNone(res)
assert res is not None # mypy requirement
self.assertEqual(
{
'name': 'c',
'uses_http2': True,
'hosts_are_ipv6': False,
'hosts_are_ipv4': True,
'hosts_are_hostname': False,
'endpoints': [
{'host': '127.0.0.1', 'port': 12},
{'host': '99.0.0.9', 'port': 12},
],
},
res.get_context(),
)
def test_create_route_listeners(self) -> None:
"""Test create_route_listeners with basic local and non-local routes."""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'namespace': 'n1',
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/c/1'},
})],
'namespace-egress': [{
'namespace': 'n2',
'interface': {'ipv4': '127.0.0.1', 'port': 100},
}],
})],
}),
_mk_namespace({
'namespace': 'n2',
'gateways': {
'prefer-gateway': False,
'protocol': 'HTTP2',
'instances': [{'ipv6': '::3', 'port': 90}],
},
'service-colors': [_mk_service_color({
'service': 'rs', 'color': 'rc',
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/r/1'},
})],
})],
}),
]})
res = service.create_route_listeners(900, 'n1', 's', 'c', discovery_map)
self.assertEqual(
[
{
'has_mesh_port': True,
'mesh_port': 900,
'routes': [{
'route_path': '/c/1',
'clusters': [{
'cluster_name': 'local-s-c-1',
'route_weight': 1,
}],
'has_header_filters': False,
'header_filters': [],
'has_query_filters': False,
'query_filters': [],
'has_many_clusters': False,
'has_one_cluster': True,
'path_is_case_sensitive': True,
'path_is_exact': True,
'path_is_prefix': False,
'path_is_regex': False,
'total_cluster_weight': 1,
}],
}, {
'has_mesh_port': True,
'mesh_port': 100,
'routes': [{
'route_path': '/r/1',
'clusters': [{
'cluster_name': 'remote-n2-rs-rc-1',
'route_weight': 1,
}],
'has_header_filters': False,
'header_filters': [],
'has_query_filters': False,
'query_filters': [],
'has_many_clusters': False,
'has_one_cluster': True,
'path_is_case_sensitive': True,
'path_is_exact': True,
'path_is_prefix': False,
'path_is_regex': False,
'total_cluster_weight': 1,
}],
},
],
[el.get_context() for el in res],
)
def test_create_local_route_listener__no_routes(self) -> None:
"""Test create_local_route_listener with no routes."""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({})]})
res = service.create_local_route_listener(60, 'n1', discovery_map)
self.assertEqual({
'has_mesh_port': True, 'mesh_port': 60, 'routes': [],
}, res.get_context())
def test_create_local_route_listener__one_routes(self) -> None:
"""Test create_local_route_listener with one route."""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/s/1'},
})],
})],
})]})
res = service.create_local_route_listener(60, 'n1', discovery_map)
self.assertEqual({
'has_mesh_port': True, 'mesh_port': 60, 'routes': [{
'route_path': '/s/1',
'clusters': [{'cluster_name': 'local-s-c-1', 'route_weight': 1}],
'has_header_filters': False,
'header_filters': [],
'has_query_filters': False,
'query_filters': [],
'has_many_clusters': False,
'has_one_cluster': True,
'path_is_case_sensitive': True,
'path_is_exact': True,
'path_is_prefix': False,
'path_is_regex': False,
'total_cluster_weight': 1,
}],
}, res.get_context())
def test_create_nonlocal_route_listeners__no_such_namespace(self) -> None:
"""Tests create_nonlocal_route_listeners with no given namespace"""
discovery_map = _mk_doc({})
res = service.create_nonlocal_route_listeners('n1', 's', 'c', discovery_map)
self.assertEqual([], res)
def test_create_nonlocal_route_listeners__no_service_colors(self) -> None:
"""Tests create_nonlocal_route_listeners with no service colors"""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({})]})
res = service.create_nonlocal_route_listeners('n1', 's', 'c', discovery_map)
self.assertEqual([], res)
def test_create_nonlocal_route_listeners__no_matching_service_colors(self) -> None:
"""Tests create_nonlocal_route_listeners with no service colors"""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({
'service-colors': [_mk_service_color({})],
})]})
res = service.create_nonlocal_route_listeners('n1', 's2', 'c2', discovery_map)
self.assertEqual([], res)
def test_create_nonlocal_route_listeners__no_nonlocal(self) -> None:
"""Tests create_nonlocal_route_listeners with no non-local namespaces"""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({'service-colors': [_mk_service_color({})]}),
]})
res = service.create_nonlocal_route_listeners('n1', 's', 'c', discovery_map)
self.assertEqual([], res)
def test_create_nonlocal_route_listeners__one_preferred_gateway(self) -> None:
"""Tests create_nonlocal_route_listeners with a non-local namespace,
which prefers use of a gateway"""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'namespace': 'n1',
'service-colors': [_mk_service_color({
'namespace-egress': [{
'namespace': 'n2',
'interface': {'ipv4': '127.0.0.1', 'port': 100},
}],
})],
}),
_mk_namespace({
'namespace': 'n2',
'gateways': {
'prefer-gateway': True,
'protocol': 'HTTP2',
'instances': [{'ipv6': '::3', 'port': 90}],
},
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/s/1'},
})],
})],
}),
]})
res = service.create_nonlocal_route_listeners('n1', 's', 'c', discovery_map)
self.assertEqual([{
'has_mesh_port': True,
'mesh_port': 100,
'routes': [{
# Routes to gateways are '/' always.
'route_path': '/',
'clusters': [{'cluster_name': 'remote-n2-gateway', 'route_weight': 1}],
'has_header_filters': False,
'header_filters': [],
'has_query_filters': False,
'query_filters': [],
'has_many_clusters': False,
'has_one_cluster': True,
'path_is_case_sensitive': True,
'path_is_exact': False,
'path_is_prefix': True,
'path_is_regex': False,
'total_cluster_weight': 1,
}],
}], [c.get_context() for c in res])
def test_create_nonlocal_route_listeners__one_service_direct(self) -> None:
"""Tests create_nonlocal_route_listeners with a non-local namespace,
which prefers use of a gateway"""
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'namespace': 'n1',
'service-colors': [_mk_service_color({
'namespace-egress': [{
'namespace': 'n2',
'interface': {'ipv4': '127.0.0.1', 'port': 100},
}],
})],
}),
_mk_namespace({
'namespace': 'n2',
'gateways': {
'prefer-gateway': False,
'protocol': 'HTTP2',
'instances': [{'ipv6': '::3', 'port': 90}],
},
'service-colors': [_mk_service_color({
'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/s/1'},
})],
})],
}),
]})
res = service.create_nonlocal_route_listeners('n1', 's', 'c', discovery_map)
self.assertEqual([{
'has_mesh_port': True,
'mesh_port': 100,
'routes': [{
'route_path': '/s/1',
'clusters': [{'cluster_name': 'remote-n2-s-c-1', 'route_weight': 1}],
'has_header_filters': False,
'header_filters': [],
'has_query_filters': False,
'query_filters': [],
'has_many_clusters': False,
'has_one_cluster': True,
'path_is_case_sensitive': True,
'path_is_exact': True,
'path_is_prefix': False,
'path_is_regex': False,
'total_cluster_weight': 1,
}],
}], [c.get_context() for c in res])
def test_create_remote_gateway_listener(self) -> None:
"""Test create_remote_gateway_listener"""
listener = service.create_remote_gateway_listener('c1', {'port': 5})
self.assertIsNotNone(listener)
assert listener is not None # mypy requirement
self.assertEqual(
{
'has_mesh_port': True,
'mesh_port': 5,
'routes': [{
'clusters': [{'cluster_name': 'c1', 'route_weight': 1}],
'has_header_filters': False,
'has_many_clusters': False,
'has_one_cluster': True,
'has_query_filters': False,
'header_filters': [],
'path_is_case_sensitive': True,
'path_is_exact': False,
'path_is_prefix': True,
'path_is_regex': False,
'query_filters': [],
'route_path': '/',
'total_cluster_weight': 1,
}],
},
listener.get_context(),
)
def test_create_remote_namespace_listener__no_services(self) -> None:
"""Test create_remote_namespace_listener with no services."""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({})]})
listener = service.create_remote_namespace_listener(
'n2', {'port': 2}, discovery_map['namespaces'][0],
)
self.assertIsNotNone(listener)
assert listener is not None # mypy requirement
self.assertEqual(
{'has_mesh_port': True, 'mesh_port': 2, 'routes': []},
listener.get_context(),
)
def test_create_remote_namespace_listener__one_service(self) -> None:
"""Test create_remote_namespace_listener with no services."""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({
'service-colors': [_mk_service_color({'routes': [_mk_route({
'path-match': {'match-type': 'exact', 'value': '/a'},
})]})],
})]})
listener = service.create_remote_namespace_listener(
'n2', {'port': 2}, discovery_map['namespaces'][0],
)
self.assertIsNotNone(listener)
assert listener is not None # mypy requirement
self.assertEqual(
{'has_mesh_port': True, 'mesh_port': 2, 'routes': [{
'clusters': [{'cluster_name': 'remote-n1-s-c-1', 'route_weight': 1}],
'has_many_clusters': False,
'has_one_cluster': True,
'has_header_filters': False,
'header_filters': [],
'has_query_filters': False,
'query_filters': [],
'path_is_case_sensitive': True,
'path_is_exact': True,
'path_is_prefix': False,
'path_is_regex': False,
'route_path': '/a',
'total_cluster_weight': 1,
}]},
listener.get_context(),
)
def test_can_local_namespace_access_remote__no_services(self) -> None:
"""Test can_local_namespace_access_remote with no services defined."""
res = service.can_local_namespace_access_remote('n1', _mk_namespace({}))
self.assertFalse(res)
def test_can_local_namespace_access_remote__no_access(self) -> None:
"""Test can_local_namespace_access_remote with no remote access."""
res = service.can_local_namespace_access_remote('n1', _mk_namespace({'service-colors': [
_mk_service_color({'routes': [_mk_route({'default-access': False})]}),
]}))
self.assertFalse(res)
def test_can_local_namespace_access_remote__access(self) -> None:
"""Test can_local_namespace_access_remote with allowed remote access."""
res = service.can_local_namespace_access_remote('n1', _mk_namespace({'service-colors': [
_mk_service_color({'routes': [_mk_route({'default-access': True})]}),
]}))
self.assertTrue(res)
def test_find_namespace_service_colors__match(self) -> None:
"""Test find_namespace_service_colors with no matching namespace"""
scl = [_mk_service_color({})]
discovery_map = _mk_doc({'namespaces': [_mk_namespace({
'namespace': 'n1', 'service-colors': scl,
})]})
res = service.find_namespace_service_colors('n1', discovery_map)
self.assertEqual(scl, res)
def test_find_namespace_service_colors__no_namespace(self) -> None:
"""Test find_namespace_service_colors with no matching namespace"""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({'namespace': 'n1'})]})
res = service.find_namespace_service_colors('n2', discovery_map)
self.assertIsNone(res)
def test_find_service_color__no_match(self) -> None:
"""Test find_service_color with no match."""
scl = [_mk_service_color({'service': 'x', 'color': 'y'})]
res = service.find_service_color('s', 'c', scl)
self.assertIsNone(res)
def test_find_service_color__partial(self) -> None:
"""Test find_service_color with partial matches."""
scl = [_mk_service_color({'service': 'x', 'color': 'y'})]
res = service.find_service_color('s', 'y', scl)
self.assertIsNone(res)
res = service.find_service_color('x', 'c', scl)
self.assertIsNone(res)
def test_find_service_color__first_match(self) -> None:
"""Test find_service_color with partial matches."""
scl = [
_mk_service_color({'service': 's', 'color': 'c', 'x': 'y'}),
_mk_service_color({'service': 's', 'color': 'c', 'a': 'b'}),
]
res = service.find_service_color('s', 'c', scl)
self.assertEqual(scl[0], res)
def test_find_nonlocal_namespaces__none(self) -> None:
"""Test find_nonlocal_namespaces with no non-local namespaces."""
discovery_map = _mk_doc({'namespaces': [_mk_namespace({
'service-colors': [_mk_service_color({})],
})]})
res = service.find_nonlocal_namespaces(
discovery_map['namespaces'][0]['namespace'],
discovery_map['namespaces'][0]['service-colors'][0],
discovery_map,
)
self.assertEqual([], res)
def test_find_nonlocal_namespaces__two(self) -> None:
"""Test find_nonlocal_namespaces with two non-local namespaces."""
nl1 = _mk_namespace({'namespace': 'n2', 'service-colors': [_mk_service_color({
'routes': [_mk_route({})],
})]})
nl2 = _mk_namespace({'namespace': 'n3', 'service-colors': [_mk_service_color({
'routes': [_mk_route({})],
})]})
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'service-colors': [_mk_service_color({
'namespace-egress': [
{'namespace': 'n2', 'interface': {'ipv4': '127.0.0.1', 'port': 2}},
{'namespace': 'n3', 'interface': {'ipv4': '127.0.0.1', 'port': 2}},
],
})],
}),
nl1, nl2,
]})
res = service.find_nonlocal_namespaces(
discovery_map['namespaces'][0]['namespace'],
discovery_map['namespaces'][0]['service-colors'][0],
discovery_map,
)
self.assertEqual([nl1, nl2], res)
def test_find_nonlocal_namespaces__some(self) -> None:
"""Test find_nonlocal_namespaces with two non-local namespaces, only one of which
has an egress."""
nl1 = _mk_namespace({'namespace': 'n2', 'service-colors': [_mk_service_color({
'routes': [_mk_route({})],
})]})
nl2 = _mk_namespace({'namespace': 'n3', 'service-colors': [_mk_service_color({
'routes': [_mk_route({})],
})]})
discovery_map = _mk_doc({'namespaces': [
_mk_namespace({
'service-colors': [_mk_service_color({
'namespace-egress': [
{'namespace': 'n2', 'interface': {'ipv4': '127.0.0.1', 'port': 2}},
],
})],
}),
nl1, nl2,
]})
res = service.find_nonlocal_namespaces(
discovery_map['namespaces'][0]['namespace'],
discovery_map['namespaces'][0]['service-colors'][0],
discovery_map,
)
self.assertEqual([nl1], res)
def test_get_namespace_egress_instance__empty(self) -> None:
"""Test get_namespace_egress_instance with no matching namespace"""
res = service.get_namespace_egress_instance('n2', _mk_service_color({
'namespace-egress': [],
}))
self.assertEqual(None, res)
def test_get_namespace_egress_instance__no_match(self) -> None:
"""Test get_namespace_egress_instance with no matching namespace"""
res = service.get_namespace_egress_instance('n2', _mk_service_color({
'namespace-egress': [{'namespace': 'x', 'interface': {'ipv4': '127.0.0.1', 'port': 2}}],
}))
self.assertEqual(None, res)
def test_get_namespace_egress_instance__match(self) -> None:
"""Test get_namespace_egress_instance with no matching namespace"""
egress = {'namespace': 'n2', 'interface': {'ipv4': '127.0.0.1', 'port': 2}}
res = service.get_namespace_egress_instance('n2', _mk_service_color({
'namespace-egress': [egress],
}))
self.assertEqual(egress['interface'], res)
def test_group_service_colors_by_route__empty(self) -> None:
"""Test group_service_colors_by_route with no service colors."""
res = service.group_service_colors_by_route([], None, service.create_local_cluster_name)
self.assertEqual({}, res)
def test_group_service_colors_by_route__several_services_one_public_route(self) -> None:
    """Test group_service_colors_by_route with three service colors that all share
    a single public route; all three are grouped under one route matcher."""
    route_1 = _mk_route({'path-match': {'match-type': 'prefix', 'value': '/a'}, 'weight': 2})
    discovery_map = _mk_doc({
        'namespaces': [_mk_namespace({
            'service-colors': [
                # Two 'blue' entries differing only in index and instance port,
                # plus one 'green' entry; all carry the same route.
                _mk_service_color({
                    'color': 'blue',
                    'instances': [{'ipv6': '::1', 'port': 6}],
                    'routes': [route_1],
                }),
                _mk_service_color({
                    'color': 'blue', 'index': 2,
                    'instances': [{'ipv6': '::1', 'port': 8}],
                    'routes': [route_1],
                }),
                _mk_service_color({
                    'color': 'green',
                    'instances': [{'ipv6': '::2', 'port': 6}],
                    'routes': [route_1],
                }),
            ],
        })],
    })
    # None -> no requesting-namespace filter; use local cluster naming.
    res = service.group_service_colors_by_route(
        discovery_map['namespaces'][0]['service-colors'],
        None,
        service.create_local_cluster_name,
    )
    matched_route = common.RouteMatcher(common.RoutePathMatcher('/a', 'prefix', True), [], [])
    # Every service color maps to the same matcher key, each paired with its
    # local cluster name and the shared route data.
    self.assertEqual(
        {
            matched_route: [
                ('local-s-blue-1', {
                    'default-access': True,
                    'namespace-access': [],
                    'path-match': {'match-type': 'prefix', 'value': '/a'},
                    'weight': 2,
                }),
                ('local-s-blue-2', {
                    'default-access': True,
                    'namespace-access': [],
                    'path-match': {'match-type': 'prefix', 'value': '/a'},
                    'weight': 2,
                }),
                ('local-s-green-1', {
                    'default-access': True,
                    'namespace-access': [],
                    'path-match': {'match-type': 'prefix', 'value': '/a'},
                    'weight': 2,
                }),
            ],
        },
        res,
    )
def test_group_service_colors_by_route__private_remote(self) -> None:
    """Test group_service_colors_by_route where the only route explicitly denies
    access to the requesting namespace, so nothing is grouped."""
    route_1 = _mk_route({
        'path-match': {'match-type': 'prefix', 'value': '/a'}, 'weight': 2,
        # 'n1' is explicitly denied access to this route.
        'namespace-access': [{'namespace': 'n1', 'access': False}],
    })
    discovery_map = _mk_doc({
        'namespaces': [_mk_namespace({
            'service-colors': [
                _mk_service_color({
                    'color': 'blue',
                    'instances': [{'ipv6': '::1', 'port': 6}],
                    'routes': [route_1],
                }),
            ],
        })],
    })
    res = service.group_service_colors_by_route(
        discovery_map['namespaces'][0]['service-colors'],
        'n1',
        service.create_local_cluster_name,
    )
    # The requesting namespace cannot access the only route -> empty grouping.
    self.assertEqual({}, res)
def test_get_route_matcher_key__defaults(self) -> None:
    """Test get_route_matcher_key, with as many default values as possible."""
    res = service.get_route_matcher_key(_mk_route({
        'path-match': {
            'match-type': 'foo',
            'value': '/bar',
        },
    }))
    # Expected defaults: case-sensitive True; no header or query matchers.
    self.assertEqual(
        common.RouteMatcher(
            common.RoutePathMatcher('/bar', 'foo', True),
            [], [],
        ),
        res,
    )
def test_get_route_matcher_key__full(self) -> None:
    """Test get_route_matcher_key, with everything filled in."""
    res = service.get_route_matcher_key(_mk_route({
        'path-match': {
            'match-type': 'foo',
            'value': '/bar',
            'case-sensitive': False,
        },
        'headers': [{
            'header-name': 'boil',
            'match-type': 'tuna', 'case-sensitive': False,
            'value': 'melt', 'invert': True,
        }],
        'query-parameters': [{
            'parameter-name': 'curdle',
            'match-type': 'marlin', 'case-sensitive': False,
            'value': 'hook', 'invert': True,
        }],
    }))
    # NOTE(review): the expected query-parameter matcher has invert=False even
    # though the input sets 'invert': True — presumably query parameters do not
    # support inversion (compare the __disallow_ignore parse test below);
    # confirm against get_route_matcher_key's implementation.
    self.assertEqual(
        common.RouteMatcher(
            common.RoutePathMatcher('/bar', 'foo', False),
            [common.HeaderQueryMatcher('boil', 'tuna', False, 'melt', True)],
            [common.HeaderQueryMatcher('curdle', 'marlin', False, 'hook', False)],
        ),
        res,
    )
def test_parse_header_query_matcher__allow_ignore__defaults(self) -> None:
    """Test parse_header_query_matcher with as many default values as possible,
    allowing ignores."""
    res = service.parse_header_query_matcher(
        {'name': 'abc', 'match-type': 'tuna'},
        'name',  # key under which the matcher's name is stored
        True,    # allow ignore/invert
    )
    # Expected defaults: case-sensitive True, no value, invert False.
    self.assertEqual(
        common.HeaderQueryMatcher('abc', 'tuna', True, None, False),
        res,
    )
def test_parse_header_query_matcher__allow_ignore__full(self) -> None:
    """Test parse_header_query_matcher with every field supplied,
    allowing ignores."""
    res = service.parse_header_query_matcher(
        {
            'name': 'abc', 'match-type': 'tuna', 'case-sensitive': False,
            'value': 'melt', 'invert': True,
        },
        'name',
        True,
    )
    # With ignores allowed, the requested invert=True is honored.
    self.assertEqual(
        common.HeaderQueryMatcher('abc', 'tuna', False, 'melt', True),
        res,
    )
def test_parse_header_query_matcher__disallow_ignore__full(self) -> None:
    """Test parse_header_query_matcher with every field supplied,
    disallowing ignores."""
    res = service.parse_header_query_matcher(
        {
            'name': 'abc', 'match-type': 'tuna', 'case-sensitive': False,
            'value': 'melt', 'invert': True,
        },
        'name',
        False,
    )
    # With ignores disallowed, the requested invert=True is forced to False.
    self.assertEqual(
        common.HeaderQueryMatcher('abc', 'tuna', False, 'melt', False),
        res,
    )
def test_can_local_namespace_access_route__no_namespace1(self) -> None:
    """Test can_local_namespace_access_route when only the default access applies
    and it grants access."""
    open_route = _mk_route({'default-access': True})
    allowed = service.can_local_namespace_access_route('n1', open_route)
    self.assertTrue(allowed)
def test_can_local_namespace_access_route__no_namespace2(self) -> None:
    """Test can_local_namespace_access_route when the access list names a
    different namespace and the default denies access."""
    restricted_route = _mk_route({
        'namespace-access': [{'namespace': 'n2', 'access': True}],
        'default-access': False,
    })
    allowed = service.can_local_namespace_access_route('n1', restricted_route)
    self.assertFalse(allowed)
def test_can_local_namespace_access_route__with_namespace(self) -> None:
    """Test can_local_namespace_access_route with a matching namespace entry
    that grants access, overriding the default denial."""
    route = _mk_route({
        'namespace-access': [{'namespace': 'n1', 'access': True}],
        'default-access': False,
    })
    res = service.can_local_namespace_access_route('n1', route)
    self.assertTrue(res)
def test_can_local_namespace_access_route__with_namespace_false(self) -> None:
    """Test can_local_namespace_access_route with a matching namespace entry
    that denies access, overriding the default grant."""
    route = _mk_route({
        'namespace-access': [{'namespace': 'n1', 'access': False}],
        'default-access': True,
    })
    res = service.can_local_namespace_access_route('n1', route)
    self.assertFalse(res)
def test_find_namespace__none(self) -> None:
    """Test find_namespace with no namespaces"""
    empty_map = _mk_doc({})
    self.assertIsNone(service.find_namespace('n1', empty_map))
def test_find_namespace(self) -> None:
    """Test find_namespace"""
    wanted = _mk_namespace({'namespace': 'n1'})
    doc = _mk_doc({'namespaces': [wanted]})
    self.assertEqual(wanted, service.find_namespace('n1', doc))
def test_create_nonlocal_service_cluster_name(self) -> None:
    """Test the create_nonlocal_service_cluster_name function"""
    cluster = service.create_nonlocal_service_cluster_name('n1', 's1', 'c1', 100)
    self.assertEqual('remote-n1-s1-c1-100', cluster)
def test_create_nonlocal_gateway_cluster_name(self) -> None:
    """Test the create_nonlocal_gateway_cluster_name function"""
    cluster = service.create_nonlocal_gateway_cluster_name('n1')
    self.assertEqual('remote-n1-gateway', cluster)
def test_create_local_cluster_name(self) -> None:
    """Test the create_local_cluster_name function"""
    cluster = service.create_local_cluster_name('s1', 'c1', 65535)
    self.assertEqual('local-s1-c1-65535', cluster)
# ---------------------------------------------------------------------------
# discovery-map data construction helpers.
# The main entry, `_mk_doc`, performs the validation.
def _mk_doc(defaults: Dict[str, Any]) -> Dict[str, Any]:
    """Construct a minimal valid discovery-map document, overlaid with `defaults`,
    and validate it before returning."""
    doc: Dict[str, Any] = {
        'schema-version': 'v1',
        'document-version': 'x',
        'namespaces': [],
        **defaults,
    }
    validate_discovery_map(doc)
    return doc
def _mk_namespace(defaults: Dict[str, Any]) -> Dict[str, Any]:
ret: Dict[str, Any] = {
'namespace': 'n1',
'network-id': 'nk1',
'gateways': {'instances': [], 'prefer-gateway': False, 'protocol': 'http1.1'},
'service-colors': [],
}
ret.update(defaults)
return ret
def _mk_service_color(defaults: Dict[str, Any]) -> Dict[str, Any]:
ret: Dict[str, Any] = {
'service': 's', 'color': 'c', 'index': 1,
'routes': [], 'namespace-egress': [], 'instances': [],
}
ret.update(defaults)
return ret
def _mk_route(defaults: Dict[str, Any]) -> Dict[str, Any]:
ret: Dict[str, Any] = {
'path-match': {'match-type': 'exact', 'value': '/'},
'weight': 1,
'namespace-access': [],
'default-access': True,
}
ret.update(defaults)
return ret
| 41.21244
| 100
| 0.509485
| 4,146
| 43,067
| 4.949349
| 0.058852
| 0.040936
| 0.030994
| 0.032164
| 0.887524
| 0.862329
| 0.824025
| 0.775828
| 0.717982
| 0.679435
| 0
| 0.020157
| 0.344533
| 43,067
| 1,044
| 101
| 41.251916
| 0.706755
| 0.092507
| 0
| 0.689655
| 0
| 0
| 0.189271
| 0.003978
| 0
| 0
| 0
| 0
| 0.080089
| 1
| 0.070078
| false
| 0
| 0.004449
| 0
| 0.080089
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
aa1295253da1e16ecb33426fcf9edc92633d605d
| 156
|
py
|
Python
|
backend/src/baserow/api/user/errors.py
|
OpenSourceMarketing/Baserow
|
5778059fde594a3b1838f7a964bc5060ab45bbad
|
[
"MIT"
] | null | null | null |
backend/src/baserow/api/user/errors.py
|
OpenSourceMarketing/Baserow
|
5778059fde594a3b1838f7a964bc5060ab45bbad
|
[
"MIT"
] | 7
|
2021-03-19T12:07:13.000Z
|
2022-02-10T14:47:21.000Z
|
backend/src/baserow/api/user/errors.py
|
OpenSourceMarketing/Baserow
|
5778059fde594a3b1838f7a964bc5060ab45bbad
|
[
"MIT"
] | null | null | null |
# API error-code constants for the user endpoints.
# NOTE(review): the constant is named ERROR_ALREADY_EXISTS but carries the value
# 'ERROR_EMAIL_ALREADY_EXISTS' — confirm the name/value mismatch is intentional.
ERROR_ALREADY_EXISTS = 'ERROR_EMAIL_ALREADY_EXISTS'
ERROR_USER_NOT_FOUND = 'ERROR_USER_NOT_FOUND'
ERROR_INVALID_OLD_PASSWORD = 'ERROR_INVALID_OLD_PASSWORD'
| 39
| 57
| 0.884615
| 23
| 156
| 5.26087
| 0.434783
| 0.214876
| 0.297521
| 0.280992
| 0.322314
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057692
| 156
| 3
| 58
| 52
| 0.823129
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
aa147759ac5486c13ad4ec19d96dcb82ea5ea81a
| 164
|
py
|
Python
|
lightmodels/disparity_expansion_v2/__init__.py
|
cshyundev/LW-PSMNet
|
d80d3b12c55ba30c781a7578a4728a2cd6321866
|
[
"MIT"
] | 1
|
2022-01-22T14:00:27.000Z
|
2022-01-22T14:00:27.000Z
|
lightmodels/disparity_expansion_v2/__init__.py
|
cshyundev/LW-PSMNet
|
d80d3b12c55ba30c781a7578a4728a2cd6321866
|
[
"MIT"
] | null | null | null |
lightmodels/disparity_expansion_v2/__init__.py
|
cshyundev/LW-PSMNet
|
d80d3b12c55ba30c781a7578a4728a2cd6321866
|
[
"MIT"
] | null | null | null |
from .basic import PSMNet as basic
from .stackhourglass import PSMNet as disparity_expansion_v2
# Model that compresses the 3D CNN channel count to half and expands the cost
# volume's disparity dimension by 1/2. (Translated from the original Korean.)
| 32.8
| 66
| 0.810976
| 27
| 164
| 4.851852
| 0.777778
| 0.183206
| 0.21374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0.158537
| 164
| 4
| 67
| 41
| 0.92029
| 0.390244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a4e6312c5b41e467225f66fefe9bf62a655aa78b
| 27
|
py
|
Python
|
Cyrcos/__init__.py
|
GKing-Dev/Cyrcos
|
c5cf88682f4ac1b681d37694285dfddd30c1d27b
|
[
"MIT"
] | 2
|
2019-06-10T18:00:16.000Z
|
2019-11-02T16:56:52.000Z
|
Cyrcos/__init__.py
|
GKing-Dev/Cyrcos
|
c5cf88682f4ac1b681d37694285dfddd30c1d27b
|
[
"MIT"
] | null | null | null |
Cyrcos/__init__.py
|
GKing-Dev/Cyrcos
|
c5cf88682f4ac1b681d37694285dfddd30c1d27b
|
[
"MIT"
] | null | null | null |
from .Cyrcos import Cyrcos
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a4f84e0bc6ee6dbec2fb08e1263af59044ebbcf0
| 979
|
py
|
Python
|
companymatching/base.py
|
Gawaboumga/CompanyMatching
|
4c07ca8cb60da8df1c8172bf233a1be016a82930
|
[
"MIT"
] | 4
|
2020-04-28T08:20:51.000Z
|
2022-03-07T05:46:43.000Z
|
companymatching/base.py
|
Gawaboumga/CompanyMatching
|
4c07ca8cb60da8df1c8172bf233a1be016a82930
|
[
"MIT"
] | null | null | null |
companymatching/base.py
|
Gawaboumga/CompanyMatching
|
4c07ca8cb60da8df1c8172bf233a1be016a82930
|
[
"MIT"
] | 2
|
2019-11-26T02:21:13.000Z
|
2020-07-21T04:29:48.000Z
|
from .MatchingType import MatchingType
class MatcherMixin:
    """Base mixin: match = normalize both sides, then compare the normalized values."""

    # Flag associated with this matcher; MatchingType comes from the sibling module.
    additional_flag = MatchingType.NoMatch

    def match(self, lhs, rhs, original_lhs, original_rhs, **parameters):
        """Return (comparison result, normalized lhs, normalized rhs)."""
        new_lhs, new_rhs = self.normalize(lhs, rhs, original_lhs, original_rhs, **parameters)
        return self.compare(new_lhs, new_rhs, original_lhs, original_rhs, **parameters), new_lhs, new_rhs

    def compare(self, lhs, rhs, original_lhs, original_rhs, **parameters):
        """Default comparison: no result; subclasses override."""
        return None

    def normalize(self, lhs, rhs, original_lhs, original_rhs, **parameters):
        """Default normalization: identity; subclasses override."""
        return lhs, rhs
class ComparerMixin(MatcherMixin):
    # NOTE(review): this override calls self.compare with the same arguments,
    # which recurses infinitely if ever invoked, and returns a 3-tuple where the
    # base compare returns a bare result. It looks like it was meant to override
    # match() (comparison without normalization) — confirm intent before use.
    def compare(self, lhs, rhs, original_lhs, original_rhs, **parameters):
        return self.compare(lhs, rhs, original_lhs, original_rhs, **parameters), lhs, rhs
class NormalizerMixin(MatcherMixin):
    # NOTE(review): this override calls self.normalize with the same arguments,
    # which recurses infinitely if ever invoked — likely a bug (perhaps a
    # different method or super() call was intended); confirm before use.
    def normalize(self, lhs, rhs, original_lhs, original_rhs, **parameters):
        return self.normalize(lhs, rhs, original_lhs, original_rhs, **parameters)
| 33.758621
| 105
| 0.724208
| 122
| 979
| 5.606557
| 0.180328
| 0.087719
| 0.184211
| 0.289474
| 0.738304
| 0.738304
| 0.738304
| 0.682749
| 0.672515
| 0.584795
| 0
| 0
| 0.170582
| 979
| 28
| 106
| 34.964286
| 0.842365
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3125
| false
| 0
| 0.0625
| 0.25
| 0.9375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
3547513423c5114473ab3fe5aad60c96eb1b1510
| 89
|
py
|
Python
|
blog/models.py
|
epm0dev/Lens-dev
|
2f34718020ed15ee9a181181e02f62eb3fbadc3b
|
[
"MIT"
] | null | null | null |
blog/models.py
|
epm0dev/Lens-dev
|
2f34718020ed15ee9a181181e02f62eb3fbadc3b
|
[
"MIT"
] | null | null | null |
blog/models.py
|
epm0dev/Lens-dev
|
2f34718020ed15ee9a181181e02f62eb3fbadc3b
|
[
"MIT"
] | null | null | null |
from django.db import models
# TODO Create any models needed for the blog application.
| 17.8
| 57
| 0.786517
| 14
| 89
| 5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179775
| 89
| 4
| 58
| 22.25
| 0.958904
| 0.617978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
103127a3d1b7c85b5b328d8c82188b71aa036bd4
| 244
|
py
|
Python
|
fabfile.py
|
freshlimestudio/djlime
|
69e177d6e26fdf9cb520ac3316548d7d38b9690e
|
[
"BSD-3-Clause"
] | null | null | null |
fabfile.py
|
freshlimestudio/djlime
|
69e177d6e26fdf9cb520ac3316548d7d38b9690e
|
[
"BSD-3-Clause"
] | null | null | null |
fabfile.py
|
freshlimestudio/djlime
|
69e177d6e26fdf9cb520ac3316548d7d38b9690e
|
[
"BSD-3-Clause"
] | null | null | null |
from fabric.api import *
@task
def release():
    """Build and upload sdist and wheel artifacts to the default index and to
    the 'lime' index.

    NOTE(review): `setup.py ... upload` is deprecated upstream; consider
    migrating to `twine upload` — confirm with the project's release process.
    """
    local('python setup.py sdist upload')
    local('python setup.py sdist upload -r lime')
    local('python setup.py bdist_wheel upload')
    local('python setup.py bdist_wheel upload -r lime')
| 24.4
| 55
| 0.70082
| 37
| 244
| 4.567568
| 0.459459
| 0.260355
| 0.378698
| 0.426036
| 0.745562
| 0.745562
| 0.402367
| 0
| 0
| 0
| 0
| 0
| 0.184426
| 244
| 9
| 56
| 27.111111
| 0.849246
| 0
| 0
| 0
| 0
| 0
| 0.57377
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.142857
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
105c52442e3bbb9c7dcb20c3ebc55a40367ad3a3
| 109
|
py
|
Python
|
sample/sample/constants.py
|
Alzpeta/oarepo-invenio-model
|
851ea01c74a77b728417f0dbfe3ffc4caae3a266
|
[
"MIT"
] | 1
|
2020-06-03T14:44:49.000Z
|
2020-06-03T14:44:49.000Z
|
sample/sample/constants.py
|
Alzpeta/oarepo-invenio-model
|
851ea01c74a77b728417f0dbfe3ffc4caae3a266
|
[
"MIT"
] | 7
|
2020-06-02T14:45:48.000Z
|
2021-11-16T08:38:47.000Z
|
sample/sample/constants.py
|
Alzpeta/oarepo-invenio-model
|
851ea01c74a77b728417f0dbfe3ffc4caae3a266
|
[
"MIT"
] | 5
|
2020-10-24T16:04:25.000Z
|
2020-10-28T10:42:23.000Z
|
# JSON schema identifiers for sample records. Presumably ALLOWED lists every
# accepted schema and PREFERRED selects the default — confirm against the
# invenio record configuration that consumes these constants.
SAMPLE_ALLOWED_SCHEMAS = ['sample/sample-v1.0.0.json']
SAMPLE_PREFERRED_SCHEMA = 'sample/sample-v1.0.0.json'
| 36.333333
| 54
| 0.779817
| 18
| 109
| 4.5
| 0.444444
| 0.296296
| 0.345679
| 0.37037
| 0.493827
| 0.493827
| 0
| 0
| 0
| 0
| 0
| 0.058252
| 0.055046
| 109
| 2
| 55
| 54.5
| 0.728155
| 0
| 0
| 0
| 0
| 0
| 0.458716
| 0.458716
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
109768159a9189dda71c222a04a63d6cc92d9b2a
| 156
|
py
|
Python
|
Mundo 1/Desafio/021.py
|
LeonardoJosedaSilveira/Curso-de-python
|
e5b7920ce75a3c7af9b8250e18cabaa173e2478b
|
[
"MIT"
] | null | null | null |
Mundo 1/Desafio/021.py
|
LeonardoJosedaSilveira/Curso-de-python
|
e5b7920ce75a3c7af9b8250e18cabaa173e2478b
|
[
"MIT"
] | null | null | null |
Mundo 1/Desafio/021.py
|
LeonardoJosedaSilveira/Curso-de-python
|
e5b7920ce75a3c7af9b8250e18cabaa173e2478b
|
[
"MIT"
] | null | null | null |
import playsound
# '021.mp3' is the path with the name of the audio file. (Translated from Portuguese.)
print('\033[02;31;43mVoce ta ouvindo uma musica legal.\033[m')
playsound.playsound('021.mp3')
| 26
| 62
| 0.75
| 28
| 156
| 4.178571
| 0.785714
| 0.205128
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 0.128205
| 156
| 5
| 63
| 31.2
| 0.713235
| 0.262821
| 0
| 0
| 0
| 0
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
52a3ee1d8ad9aa34f9204c8ece781588e3812471
| 230
|
py
|
Python
|
qutip/qip/gates.py
|
camponogaraviera/qutip
|
1b1f6dffcb3ab97f11b8c6114293e09f378d2e8f
|
[
"BSD-3-Clause"
] | 1,205
|
2015-01-02T16:23:42.000Z
|
2022-03-31T03:21:21.000Z
|
qutip/qip/gates.py
|
camponogaraviera/qutip
|
1b1f6dffcb3ab97f11b8c6114293e09f378d2e8f
|
[
"BSD-3-Clause"
] | 1,361
|
2015-01-09T23:38:25.000Z
|
2022-03-31T12:26:07.000Z
|
qutip/qip/gates.py
|
camponogaraviera/qutip
|
1b1f6dffcb3ab97f11b8c6114293e09f378d2e8f
|
[
"BSD-3-Clause"
] | 569
|
2015-01-19T06:15:33.000Z
|
2022-03-28T20:43:39.000Z
|
import warnings

# Deprecated alias module: everything is re-exported from
# qutip.qip.operations.gates, and importing this module warns the caller.
from qutip.qip.operations.gates import *

# stacklevel=2 attributes the warning to the importing module, not this shim.
# Fixed: the two concatenated string literals previously ran together as
# "deprecated.Please" — a space was missing at the join.
warnings.warn(
    "Importation from qutip.qip.gates is deprecated. "
    "Please use e.g.\n from qutip.qip.operations import cnot\n",
    DeprecationWarning, stacklevel=2)
| 28.75
| 64
| 0.752174
| 32
| 230
| 5.40625
| 0.625
| 0.156069
| 0.208092
| 0.254335
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005102
| 0.147826
| 230
| 7
| 65
| 32.857143
| 0.877551
| 0
| 0
| 0
| 0
| 0
| 0.452174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d82bf5874707166dfb3e385e8310cc7c7bc718f3
| 27
|
py
|
Python
|
freqerica/__init__.py
|
ymtz03/freqerica
|
d79e76181a037da5c11b47f8a4e1bf4387a0468f
|
[
"BSD-2-Clause"
] | 1
|
2020-05-08T15:28:04.000Z
|
2020-05-08T15:28:04.000Z
|
freqerica/__init__.py
|
ymtz03/freqerica
|
d79e76181a037da5c11b47f8a4e1bf4387a0468f
|
[
"BSD-2-Clause"
] | null | null | null |
freqerica/__init__.py
|
ymtz03/freqerica
|
d79e76181a037da5c11b47f8a4e1bf4387a0468f
|
[
"BSD-2-Clause"
] | null | null | null |
from .kernel import kernel
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc1f74fe8a33af483b64c589279c73d4a7ac48a0
| 824
|
py
|
Python
|
tests/test_provider_sethvargo_filesystem.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_sethvargo_filesystem.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_sethvargo_filesystem.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_sethvargo_filesystem.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:16:21 UTC)
def test_provider_import():
    """Smoke test: the generated provider module is importable."""
    import terrascript.provider.sethvargo.filesystem
def test_resource_import():
    """Smoke test: the generated resource classes are importable."""
    from terrascript.resource.sethvargo.filesystem import filesystem_file_reader
    from terrascript.resource.sethvargo.filesystem import filesystem_file_writer
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.sethvargo.filesystem
#
# t = terrascript.provider.sethvargo.filesystem.filesystem()
# s = str(t)
#
# assert 'https://github.com/sethvargo/terraform-provider-filesystem' in s
# assert '0.2.0' in s
| 30.518519
| 80
| 0.769417
| 105
| 824
| 5.904762
| 0.552381
| 0.183871
| 0.174194
| 0.183871
| 0.341935
| 0.2
| 0.2
| 0.2
| 0
| 0
| 0
| 0.021246
| 0.143204
| 824
| 26
| 81
| 31.692308
| 0.856941
| 0.628641
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 1
| 0.4
| true
| 0
| 1
| 0
| 1.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
dc21f61da53f83d4f1358e87a81569f6a53a0e3a
| 70
|
py
|
Python
|
kinlin/callbacks/__init__.py
|
the-lay/kinlin
|
ce7c95d46d130049e356104ba77fad51bc59fb3f
|
[
"MIT"
] | null | null | null |
kinlin/callbacks/__init__.py
|
the-lay/kinlin
|
ce7c95d46d130049e356104ba77fad51bc59fb3f
|
[
"MIT"
] | null | null | null |
kinlin/callbacks/__init__.py
|
the-lay/kinlin
|
ce7c95d46d130049e356104ba77fad51bc59fb3f
|
[
"MIT"
] | null | null | null |
from .history import TensorboardCallback, ExcelMetricsLogger, History
| 35
| 69
| 0.871429
| 6
| 70
| 10.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 70
| 1
| 70
| 70
| 0.953125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc6ace54a4183b1048f00a8b60478230eb788df7
| 74
|
py
|
Python
|
templates/main.py
|
s1113950/pak
|
d92bd24663addf5a2e197d865c9d198272642346
|
[
"Apache-2.0"
] | null | null | null |
templates/main.py
|
s1113950/pak
|
d92bd24663addf5a2e197d865c9d198272642346
|
[
"Apache-2.0"
] | null | null | null |
templates/main.py
|
s1113950/pak
|
d92bd24663addf5a2e197d865c9d198272642346
|
[
"Apache-2.0"
] | null | null | null |
def main():
    """Print the template-installation confirmation message."""
    message = "${app} template has been successfully installed!"
    print(message)
| 24.666667
| 61
| 0.689189
| 9
| 74
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 74
| 2
| 62
| 37
| 0.822581
| 0
| 0
| 0
| 0
| 0
| 0.648649
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
dc7b9ea54a68461de7099e906b304e2945d8af5c
| 173
|
py
|
Python
|
examples/zh-cn/api/blueprints.py
|
yulix/restful-api-contract
|
fe1a9364f295b79dd668d3a82f9ff2c7ff1b6618
|
[
"MIT"
] | null | null | null |
examples/zh-cn/api/blueprints.py
|
yulix/restful-api-contract
|
fe1a9364f295b79dd668d3a82f9ff2c7ff1b6618
|
[
"MIT"
] | null | null | null |
examples/zh-cn/api/blueprints.py
|
yulix/restful-api-contract
|
fe1a9364f295b79dd668d3a82f9ff2c7ff1b6618
|
[
"MIT"
] | null | null | null |
from .. import app
from .captcha import captcha_api
from .sms import sms_api
# Register the API blueprints on the app at import time.
app.register_blueprint(captcha_api)
app.register_blueprint(sms_api)
| 17.3
| 35
| 0.815029
| 26
| 173
| 5.192308
| 0.346154
| 0.148148
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121387
| 173
| 9
| 36
| 19.222222
| 0.888158
| 0.127168
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0.4
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dcab710d8c17a8ebb18ddd1d9ad4bb2976191f86
| 61
|
py
|
Python
|
vis/__init__.py
|
DoubleTwelve/lamarepy
|
548e6d6514183ab8ceafd1a42097210350a74985
|
[
"BSD-3-Clause"
] | null | null | null |
vis/__init__.py
|
DoubleTwelve/lamarepy
|
548e6d6514183ab8ceafd1a42097210350a74985
|
[
"BSD-3-Clause"
] | null | null | null |
vis/__init__.py
|
DoubleTwelve/lamarepy
|
548e6d6514183ab8ceafd1a42097210350a74985
|
[
"BSD-3-Clause"
] | null | null | null |
from lamarepy import geometry as g
from show_q import show_q
| 20.333333
| 34
| 0.836066
| 12
| 61
| 4.083333
| 0.666667
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163934
| 61
| 2
| 35
| 30.5
| 0.960784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f49e2e811441e31133821b41f86ee1e0983d67c8
| 135
|
py
|
Python
|
tests/bytecode/mp-tests/class3.py
|
LabAixBidouille/micropython
|
11aa6ba456287d6c80598a7ebbebd2887ce8f5a2
|
[
"MIT"
] | 303
|
2015-07-11T17:12:55.000Z
|
2018-01-08T03:02:37.000Z
|
tests/bytecode/mp-tests/class3.py
|
LabAixBidouille/micropython
|
11aa6ba456287d6c80598a7ebbebd2887ce8f5a2
|
[
"MIT"
] | 13
|
2016-05-12T16:51:22.000Z
|
2018-01-10T22:33:25.000Z
|
tests/bytecode/mp-tests/class3.py
|
LabAixBidouille/micropython
|
11aa6ba456287d6c80598a7ebbebd2887ce8f5a2
|
[
"MIT"
] | 26
|
2018-01-18T09:15:33.000Z
|
2022-02-07T13:09:14.000Z
|
class A:
    """Fixture class for a bytecode-generation test (micropython tests/bytecode)."""

    def f(x):
        # No `self` parameter: the test calls A.f(1), so x receives 1 directly.
        return x

    def g(y):
        def h(z):
            # NOTE(review): `x` is not bound in this scope or any enclosing one;
            # executing h raises NameError. The fixture presumably only needs to
            # compile — confirm against the bytecode test harness.
            return x + y + z
        h(y)
# Exercise instantiation and direct (unbound) method calls for bytecode output.
# NOTE(review): A.g(2) invokes h(2), which references an unbound `x`; running
# this script would raise NameError — it appears intended for compilation only.
A()
A.f(1)
A.g(2)(3)
| 12.272727
| 28
| 0.348148
| 27
| 135
| 1.740741
| 0.481481
| 0.297872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042254
| 0.474074
| 135
| 10
| 29
| 13.5
| 0.619718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0
| 0.2
| 0.6
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f4e02bc68d65eea60dea9f52f6d4cb3d1dde8909
| 229
|
py
|
Python
|
ghostipy/utils.py
|
kemerelab/ghostipy
|
e931e7553409e999c168074365a7700c8ff83171
|
[
"Apache-2.0"
] | 9
|
2021-07-28T09:29:55.000Z
|
2022-03-17T16:17:22.000Z
|
ghostipy/utils.py
|
kemerelab/ghostipy
|
e931e7553409e999c168074365a7700c8ff83171
|
[
"Apache-2.0"
] | 5
|
2021-07-20T01:00:38.000Z
|
2022-01-27T00:06:17.000Z
|
ghostipy/utils.py
|
kemerelab/ghostipy
|
e931e7553409e999c168074365a7700c8ff83171
|
[
"Apache-2.0"
] | 1
|
2022-02-04T22:59:52.000Z
|
2022-02-04T22:59:52.000Z
|
import numpy as np
__all__ = ['hz_to_normalized_rad',
'normalized_rad_to_hz']
def hz_to_normalized_rad(freqs, fs):
    """Convert frequencies in Hz to normalized angular frequency (radians/sample)
    for a sampling rate of `fs` Hz. Accepts scalars or numpy arrays."""
    cycles_per_sample = freqs / fs
    return cycles_per_sample * 2 * np.pi
def normalized_rad_to_hz(rad, fs):
    """Convert normalized angular frequency (radians/sample) back to Hz for a
    sampling rate of `fs` Hz. Accepts scalars or numpy arrays."""
    half_cycles = rad / np.pi
    return half_cycles * fs / 2
| 22.9
| 36
| 0.672489
| 39
| 229
| 3.538462
| 0.384615
| 0.376812
| 0.202899
| 0.246377
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.222707
| 229
| 10
| 37
| 22.9
| 0.764045
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
52152c7be406ee65ec8d704224a66ce6ff41d7b8
| 63
|
py
|
Python
|
todb/__init__.py
|
emkor/todb
|
40a492ea6dc6181fbb3861072ebf512d2a633f04
|
[
"MIT"
] | null | null | null |
todb/__init__.py
|
emkor/todb
|
40a492ea6dc6181fbb3861072ebf512d2a633f04
|
[
"MIT"
] | null | null | null |
todb/__init__.py
|
emkor/todb
|
40a492ea6dc6181fbb3861072ebf512d2a633f04
|
[
"MIT"
] | null | null | null |
from todb.main import todb
from todb.params import InputParams
| 21
| 35
| 0.84127
| 10
| 63
| 5.3
| 0.6
| 0.301887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 36
| 31.5
| 0.963636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5243af0eb677f1c2bf0cf313e87c8ba323328266
| 29
|
py
|
Python
|
usr_dir/__init__.py
|
thomasehuang/tensor2tensor
|
33f3ca44b8e1aa1f1149190f8d2cd1aa81032461
|
[
"Apache-2.0"
] | null | null | null |
usr_dir/__init__.py
|
thomasehuang/tensor2tensor
|
33f3ca44b8e1aa1f1149190f8d2cd1aa81032461
|
[
"Apache-2.0"
] | null | null | null |
usr_dir/__init__.py
|
thomasehuang/tensor2tensor
|
33f3ca44b8e1aa1f1149190f8d2cd1aa81032461
|
[
"Apache-2.0"
] | null | null | null |
from . import data_generators
| 29
| 29
| 0.862069
| 4
| 29
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5263e5bccc39673dd776a344a77421379f771015
| 47
|
py
|
Python
|
LayerHessians/hessian/__init__.py
|
yashkhasbage25/HTR
|
192718f15fafc283d31c22c75fd5e75b31e4db91
|
[
"MIT"
] | null | null | null |
LayerHessians/hessian/__init__.py
|
yashkhasbage25/HTR
|
192718f15fafc283d31c22c75fd5e75b31e4db91
|
[
"MIT"
] | null | null | null |
LayerHessians/hessian/__init__.py
|
yashkhasbage25/HTR
|
192718f15fafc283d31c22c75fd5e75b31e4db91
|
[
"MIT"
] | null | null | null |
from .hessian import FullHessian, LayerHessian
| 23.5
| 46
| 0.851064
| 5
| 47
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 47
| 1
| 47
| 47
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bfdb1b309d123665d3da0d69d0b7f0128185f801
| 238
|
py
|
Python
|
pgquery_old/__init__.py
|
deknowny/pgquery
|
e8f9093f05fc0825886ba443cd648b5653040088
|
[
"MIT"
] | 2
|
2021-10-01T21:14:23.000Z
|
2022-01-29T19:51:55.000Z
|
pgquery_old/__init__.py
|
deknowny/genorm
|
e8f9093f05fc0825886ba443cd648b5653040088
|
[
"MIT"
] | null | null | null |
pgquery_old/__init__.py
|
deknowny/genorm
|
e8f9093f05fc0825886ba443cd648b5653040088
|
[
"MIT"
] | null | null | null |
__version__ = "0.1.0a0"
from pgquery.builder.actor import BuildingActor
from pgquery.builder.impl.column import Integer, Serial, Text, Varchar
from pgquery.builder.impl.literal import literal
from pgquery.builder.impl.table import Table
| 34
| 70
| 0.823529
| 34
| 238
| 5.647059
| 0.529412
| 0.229167
| 0.375
| 0.34375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018605
| 0.096639
| 238
| 6
| 71
| 39.666667
| 0.874419
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bfe4a2e97c3bc96909b791cbfd51fac7ecf0bdf7
| 154
|
py
|
Python
|
jax_models/__init__.py
|
DarshanDeshpande/jax-models
|
ae5750540f142572ff7f276b927a9cdb5195fd23
|
[
"Apache-2.0"
] | 61
|
2022-01-08T19:06:48.000Z
|
2022-03-28T07:56:19.000Z
|
jax_models/__init__.py
|
DarshanDeshpande/jax-models
|
ae5750540f142572ff7f276b927a9cdb5195fd23
|
[
"Apache-2.0"
] | 1
|
2022-02-27T01:15:57.000Z
|
2022-02-28T13:31:50.000Z
|
jax_models/__init__.py
|
DarshanDeshpande/jax-models
|
ae5750540f142572ff7f276b927a9cdb5195fd23
|
[
"Apache-2.0"
] | 2
|
2022-01-09T10:01:49.000Z
|
2022-02-03T23:19:24.000Z
|
from . import activations
from . import initializers
from . import layers
from . import models
from .models.model_registry import list_models, load_model
| 25.666667
| 58
| 0.818182
| 21
| 154
| 5.857143
| 0.47619
| 0.325203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 154
| 5
| 59
| 30.8
| 0.924812
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8717fa6c5c9e817c4ca717c8e22ee978ed977b16
| 23
|
py
|
Python
|
test/test.py
|
dhiabenfraj/Qget
|
27f17ee6559e492e74533ed64b73488c353cf269
|
[
"MIT"
] | 1
|
2021-07-07T21:58:26.000Z
|
2021-07-07T21:58:26.000Z
|
test/test.py
|
dhiabenfraj/Qget
|
27f17ee6559e492e74533ed64b73488c353cf269
|
[
"MIT"
] | null | null | null |
test/test.py
|
dhiabenfraj/Qget
|
27f17ee6559e492e74533ed64b73488c353cf269
|
[
"MIT"
] | null | null | null |
from ..core import test
| 23
| 23
| 0.782609
| 4
| 23
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8719938f2c021c751d1427c2cfe910776b37f843
| 410
|
py
|
Python
|
tests/test_types.py
|
biosustain/gnomic
|
a6e6bbfdd6b42e888a3d1c361847ae7bb87c766e
|
[
"Apache-2.0"
] | 9
|
2015-07-13T14:15:11.000Z
|
2020-11-20T18:42:08.000Z
|
tests/test_types.py
|
biosustain/gnomic
|
a6e6bbfdd6b42e888a3d1c361847ae7bb87c766e
|
[
"Apache-2.0"
] | 33
|
2015-06-19T08:47:19.000Z
|
2017-09-04T11:30:39.000Z
|
tests/test_types.py
|
biosustain/gnomic
|
a6e6bbfdd6b42e888a3d1c361847ae7bb87c766e
|
[
"Apache-2.0"
] | 4
|
2015-10-15T19:10:54.000Z
|
2020-01-22T09:53:18.000Z
|
from gnomic.types import Feature as F, Fusion
def test_fusion_contains():
assert Fusion(F('a'), F('b'), F('c'), F('d')).contains(Fusion(F('b'), F('c'))) is True
assert Fusion(F('a'), F('b'), F('c'), F('d')).contains(Fusion(F('a'), F('c'))) is False
assert Fusion(F('a'), F('b'), F('c'), F('d')).contains(F('a')) is True
assert Fusion(F('a'), F('b'), F('c'), F('d')).contains(F('x')) is False
| 45.555556
| 91
| 0.541463
| 78
| 410
| 2.820513
| 0.25641
| 0.190909
| 0.181818
| 0.204545
| 0.636364
| 0.636364
| 0.636364
| 0.636364
| 0.636364
| 0.636364
| 0
| 0
| 0.139024
| 410
| 8
| 92
| 51.25
| 0.623229
| 0
| 0
| 0
| 0
| 0
| 0.053659
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.166667
| true
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
872c35818533460a952cb1a53fa2609e9d3585f0
| 26
|
py
|
Python
|
trtools/charting/api.py
|
dalejung/trtools
|
39db1d72269f43e7ba380da5ad28d565137089ed
|
[
"MIT"
] | 3
|
2015-01-13T01:03:22.000Z
|
2016-04-20T03:27:11.000Z
|
trtools/charting/api.py
|
dalejung/trtools
|
39db1d72269f43e7ba380da5ad28d565137089ed
|
[
"MIT"
] | null | null | null |
trtools/charting/api.py
|
dalejung/trtools
|
39db1d72269f43e7ba380da5ad28d565137089ed
|
[
"MIT"
] | 1
|
2019-10-16T19:13:47.000Z
|
2019-10-16T19:13:47.000Z
|
from ts_charting import *
| 13
| 25
| 0.807692
| 4
| 26
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
875952b9744afa889de0c9dbfe04a7449fa2ee22
| 35
|
py
|
Python
|
taboo/defender/__init__.py
|
haoxizhong/taboo
|
62ff4bd172dcdd9ea7be68b6c63a639dcbf2b49d
|
[
"MIT"
] | null | null | null |
taboo/defender/__init__.py
|
haoxizhong/taboo
|
62ff4bd172dcdd9ea7be68b6c63a639dcbf2b49d
|
[
"MIT"
] | null | null | null |
taboo/defender/__init__.py
|
haoxizhong/taboo
|
62ff4bd172dcdd9ea7be68b6c63a639dcbf2b49d
|
[
"MIT"
] | null | null | null |
from .defender import TabooDefender
| 35
| 35
| 0.885714
| 4
| 35
| 7.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
875f48a8750a8948ca911f41ca81eee06ebbf4f4
| 43
|
py
|
Python
|
doctr/models/artefacts/__init__.py
|
mzeidhassan/doctr
|
14b376e07d31b09b6bd31bceebf6ffb477c30f08
|
[
"Apache-2.0"
] | 628
|
2021-02-13T21:49:37.000Z
|
2022-03-31T19:48:57.000Z
|
doctr/models/artefacts/__init__.py
|
mzeidhassan/doctr
|
14b376e07d31b09b6bd31bceebf6ffb477c30f08
|
[
"Apache-2.0"
] | 694
|
2021-02-08T15:23:38.000Z
|
2022-03-31T07:24:59.000Z
|
doctr/models/artefacts/__init__.py
|
mzeidhassan/doctr
|
14b376e07d31b09b6bd31bceebf6ffb477c30f08
|
[
"Apache-2.0"
] | 90
|
2021-04-28T05:39:02.000Z
|
2022-03-31T06:48:36.000Z
|
from .barcode import *
from .face import *
| 14.333333
| 22
| 0.72093
| 6
| 43
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 23
| 21.5
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8774d2ff578357700ec356c81bc564547e7db736
| 29
|
py
|
Python
|
app/modules/__init__.py
|
janlycn/flask-restful-demo
|
905301fe62cdbb17ddccc80aca0d9d16ec43dc64
|
[
"Apache-2.0"
] | 1
|
2018-07-20T03:37:40.000Z
|
2018-07-20T03:37:40.000Z
|
app/modules/__init__.py
|
janlycn/flask-restful-demo
|
905301fe62cdbb17ddccc80aca0d9d16ec43dc64
|
[
"Apache-2.0"
] | null | null | null |
app/modules/__init__.py
|
janlycn/flask-restful-demo
|
905301fe62cdbb17ddccc80aca0d9d16ec43dc64
|
[
"Apache-2.0"
] | null | null | null |
from lib.extensions import db
| 29
| 29
| 0.862069
| 5
| 29
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5e4245b835f369ca5ba78f02ae78ba1152a31da4
| 7,430
|
py
|
Python
|
hyperion/model/tests/test_amr_checks.py
|
christopherlovell/hyperion
|
f65c253abf0bdf174a9302666bc2fec57f7ae7da
|
[
"BSD-2-Clause"
] | 37
|
2015-01-29T20:58:04.000Z
|
2022-03-10T23:36:39.000Z
|
hyperion/model/tests/test_amr_checks.py
|
christopherlovell/hyperion
|
f65c253abf0bdf174a9302666bc2fec57f7ae7da
|
[
"BSD-2-Clause"
] | 83
|
2015-01-07T11:04:08.000Z
|
2022-02-16T16:26:33.000Z
|
hyperion/model/tests/test_amr_checks.py
|
christopherlovell/hyperion
|
f65c253abf0bdf174a9302666bc2fec57f7ae7da
|
[
"BSD-2-Clause"
] | 17
|
2015-04-21T13:17:41.000Z
|
2021-12-06T02:42:20.000Z
|
from distutils.version import LooseVersion
import pytest
import numpy as np
from .. import Model, ModelOutput
from .test_helpers import random_id, get_test_dust
from ...grid import AMRGrid
try:
import yt
except:
YT_VERSION = None
else:
if LooseVersion(yt.__version__) >= LooseVersion('3'):
YT_VERSION = 3
else:
YT_VERSION = 2
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_differing_widths(tmpdir, direction):
# Widths of grids in same level are not the same
dust = get_test_dust()
amr = AMRGrid()
level1 = amr.add_level()
grid1 = level1.add_grid()
grid1.nx = grid1.ny = grid1.nz = 4
grid1.xmin = grid1.ymin = grid1.zmin = -10.
grid1.xmax = grid1.ymax = grid1.zmax = +10.
grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10
grid2 = level1.add_grid()
grid2.nx = grid2.ny = grid2.nz = 4
grid2.xmin = grid2.ymin = grid2.zmin = -10.
grid2.xmax = grid2.ymax = grid2.zmax = +10.
grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10
setattr(grid2, direction + 'min', -10.1)
m = Model()
m.set_grid(amr)
m.add_density_grid(amr['density'], dust)
m.set_n_photons(initial=1, imaging=0)
m.write(tmpdir.join(random_id()).strpath)
log_file = tmpdir.join(random_id()).strpath
with pytest.raises(SystemExit) as exc:
m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
'complete'
assert ('Grids 1 and 2 in level 1 have differing cell widths in the %s \n direction ( 5.0000E+00 and 5.0250E+00 respectively)' % direction) in open(log_file).read()
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_misaligned_grids_same_level(tmpdir, direction):
# Widths of grids in same level are not the same
dust = get_test_dust()
amr = AMRGrid()
level1 = amr.add_level()
grid1 = level1.add_grid()
grid1.nx = grid1.ny = grid1.nz = 4
grid1.xmin = grid1.ymin = grid1.zmin = -10.
grid1.xmax = grid1.ymax = grid1.zmax = +10.
grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10
grid2 = level1.add_grid()
grid2.nx = grid2.ny = grid2.nz = 4
grid2.xmin = grid2.ymin = grid2.zmin = -10.
grid2.xmax = grid2.ymax = grid2.zmax = +10.
grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10
setattr(grid2, direction + 'min', -10.1)
setattr(grid2, direction + 'max', 9.9)
m = Model()
m.set_grid(amr)
m.add_density_grid(amr['density'], dust)
m.set_n_photons(initial=1, imaging=0)
m.write(tmpdir.join(random_id()).strpath)
log_file = tmpdir.join(random_id()).strpath
with pytest.raises(SystemExit) as exc:
m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
'complete'
assert ('Grids 1 and 2 in level 1 have edges that are not separated by \n an integer number of cells in the %s direction' % direction) in open(log_file).read()
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_non_integer_refinement(tmpdir, direction):
# Widths of grids in same level are not the same
dust = get_test_dust()
amr = AMRGrid()
level1 = amr.add_level()
grid1 = level1.add_grid()
grid1.nx = grid1.ny = grid1.nz = 4
grid1.xmin = grid1.ymin = grid1.zmin = -10.
grid1.xmax = grid1.ymax = grid1.zmax = +10.
grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10
level2 = amr.add_level()
grid2 = level2.add_grid()
grid2.nx = grid2.ny = grid2.nz = 4
grid2.xmin = grid2.ymin = grid2.zmin = -5.
grid2.xmax = grid2.ymax = grid2.zmax = +5.
grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10
setattr(grid2, direction + 'min', -6.)
m = Model()
m.set_grid(amr)
m.add_density_grid(amr['density'], dust)
m.set_n_photons(initial=1, imaging=0)
m.write(tmpdir.join(random_id()).strpath)
log_file = tmpdir.join(random_id()).strpath
with pytest.raises(SystemExit) as exc:
m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
'complete'
assert ('Refinement factor in the %s direction between level 1 and \n level 2 is not an integer (1.818)' % direction) in open(log_file).read()
@pytest.mark.parametrize(('direction'), ['x', 'y', 'z'])
def test_amr_not_aligned_across_levels(tmpdir, direction):
# Widths of grids in same level are not the same
dust = get_test_dust()
amr = AMRGrid()
level1 = amr.add_level()
grid1 = level1.add_grid()
grid1.nx = grid1.ny = grid1.nz = 4
grid1.xmin = grid1.ymin = grid1.zmin = -10.
grid1.xmax = grid1.ymax = grid1.zmax = +10.
grid1.quantities['density'] = np.ones(grid1.shape) * 1.e-10
level2 = amr.add_level()
grid2 = level2.add_grid()
grid2.nx = grid2.ny = grid2.nz = 4
grid2.xmin = grid2.ymin = grid2.zmin = -5.
grid2.xmax = grid2.ymax = grid2.zmax = +5.
grid2.quantities['density'] = np.ones(grid2.shape) * 1.e-10
setattr(grid2, direction + 'min', -6.)
setattr(grid2, direction + 'max', 4.)
m = Model()
m.set_grid(amr)
m.add_density_grid(amr['density'], dust)
m.set_n_photons(initial=1, imaging=0)
m.write(tmpdir.join(random_id()).strpath)
log_file = tmpdir.join(random_id()).strpath
with pytest.raises(SystemExit) as exc:
m.run(tmpdir.join(random_id()).strpath, logfile=log_file)
assert exc.value.args[0] == 'An error occurred, and the run did not ' + \
'complete'
assert ('Grid 1 in level 2 is not aligned with cells in level 1 in the \n %s direction' % direction) in open(log_file).read()
@pytest.mark.skipif("YT_VERSION is None")
def test_shadowing_regression(tmpdir):
from ...grid.tests.yt_compat import get_frb
# Regression test for a bug that caused photons escaping from some grids to
# be terminated.
amr = AMRGrid()
level = amr.add_level()
grid = level.add_grid()
grid.xmin, grid.xmax = -1, 1
grid.ymin, grid.ymax = -1, 1
grid.zmin, grid.zmax = -1, 1
grid.nx, grid.ny, grid.nz = 8, 8, 8
grid.quantities['density'] = np.ones((grid.nz, grid.ny, grid.nx))
level = amr.add_level()
grid = level.add_grid()
grid.xmin, grid.xmax = 0.5, 1
grid.ymin, grid.ymax = -0.5, 0.5
grid.zmin, grid.zmax = -0.5, 0.5
grid.nx, grid.ny, grid.nz = 4, 8, 8
grid.quantities['density'] = np.ones((grid.nz, grid.ny, grid.nx))
m = Model()
m.set_grid(amr)
m.add_density_grid(amr['density'], get_test_dust())
s = m.add_point_source()
s.luminosity = 100
s.temperature = 10000
s.position = (0.0001, 0.0001, 0.0001)
m.set_n_photons(initial=1e5, imaging=0)
m.write(tmpdir.join(random_id()).strpath)
mo = m.run(tmpdir.join(random_id()).strpath)
from yt.mods import SlicePlot
g = mo.get_quantities()
pf = g.to_yt()
prj = SlicePlot(pf, 'y', ['density', 'temperature'],
center=[0.0, 0.0, 0.0])
# With bug, value was lower because there were shadowed regions
assert 12. < get_frb(prj, 'temperature').min() < 13.
| 31.218487
| 179
| 0.630148
| 1,119
| 7,430
| 4.079535
| 0.150134
| 0.026287
| 0.049069
| 0.055203
| 0.762979
| 0.747426
| 0.73954
| 0.733187
| 0.733187
| 0.724644
| 0
| 0.05033
| 0.224495
| 7,430
| 237
| 180
| 31.350211
| 0.74193
| 0.045491
| 0
| 0.708075
| 0
| 0.018634
| 0.119565
| 0
| 0
| 0
| 0
| 0
| 0.055901
| 1
| 0.031056
| false
| 0
| 0.055901
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5ec010616918b5f9eb7a19f2b69feb4ddf284575
| 59
|
py
|
Python
|
sklearn_gbm_ots/__init__.py
|
aradnaev/sklearn_gbm_ots
|
ed5d2afc02d8d3e8335c891770eb73b09b49c0ce
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
sklearn_gbm_ots/__init__.py
|
aradnaev/sklearn_gbm_ots
|
ed5d2afc02d8d3e8335c891770eb73b09b49c0ce
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
sklearn_gbm_ots/__init__.py
|
aradnaev/sklearn_gbm_ots
|
ed5d2afc02d8d3e8335c891770eb73b09b49c0ce
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
from sklearn_gbm_ots.sklearn_gbm_wrapper import GBMwrapper
| 29.5
| 58
| 0.915254
| 9
| 59
| 5.555556
| 0.777778
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 1
| 59
| 59
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5ecbaa5972609aea0a8fb3c3542ad5f8dcd731c5
| 30
|
py
|
Python
|
jk.py
|
validio-web/JavaPrograms
|
ff05e10135e5d5a527f70f67af683f5f99cd7bd6
|
[
"MIT"
] | null | null | null |
jk.py
|
validio-web/JavaPrograms
|
ff05e10135e5d5a527f70f67af683f5f99cd7bd6
|
[
"MIT"
] | null | null | null |
jk.py
|
validio-web/JavaPrograms
|
ff05e10135e5d5a527f70f67af683f5f99cd7bd6
|
[
"MIT"
] | null | null | null |
print "welcome to the jungle"
| 15
| 29
| 0.766667
| 5
| 30
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 1
| 30
| 30
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
0d6c6a31ee2dcded3d58deceaa07ea08c9e07ee6
| 11,116
|
py
|
Python
|
finicityapi/controllers/consumer_controller.py
|
monarchmoney/finicity-python
|
b2ab1ded435db75c78d42261f5e4acd2a3061487
|
[
"MIT"
] | null | null | null |
finicityapi/controllers/consumer_controller.py
|
monarchmoney/finicity-python
|
b2ab1ded435db75c78d42261f5e4acd2a3061487
|
[
"MIT"
] | null | null | null |
finicityapi/controllers/consumer_controller.py
|
monarchmoney/finicity-python
|
b2ab1ded435db75c78d42261f5e4acd2a3061487
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from finicityapi.api_helper import APIHelper
from finicityapi.configuration import Configuration
from finicityapi.controllers.base_controller import BaseController
from finicityapi.http.auth.custom_header_auth import CustomHeaderAuth
from finicityapi.models.consumer import Consumer
from finicityapi.models.create_consumer_response import CreateConsumerResponse
from finicityapi.exceptions.error_1_error_exception import Error1ErrorException
class ConsumerController(BaseController):
"""A Controller to access Endpoints in the finicityapi API."""
def get_consumer_for_customer(self,
customer_id,
accept,
content_type):
"""Does a GET request to /decisioning/v1/customers/{customerId}/consumer.
Get the details of a consumer record.
If the service is successful, HTTP 200 (Accepted) will be returned. If
the customer does not exist, the service will return HTTP 404 (Not
Found)
Args:
customer_id (long|int): Finicity’s ID of the customer
accept (string): Replace 'json' with 'xml' if preferred
content_type (string): Replace 'json' with 'xml' if preferred
Returns:
Consumer: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(customer_id=customer_id,
accept=accept,
content_type=content_type)
# Prepare query URL
_url_path = '/decisioning/v1/customers/{customerId}/consumer'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customerId': customer_id
})
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'Finicity-App-Key': Configuration.finicity_app_key,
'Accept': accept,
'Content-Type': content_type
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
CustomHeaderAuth.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 404:
raise Error1ErrorException('Bad Request', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, Consumer.from_dictionary)
def create_consumer(self,
customer_id,
body,
accept,
content_type):
"""Does a POST request to /decisioning/v1/customers/{customerId}/consumer.
Create a consumer record associated with the given customer. A
consumer persists as the owner of any reports that are generated, even
after the original customer is deleted from the system. A consumer
must be created for the given customer before calling any of the
Generate Report services.
If a consumer already exists for this customer, this service will
return HTTP 409 (Conflict). If the consumer is successfully created,
the service will return HTTP 201 (Created).
Args:
customer_id (long|int): Finicity’s ID for the customer
body (CreateConsumerRequest): TODO: type description here.
Example:
accept (string): Replace 'json' with 'xml' if preferred
content_type (string): Replace 'json' with 'xml' if preferred
Returns:
CreateConsumerResponse: Response from the API. Created
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(customer_id=customer_id,
body=body,
accept=accept,
content_type=content_type)
# Prepare query URL
_url_path = '/decisioning/v1/customers/{customerId}/consumer'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'customerId': customer_id
})
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'Finicity-App-Key': Configuration.finicity_app_key,
'Accept': accept,
'Content-Type': content_type
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=APIHelper.json_serialize(body))
CustomHeaderAuth.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 404:
raise Error1ErrorException('Bad Request', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, CreateConsumerResponse.from_dictionary)
def get_consumer(self,
consumer_id,
accept,
content_type):
"""Does a GET request to /decisioning/v1/consumers/{consumerId}.
Get the details of a consumer record. If the service successfully
retrieves the consumer record, HTTP 200 will be returned. If the
consumer does not exist, the service will return HTTP 404.
Args:
consumer_id (string): Finicity’s ID of the consumer (UUID with max
length 32 characters)
accept (string): Replace 'json' with 'xml' if preferred
content_type (string): Replace 'json' with 'xml' if preferred
Returns:
Consumer: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(consumer_id=consumer_id,
accept=accept,
content_type=content_type)
# Prepare query URL
_url_path = '/decisioning/v1/consumers/{consumerId}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'consumerId': consumer_id
})
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'Finicity-App-Key': Configuration.finicity_app_key,
'Accept': accept,
'Content-Type': content_type
}
# Prepare and execute request
_request = self.http_client.get(_query_url, headers=_headers)
CustomHeaderAuth.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 404:
raise Error1ErrorException('Bad Request', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, Consumer.from_dictionary)
def modify_consumer(self,
consumer_id,
body,
accept,
content_type):
"""Does a PUT request to /decisioning/v1/consumers/{consumerId}.
Modify the details for an existing consumer. All fields are required
for a consumer record, but individual fields for this call are
optional because fields that are not specified will be left
unchanged.
If the service is successful, HTTP 204 (No Content) will be returned.
If the consumer does not exist, the service will return HTTP 404.
Args:
consumer_id (string): Finicity ID of the consumer (UUID with max
length 32 characters)
body (ModifyConsumerRequest): Consumer details
accept (string): Replace 'json' with 'xml' if preferred
content_type (string): Replace 'json' with 'xml' if preferred
Returns:
void: Response from the API. No Content
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Validate required parameters
self.validate_parameters(consumer_id=consumer_id,
body=body,
accept=accept,
content_type=content_type)
# Prepare query URL
_url_path = '/decisioning/v1/consumers/{consumerId}'
_url_path = APIHelper.append_url_with_template_parameters(_url_path, {
'consumerId': consumer_id
})
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'Finicity-App-Key': Configuration.finicity_app_key,
'Accept': accept,
'Content-Type': content_type
}
# Prepare and execute request
_request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(body))
CustomHeaderAuth.apply(_request)
_context = self.execute_request(_request)
# Endpoint and global error handling using HTTP status codes.
if _context.response.status_code == 404:
raise Error1ErrorException('Bad Request', _context)
self.validate_response(_context)
| 41.17037
| 114
| 0.611011
| 1,159
| 11,116
| 5.659189
| 0.163934
| 0.04025
| 0.031102
| 0.025614
| 0.778015
| 0.770544
| 0.754688
| 0.734563
| 0.724806
| 0.71886
| 0
| 0.007377
| 0.329255
| 11,116
| 269
| 115
| 41.32342
| 0.872318
| 0.392497
| 0
| 0.834783
| 0
| 0
| 0.066838
| 0.029135
| 0
| 0
| 0
| 0.003717
| 0
| 1
| 0.034783
| false
| 0
| 0.06087
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0d9cc7612f2e00ee4ab1d28f9946e89d43ce1f78
| 32
|
py
|
Python
|
Unit 3: Structured Types/Lecture_5_Tuples_and_lists/Exercise: apply to each.py
|
Hongbin-Ze/MITx-6.00.1
|
9443e326a9893f7cdb3b74e06025d34b3e1e9fdc
|
[
"MIT"
] | null | null | null |
Unit 3: Structured Types/Lecture_5_Tuples_and_lists/Exercise: apply to each.py
|
Hongbin-Ze/MITx-6.00.1
|
9443e326a9893f7cdb3b74e06025d34b3e1e9fdc
|
[
"MIT"
] | null | null | null |
Unit 3: Structured Types/Lecture_5_Tuples_and_lists/Exercise: apply to each.py
|
Hongbin-Ze/MITx-6.00.1
|
9443e326a9893f7cdb3b74e06025d34b3e1e9fdc
|
[
"MIT"
] | null | null | null |
def aFunc(a):
return abs(a)
| 10.666667
| 17
| 0.59375
| 6
| 32
| 3.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 32
| 2
| 18
| 16
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
0db8e367adf74db3975b54efc018e7078d8335d1
| 21,311
|
py
|
Python
|
src/systemtest/op-monitoring/xrd-opmon-tests/testcases/test_simple_store_and_query.py
|
digitaliceland/X-Road
|
3f4ec6ebd09681629d375a43edfa2f7ea0500ed2
|
[
"MIT"
] | 436
|
2018-06-02T12:42:25.000Z
|
2022-03-31T13:51:16.000Z
|
src/systemtest/op-monitoring/xrd-opmon-tests/testcases/test_simple_store_and_query.py
|
digitaliceland/X-Road
|
3f4ec6ebd09681629d375a43edfa2f7ea0500ed2
|
[
"MIT"
] | 355
|
2018-06-28T13:59:46.000Z
|
2022-03-09T12:47:14.000Z
|
src/systemtest/op-monitoring/xrd-opmon-tests/testcases/test_simple_store_and_query.py
|
digitaliceland/X-Road
|
3f4ec6ebd09681629d375a43edfa2f7ea0500ed2
|
[
"MIT"
] | 155
|
2018-06-24T17:58:23.000Z
|
2022-03-22T01:25:14.000Z
|
#!/usr/bin/env python3
# The MIT License
# Copyright (c) 2016 Estonian Information System Authority (RIA), Population Register Centre (VRK)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Test case for verifying that the operational monitoring related data
# of a simple correct X-Road request are stored by the operational
# monitoring daemon and can be queried.
import os
import time
import common
# Base sizes of request and responses.
# Parameters sizes must be added to these values.
SIMPLE_QUERY_REQUEST_SOAP_BASE_SIZE = 1461
SIMPLE_QUERY_RESPONSE_SOAP_BASE_SIZE = 1503
QUERY_DATA_CLIENT_REQUEST_SOAP_BASE_SIZE = 1696
QUERY_DATA_CLIENT_RESPONSE_SOAP_BASE_SIZE = 1766
QUERY_DATA_CLIENT_RESPONSE_MIME_BASE_SIZE = 2207
QUERY_DATA_PRODUCER_REQUEST_SOAP_BASE_SIZE = 1681
QUERY_DATA_PRODUCER_RESPONSE_SOAP_BASE_SIZE = 1751
QUERY_DATA_PRODUCER_RESPONSE_MIME_BASE_SIZE = 2194
def _simple_query_request_parameters_size(query_parameters):
# Request template: simple_xroad_query_template.xml
return (
len(query_parameters["producer_instance"])
+ len(query_parameters["producer_class"])
+ len(query_parameters["producer_code"])
+ len(query_parameters["producer_system"])
+ len(query_parameters["client_instance"])
+ len(query_parameters["client_class"])
+ len(query_parameters["client_code"])
+ len(query_parameters["client_system"])
)
def _expected_keys_and_values_of_simple_query_rec(
xroad_message_id, security_server_type, query_parameters):
request_parameters_size = _simple_query_request_parameters_size(query_parameters)
print("Size of simple query request parameters: {}".format(request_parameters_size))
return [
("clientMemberClass", query_parameters["client_class"]),
("clientMemberCode", query_parameters["client_code"]),
("clientSecurityServerAddress", query_parameters["client_server_address"]),
("clientSubsystemCode", query_parameters["client_system"]),
("clientXRoadInstance", query_parameters["client_instance"]),
("messageId", xroad_message_id),
("messageIssue", "453465"),
("messageProtocolVersion", "4.0"),
("messageUserId", "EE12345678901"),
("representedPartyClass", "COM"),
("representedPartyCode", "UNKNOWN_MEMBER"),
("requestAttachmentCount", 0),
("requestSize", SIMPLE_QUERY_REQUEST_SOAP_BASE_SIZE + request_parameters_size),
("responseAttachmentCount", 0),
("responseSize", SIMPLE_QUERY_RESPONSE_SOAP_BASE_SIZE + request_parameters_size),
("securityServerType", security_server_type),
("serviceCode", "mock"),
("serviceMemberClass", query_parameters["producer_class"]),
("serviceMemberCode", query_parameters["producer_code"]),
("serviceSecurityServerAddress", query_parameters["producer_server_address"]),
("serviceSubsystemCode", query_parameters["producer_system"]),
("serviceVersion", "v1"),
("serviceXRoadInstance", query_parameters["producer_instance"]),
("succeeded", True),
]
def _query_data_client_request_parameters_size(query_parameters):
# Request template: query_operational_data_client_template.xml
return (
3 * len(query_parameters["client_instance"])
+ 3 * len(query_parameters["client_class"])
+ 3 * len(query_parameters["client_code"])
+ len(query_parameters["client_system"])
+ len(query_parameters["client_server_code"])
)
def _expected_keys_and_values_of_query_data_client_rec(
        xroad_message_id, security_server_type, query_parameters):
    """Return the (key, value) pairs expected in the operational data
    record of the query data request sent to the client's security server.
    """
    params_size = _query_data_client_request_parameters_size(query_parameters)
    print("Size of query data client request parameters: {}".format(params_size))
    # In this record the client subsystem queries its own security server,
    # so client* and service* identifiers share the same values.
    instance = query_parameters["client_instance"]
    member_class = query_parameters["client_class"]
    member_code = query_parameters["client_code"]
    server_address = query_parameters["client_server_address"]
    return [
        ("clientMemberClass", member_class),
        ("clientMemberCode", member_code),
        ("clientSecurityServerAddress", server_address),
        ("clientSubsystemCode", query_parameters["client_system"]),
        ("clientXRoadInstance", instance),
        ("messageId", xroad_message_id),
        ("messageProtocolVersion", "4.0"),
        ("requestAttachmentCount", 0),
        ("requestSize", QUERY_DATA_CLIENT_REQUEST_SOAP_BASE_SIZE + params_size),
        ("responseAttachmentCount", 1),
        ("securityServerType", security_server_type),
        ("serviceCode", "getSecurityServerOperationalData"),
        ("serviceMemberClass", member_class),
        ("serviceMemberCode", member_code),
        ("serviceSecurityServerAddress", server_address),
        ("serviceVersion", "красивая родина"),
        ("serviceXRoadInstance", instance),
        ("succeeded", True),
    ]
def _query_data_producer_request_parameters_size(query_parameters):
# Request template: query_operational_data_producer_template.xml
return (
3 * len(query_parameters["producer_instance"])
+ 3 * len(query_parameters["producer_class"])
+ 3 * len(query_parameters["producer_code"])
+ len(query_parameters["producer_system"])
+ len(query_parameters["producer_server_code"])
)
def _expected_keys_and_values_of_query_data_producer_rec(
        xroad_message_id, security_server_type, query_parameters):
    """Return the (key, value) pairs expected in the operational data
    record of the query data request sent to the producer's security server.
    """
    params_size = _query_data_producer_request_parameters_size(query_parameters)
    print("Size of query data producer request parameters: {}".format(params_size))
    # The producer subsystem queries its own security server, so client*
    # and service* identifiers share the same values in this record.
    instance = query_parameters["producer_instance"]
    member_class = query_parameters["producer_class"]
    member_code = query_parameters["producer_code"]
    server_address = query_parameters["producer_server_address"]
    return [
        ("clientMemberClass", member_class),
        ("clientMemberCode", member_code),
        ("clientSecurityServerAddress", server_address),
        ("clientSubsystemCode", query_parameters["producer_system"]),
        ("clientXRoadInstance", instance),
        ("messageId", xroad_message_id),
        ("messageProtocolVersion", "4.0"),
        ("requestAttachmentCount", 0),
        ("requestSize", QUERY_DATA_PRODUCER_REQUEST_SOAP_BASE_SIZE + params_size),
        ("responseAttachmentCount", 1),
        ("securityServerType", security_server_type),
        ("serviceCode", "getSecurityServerOperationalData"),
        ("serviceMemberClass", member_class),
        ("serviceMemberCode", member_code),
        ("serviceSecurityServerAddress", server_address),
        ("serviceVersion", "[Hüsker Dü?]"),
        ("serviceXRoadInstance", instance),
        ("succeeded", True),
    ]
def run(request_template_dir, query_parameters):
    """End-to-end operational monitoring test.

    Sends a simple X-Road request through the client's security server,
    then queries the operational data of both the client's and the
    producer's security server and verifies the returned records; finally
    repeats both query data requests to check that the first query data
    requests were themselves recorded.

    request_template_dir -- directory containing the XML request templates
    query_parameters -- dict of test configuration values (server IPs,
        member identifiers, SSH user, ...)
    """
    client_security_server_address = query_parameters["client_server_ip"]
    producer_security_server_address = query_parameters["producer_server_ip"]
    ssh_user = query_parameters["ssh_user"]
    xroad_request_template_filename = os.path.join(
        request_template_dir, "simple_xroad_query_template.xml")
    query_data_client_template_filename = os.path.join(
        request_template_dir, "query_operational_data_client_template.xml")
    query_data_producer_template_filename = os.path.join(
        request_template_dir, "query_operational_data_producer_template.xml")
    # Timestamps are taken over SSH from the servers themselves so the
    # expected record-timestamp range is in server clock time.
    client_timestamp_before_requests = common.get_remote_timestamp(
        client_security_server_address, ssh_user)
    producer_timestamp_before_requests = common.get_remote_timestamp(
        producer_security_server_address, ssh_user)
    xroad_message_id = common.generate_message_id()
    print("\nGenerated message ID {} for X-Road request".format(xroad_message_id))

    # Regular and operational data requests and the relevant checks
    print("\n---- Sending an X-Road request to the client's security server ----\n")
    request_contents = common.format_xroad_request_template(
        xroad_request_template_filename, xroad_message_id, query_parameters)
    print("Generated the following X-Road request: \n")
    print(request_contents)
    request_xml = common.parse_and_clean_xml(request_contents)
    # Headers of the original request
    xroad_request_headers = request_xml.getElementsByTagName(
        "SOAP-ENV:Header")[0].toprettyxml()
    response = common.post_xml_request(
        client_security_server_address, request_contents)
    print("Received the following X-Road response: \n")
    xml = common.parse_and_clean_xml(response.text)
    print(xml.toprettyxml())
    common.check_soap_fault(xml)

    common.wait_for_operational_data()
    client_timestamp_after_requests = common.get_remote_timestamp(
        client_security_server_address, ssh_user)
    producer_timestamp_after_requests = common.get_remote_timestamp(
        producer_security_server_address, ssh_user)

    # Now make operational data requests to both security servers and
    # check the response payloads.
    print("\n---- Sending an operational data request to the client's security server ----\n")
    message_id = common.generate_message_id()
    # Remember the message ID so the repeated query can verify that this
    # query data request itself got recorded.
    message_id_client = message_id
    print("Generated message ID {} for query data request".format(message_id))
    request_contents = common.format_query_operational_data_request_template(
        query_data_client_template_filename, message_id,
        client_timestamp_before_requests, client_timestamp_after_requests,
        query_parameters)
    print("Generated the following query data request for the client's security server: \n")
    print(request_contents)
    response = common.post_xml_request(
        client_security_server_address, request_contents,
        get_raw_stream=True)
    mime_parts, raw_response = common.parse_multipart_response(response)
    if mime_parts:
        soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
        common.print_multipart_soap_and_record_count(soap_part, record_count)
        json_payload = common.get_multipart_json_payload(mime_parts[1])
        # Check the presence of all the required fields in at least one
        # JSON structure.
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_simple_query_rec(
                xroad_message_id, "Client", query_parameters))
        # As operational data is queried by regular client, the field
        # 'securityServerInternalIp' is not expected to be included
        # in the response payload.
        common.assert_missing_in_json(json_payload, "securityServerInternalIp")
        # Check if the timestamps in the response are in the expected
        # range.
        common.assert_expected_timestamp_values(
            json_payload,
            client_timestamp_before_requests, client_timestamp_after_requests)
        common.print_multipart_query_data_response(json_payload, xroad_message_id)
    else:
        # No MIME parts: the server replied with plain SOAP (e.g. a fault).
        common.parse_and_check_soap_response(raw_response)

    print("\nThe headers of the original request were: \n")
    print(xroad_request_headers)

    print("\n---- Sending an operational data request to the producer's "
          "security server ----\n")
    message_id = common.generate_message_id()
    message_id_producer = message_id
    print("\nGenerated message ID {} for operational data request".format(message_id))
    request_contents = common.format_query_operational_data_request_template(
        query_data_producer_template_filename, message_id,
        producer_timestamp_before_requests, producer_timestamp_after_requests,
        query_parameters)
    print("Generated the following operational data request for the producer's "
          "security server: \n")
    print(request_contents)
    response = common.post_xml_request(
        producer_security_server_address, request_contents,
        get_raw_stream=True)
    mime_parts, raw_response = common.parse_multipart_response(response)
    if mime_parts:
        soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
        common.print_multipart_soap_and_record_count(soap_part, record_count, is_client=False)
        json_payload = common.get_multipart_json_payload(mime_parts[1])
        # Check the presence of all the required fields in at least one
        # JSON structure.
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_simple_query_rec(
                xroad_message_id, "Producer", query_parameters))
        # As operational data is queried by regular client, the field
        # 'securityServerInternalIp' is not expected to be included
        # in the response payload.
        common.assert_missing_in_json(json_payload, "securityServerInternalIp")
        # Check timestamp values
        common.assert_expected_timestamp_values(
            json_payload,
            producer_timestamp_before_requests, producer_timestamp_after_requests)
        common.print_multipart_query_data_response(
            json_payload, xroad_message_id)
    else:
        common.parse_and_check_soap_response(raw_response)

    print("\nThe headers of the original request were: \n")
    print(xroad_request_headers)

    # Repeat both query_data requests after a second, to ensure the
    # initial attempts were also stored in the operational_data table.
    time.sleep(1)

    print("\n---- Repeating the query_data request to the client's security server ----\n")
    client_timestamp_after_requests = common.get_remote_timestamp(
        client_security_server_address, ssh_user)
    producer_timestamp_after_requests = common.get_remote_timestamp(
        producer_security_server_address, ssh_user)
    message_id = common.generate_message_id()
    print("\nGenerated message ID {} for operational data request".format(message_id))
    request_contents = common.format_query_operational_data_request_template(
        query_data_client_template_filename, message_id,
        client_timestamp_before_requests, client_timestamp_after_requests,
        query_parameters)
    print("Generated the following operational data request for the client's "
          "security server: \n")
    print(request_contents)
    response = common.post_xml_request(
        client_security_server_address, request_contents, get_raw_stream=True)
    mime_parts, raw_response = common.parse_multipart_response(response)
    if mime_parts:
        soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
        common.print_multipart_soap_and_record_count(soap_part, record_count)
        json_payload = common.get_multipart_json_payload(mime_parts[1])
        # Check the presence of the required fields in the JSON
        # structures.
        # The record describing the original X-Road request
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_simple_query_rec(
                xroad_message_id, "Client", query_parameters))
        # The record describing the query data request at the client
        # proxy side in the client's security server
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_query_data_client_rec(
                message_id_client, "Client", query_parameters))
        # The record describing the query data request at the server
        # proxy side in the client's security server
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_query_data_client_rec(
                message_id_client, "Producer", query_parameters))
        # Check if the value of "responseSize" is in the expected
        # range.
        common.assert_response_soap_size_in_range(
            json_payload, message_id_client, (
                QUERY_DATA_CLIENT_RESPONSE_SOAP_BASE_SIZE
                + _query_data_client_request_parameters_size(query_parameters)
            ), 2)
        # Check if the value of "responseMimeSize" is in the expected
        # range.
        common.assert_response_mime_size_in_range(
            json_payload, message_id_client, (
                QUERY_DATA_CLIENT_RESPONSE_MIME_BASE_SIZE
                + _query_data_client_request_parameters_size(query_parameters)
            ), 2)
        # As operational data is queried by regular client, the field
        # 'securityServerInternalIp' is not expected to be included
        # in the response payload.
        common.assert_missing_in_json(json_payload, "securityServerInternalIp")
        # Check timestamp values
        common.assert_expected_timestamp_values(
            json_payload,
            client_timestamp_before_requests, client_timestamp_after_requests)
        common.print_multipart_query_data_response(json_payload)
    else:
        common.parse_and_check_soap_response(raw_response)

    print("\n----- Repeating the query_data request to the producer's security server ----\n")
    message_id = common.generate_message_id()
    print("\nGenerated message ID {} for operational data request".format(message_id))
    request_contents = common.format_query_operational_data_request_template(
        query_data_producer_template_filename, message_id,
        producer_timestamp_before_requests, producer_timestamp_after_requests,
        query_parameters)
    print("Generated the following operational data request for the producer's "
          "security server: \n")
    print(request_contents)
    response = common.post_xml_request(
        producer_security_server_address, request_contents, get_raw_stream=True)
    mime_parts, raw_response = common.parse_multipart_response(response)
    if mime_parts:
        soap_part, record_count = common.get_multipart_soap_and_record_count(mime_parts[0])
        common.print_multipart_soap_and_record_count(soap_part, record_count, is_client=False)
        json_payload = common.get_multipart_json_payload(mime_parts[1])
        # Check the presence of the required fields in the JSON
        # structures.
        # The record describing the original X-Road request
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_simple_query_rec(
                xroad_message_id, "Producer", query_parameters))
        # The record describing the query data request at the client
        # proxy side in the producer's security server
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_query_data_producer_rec(
                message_id_producer, "Client", query_parameters))
        # The record describing the query data request at the server
        # proxy side in the producer's security server
        common.assert_present_in_json(
            json_payload, _expected_keys_and_values_of_query_data_producer_rec(
                message_id_producer, "Producer", query_parameters))
        # Check if the value of "responseSize" is in the expected
        # range.
        common.assert_response_soap_size_in_range(
            json_payload, message_id_producer, (
                QUERY_DATA_PRODUCER_RESPONSE_SOAP_BASE_SIZE +
                _query_data_producer_request_parameters_size(query_parameters)
            ), 2)
        # Check if the value of "responseMimeSize" is in the expected
        # range.
        common.assert_response_mime_size_in_range(
            json_payload, message_id_producer, (
                QUERY_DATA_PRODUCER_RESPONSE_MIME_BASE_SIZE
                + _query_data_producer_request_parameters_size(query_parameters)
            ), 2)
        # As operational data is queried by regular client, the field
        # 'securityServerInternalIp' is not expected to be included
        # in the response payload.
        common.assert_missing_in_json(json_payload, "securityServerInternalIp")
        # Check timestamp values
        common.assert_expected_timestamp_values(
            json_payload,
            producer_timestamp_before_requests, producer_timestamp_after_requests)
        common.print_multipart_query_data_response(json_payload)
    else:
        common.parse_and_check_soap_response(raw_response)
| 45.246285
| 98
| 0.733283
| 2,494
| 21,311
| 5.871291
| 0.12069
| 0.077853
| 0.035853
| 0.021307
| 0.819368
| 0.77887
| 0.758246
| 0.730998
| 0.706822
| 0.692959
| 0
| 0.00505
| 0.191591
| 21,311
| 470
| 99
| 45.342553
| 0.844904
| 0.175262
| 0
| 0.603279
| 0
| 0
| 0.20112
| 0.045494
| 0
| 0
| 0
| 0
| 0.065574
| 1
| 0.022951
| false
| 0
| 0.009836
| 0.009836
| 0.052459
| 0.121311
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0dc0ab3941d2a0577ef9dd19f4ae678cbe8b2aa3
| 7,327
|
py
|
Python
|
package/cudamat/cudamat_conv.py
|
Corvalius/deepnet
|
5ff6726ae64c248283b9038a0023983bd209a7b0
|
[
"BSD-3-Clause"
] | 1
|
2018-04-02T08:59:56.000Z
|
2018-04-02T08:59:56.000Z
|
package/cudamat/cudamat_conv.py
|
Corvalius/deepnet
|
5ff6726ae64c248283b9038a0023983bd209a7b0
|
[
"BSD-3-Clause"
] | null | null | null |
package/cudamat/cudamat_conv.py
|
Corvalius/deepnet
|
5ff6726ae64c248283b9038a0023983bd209a7b0
|
[
"BSD-3-Clause"
] | null | null | null |
import ctypes as ct
import math
import pdb
import platform

# Load the compiled cudamat convolution library via ctypes; the shared
# library file name depends on the host operating system.
if platform.system() == 'Windows':
    _ConvNet = ct.cdll.LoadLibrary('libcudamat_conv.dll')
else:
    _ConvNet = ct.cdll.LoadLibrary('libcudamat_conv.so')
def convUp(images, filters, targets, numModulesX, paddingStart, moduleStride, numImgColors, numGroups=1):
    """Forward convolution; results are written into `targets`.

    images - (n_images, img_w**2 * n_chans)
    filters - (n_filters, filter_w**2 * n_chans)
    targets - (n_images, n_locs**2 * n_filters)
    numModulesX - Number of filter locations along an axis. = n_locs
    paddingStart - Set to k for a k-pixel border of zeros. Usually set to 0.
    moduleStride - stride to move the filters by.
    numImgColors - n_chans
    """
    n_images = images.shape[0]
    n_filters = filters.shape[0]
    expected_shape = (n_images, n_filters * numModulesX * numModulesX)
    assert targets.shape == expected_shape, '%s %d %d-%d-%d' % (
        str(targets.shape), n_images, n_filters, numModulesX, numModulesX)
    # The native code takes padding as a non-positive offset, hence the
    # sign flip on paddingStart.
    _ConvNet.convUp(images.p_mat, filters.p_mat, targets.p_mat,
                    numModulesX, -paddingStart, moduleStride,
                    numImgColors, numGroups)
def convDown(hidSums, filters, targets, numModulesX, paddingStart, moduleStride, filterSizeX, imSizeX, numImgColors):
    """Backward pass of convUp: propagate hidden sums down to image space.

    hidSums - (n_images, n_locs**2 * n_filters)
    filters - (n_filters, filter_w**2 * n_chans)
    targets - (n_images, img_w**2 * n_chans), written in place

    numModulesX and filterSizeX are unused by this wrapper; they are kept
    to preserve the call signature for existing callers.
    """
    numGroups = 1  # grouped convolution is not exposed by this wrapper
    numImages = hidSums.shape[0]
    assert paddingStart >= 0
    assert targets.shape == (numImages, numImgColors * imSizeX * imSizeX)
    # The native code takes padding as a non-positive offset, hence the
    # sign flip on paddingStart.
    _ConvNet.convDown(hidSums.p_mat, filters.p_mat, targets.p_mat, imSizeX,
                      -paddingStart, moduleStride, numImgColors, numGroups)
def convOutp(images, hidSums, targets, numModulesX, paddingStart, filterSizeX, moduleStride, numImgColors):
    """Compute filter gradients for the convolutional layer.

    images - (n_images, img_w**2 * n_chans)
    hidSums - (n_images, n_locs**2 * n_filters)
    targets - (n_filters, filter_w**2 * n_chans), written in place
    """
    numGroups = 1   # grouped convolution is not exposed by this wrapper
    partialSum = 0  # 0 = accumulate over all modules at once
    # Floor division keeps the filter count an int on Python 3; true
    # division would yield a float and make the shape assertion always fail.
    numFilters = hidSums.shape[1] // (numModulesX**2)
    assert targets.shape == (numFilters, numImgColors * filterSizeX * filterSizeX), '%s %d %d-%d-%d' % (
        str(targets.shape), numFilters, numImgColors, filterSizeX, filterSizeX)
    # Pass the named constants instead of bare literals for clarity; the
    # native code takes padding as a non-positive offset.
    _ConvNet.convOutp(images.p_mat, hidSums.p_mat, targets.p_mat,
                      numModulesX, filterSizeX, -paddingStart, moduleStride,
                      numImgColors, numGroups, partialSum)
def localUp(images, filters, targets, numModulesX, paddingStart, moduleStride, numImgColors, numGroups=1):
    """Forward pass of a locally-connected (untied-weights) layer.

    images - (n_images, img_w**2 * n_chans)
    filters - (n_filters, filter_w**2 * n_chans)
    targets - (n_images, n_locs**2 * n_filters)
    numModulesX - Number of filter locations along an axis. = n_locs
    paddingStart - Set to k for a k-pixel border of zeros. Usually set to 0.
    moduleStride - stride to move the filters by.
    numImgColors - n_chans
    """
    n_images = images.shape[0]
    n_filters = filters.shape[0]
    expected_shape = (n_images, n_filters * numModulesX * numModulesX)
    assert targets.shape == expected_shape, '%s %d %d-%d-%d' % (
        str(targets.shape), n_images, n_filters, numModulesX, numModulesX)
    # The native code takes padding as a non-positive offset, hence the
    # sign flip on paddingStart.
    _ConvNet.localUp(images.p_mat, filters.p_mat, targets.p_mat,
                     numModulesX, -paddingStart, moduleStride,
                     numImgColors, numGroups)
def localDown(hidSums, filters, targets, numModulesX, paddingStart, moduleStride, filterSizeX, imSizeX, numImgColors):
    """Backward pass of localUp: propagate hidden sums down to image space.

    hidSums - (n_images, n_locs**2 * n_filters)
    filters - (n_filters, filter_w**2 * n_chans)
    targets - (n_images, img_w**2 * n_chans), written in place

    numModulesX and filterSizeX are unused by this wrapper; they are kept
    to preserve the call signature for existing callers.
    """
    numGroups = 1  # grouped convolution is not exposed by this wrapper
    numImages = hidSums.shape[0]
    assert paddingStart >= 0
    assert targets.shape == (numImages, numImgColors * imSizeX * imSizeX)
    # The native code takes padding as a non-positive offset, hence the
    # sign flip on paddingStart.
    _ConvNet.localDown(hidSums.p_mat, filters.p_mat, targets.p_mat,
                       imSizeX, -paddingStart, moduleStride, numImgColors,
                       numGroups)
def localOutp(images, hidSums, targets, numModulesX, paddingStart, filterSizeX, moduleStride, numImgColors):
    """Compute filter gradients for the locally-connected layer.

    images - (n_images, img_w**2 * n_chans)
    hidSums - (n_images, n_locs**2 * n_filters)
    targets - (n_filters, n_locs**2 * filter_w**2 * n_chans), written in
        place (untied weights, hence the extra n_locs**2 factor)
    """
    numGroups = 1   # grouped convolution is not exposed by this wrapper
    partialSum = 0  # 0 = accumulate over all modules at once
    # Floor division keeps the filter count an int on Python 3; true
    # division would yield a float and make the shape assertion always fail.
    numFilters = hidSums.shape[1] // (numModulesX**2)
    assert targets.shape == (numFilters, numModulesX**2 * numImgColors * filterSizeX**2), '%s %d %d-%d-%d' % (
        str(targets.shape), numFilters, numImgColors, filterSizeX, filterSizeX)
    # The native code takes padding as a non-positive offset.
    _ConvNet.localOutp(images.p_mat, hidSums.p_mat, targets.p_mat,
                       numModulesX, filterSizeX, -paddingStart, moduleStride,
                       numImgColors, numGroups, partialSum)
def MaxPool(images, targets, numChannels, subsX, startX, strideX, outputsX):
    """Max pooling; results are written into `targets`.

    images - (n_images, img_w**2 * n_chans)
    numChannels - number of filter/color channels
    subsX - width of pooling area
    startX - pixel where pooling starts
    strideX - stride
    outputsX - number of pooling sites
    """
    n_images = images.shape[0]
    pooled_shape = (n_images, numChannels * outputsX * outputsX)
    assert targets.shape == pooled_shape
    _ConvNet.MaxPool(images.p_mat, targets.p_mat, numChannels,
                     subsX, startX, strideX, outputsX)
def ProbMaxPool(images, rnd, targets, numChannels, subsX, startX, strideX, outputsX):
    """Probabilistic max pooling driven by caller-supplied random values.

    images - (n_images, img_w**2 * n_chans)
    rnd - (n_images, img_w**2 * n_chans) random values, same shape as images
    numChannels - number of filter/color channels
    subsX - width of pooling area
    startX - pixel where pooling starts
    strideX - stride
    outputsX - number of pooling sites
    """
    n_images = images.shape[0]
    pooled_shape = (n_images, numChannels * outputsX * outputsX)
    assert targets.shape == pooled_shape
    assert rnd.shape == images.shape
    _ConvNet.ProbMaxPool(images.p_mat, rnd.p_mat, targets.p_mat,
                         numChannels, subsX, startX, strideX, outputsX)
def MaxPoolUndo(images, targets, grad, maxes,
                subsX, startX, strideX, outputsX):
    """Backward pass of MaxPool: route gradients back through the pool.

    images - (n_images, img_w**2 * n_chans) original pooling input
    grad - (n_images, outputsX**2 * n_chans) cudamat of deltas/gradients of
        loss wrt layer outputs.
    maxes - (n_images, outputsX**2 * n_chans) cudamat of layer outputs.
    targets - output buffer, same shape as images
    subsX - width of pooling area
    startX - pixel where pooling starts
    strideX - stride
    outputsX - number of pooling sites
    """
    assert images.shape == targets.shape
    _ConvNet.MaxPoolUndo(images.p_mat, grad.p_mat, maxes.p_mat,
                         targets.p_mat, subsX, startX, strideX, outputsX)
def ResponseNorm(images, denoms, targets, numChannels, sizeX, addScale, powScale):
    """Apply the native response-normalization kernel.

    images - input activations
    denoms - buffer for the normalization denominators, same shape as
        images (written in place)
    targets - output buffer, same shape as images (written in place)
    numChannels - number of filter/color channels
    sizeX - width of the normalization neighborhood
    addScale, powScale - float constants forwarded to the kernel

    NOTE(review): the original computed (but never used) a per-channel
    pixel count and image width with Python-2-only integer division, plus
    commented-out sanity asserts and a typo'd pdb call; that dead code has
    been removed.
    """
    assert targets.shape == images.shape
    assert targets.shape == denoms.shape
    # The scale parameters are wrapped in c_float for the ctypes call.
    _ConvNet.ResponseNorm(images.p_mat, denoms.p_mat, targets.p_mat,
                          numChannels, sizeX, ct.c_float(addScale),
                          ct.c_float(powScale))
def ResponseNormUndo(outGrad, denoms, inGrad, acts, targets, numChannels, sizeX,
                     addScale, powScale):
    """Backward pass of ResponseNorm.

    outGrad - gradients wrt the normalized outputs
    denoms - denominators produced by the forward pass
    inGrad - gradients wrt the layer inputs
    acts - forward-pass outputs
    targets - output buffer; all five buffers must share one shape
    numChannels, sizeX, addScale, powScale - as in ResponseNorm
    """
    for buf in (outGrad, denoms, inGrad, acts):
        assert targets.shape == buf.shape
    _ConvNet.ResponseNormUndo(outGrad.p_mat, denoms.p_mat, inGrad.p_mat,
                              acts.p_mat, targets.p_mat, numChannels, sizeX,
                              ct.c_float(addScale), ct.c_float(powScale))
| 39.181818
| 184
| 0.711888
| 930
| 7,327
| 5.452688
| 0.126882
| 0.027608
| 0.024847
| 0.025242
| 0.827845
| 0.807533
| 0.779136
| 0.779136
| 0.744429
| 0.744429
| 0
| 0.010038
| 0.170602
| 7,327
| 186
| 185
| 39.392473
| 0.82442
| 0.280879
| 0
| 0.436782
| 0
| 0
| 0.019635
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 1
| 0.126437
| false
| 0
| 0.045977
| 0
| 0.172414
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
21c3e1098557143ef1907dc94df0967ca35bba38
| 6,401
|
py
|
Python
|
regularized-wasserstein-estimator/optimization.py
|
MarinBallu/regularized-wasserstein-estimator
|
aeb21778180a5f7b88789ac9640bf0aa90a07552
|
[
"MIT"
] | null | null | null |
regularized-wasserstein-estimator/optimization.py
|
MarinBallu/regularized-wasserstein-estimator
|
aeb21778180a5f7b88789ac9640bf0aa90a07552
|
[
"MIT"
] | null | null | null |
regularized-wasserstein-estimator/optimization.py
|
MarinBallu/regularized-wasserstein-estimator
|
aeb21778180a5f7b88789ac9640bf0aa90a07552
|
[
"MIT"
] | null | null | null |
from initialize import initialize
import timeit
import numpy as np
import computations
### STOCHASTIC GRADIENT DESCENT
def sgd_entropic_regularization(a, b, M, reg1, reg2, numItermax, lr, maxTime):
    r'''
    One-sample stochastic gradient descent for the doubly regularized
    discrete-measure OT dual estimation problem.

    Parameters
    ----------
    a : ndarray, shape (ns,)
        source measure
    b : ndarray, shape (nt,)
        target measure
    M : ndarray, shape (ns, nt)
        cost matrix
    reg1, reg2 : float
        regularization terms > 0
    numItermax : int
        number of iterations
    lr : float
        learning rate
    maxTime : float or None
        wall-clock budget in seconds; the loop stops once it is exceeded

    Returns
    -------
    alpha_list, beta_list : lists of ndarray
        history of the two dual variables, one snapshot per iteration
    time_list : list of float
        elapsed time after each iteration
    '''
    (random_list_a, random_list_b, cur_alpha, cur_beta, cur_S,
     alpha_list, beta_list, time_list) = initialize(a, b, M, numItermax, 1)
    start = timeit.default_timer()
    for cur_iter in range(numItermax):
        # One random source index and one random target index per step.
        i = random_list_a[cur_iter]
        j = random_list_b[cur_iter]
        # Decaying stepsize.
        stepsize = lr / np.sqrt(cur_iter + 1)
        # Gradient of the dual objective at the sampled pair.
        partial_target = computations.partial_target_meas(
            b[j], cur_beta[j], reg2, cur_S)
        grad_alpha, grad_beta = computations.partial_grad_dual(
            b[j], partial_target, M[i, j], reg1, cur_alpha[i], cur_beta[j])
        # Update the dual variables and the running normalization term.
        cur_alpha[i], cur_beta[j], cur_S = computations.sgd_update(
            b[j], reg2, cur_alpha[i], cur_beta[j], cur_S,
            grad_alpha, grad_beta, stepsize)
        # Keep a full history of the iterates and timings for analysis.
        alpha_list.append(np.array(cur_alpha))
        beta_list.append(np.array(cur_beta))
        elapsed = timeit.default_timer() - start
        time_list.append(elapsed)
        if maxTime and elapsed > maxTime:
            break
    stop = timeit.default_timer()
    print('Nb iter: ', cur_iter + 1)
    print('Time: ', stop - start)
    print('Average iteration time: ', (stop - start) / numItermax)
    return alpha_list, beta_list, time_list
### BATCHED GRADIENT DESCENT
def bgd_entropic_regularization(a, b, M, reg1, reg2, numItermax, batch_size, lr, maxTime):
    r'''
    Compute the batched sgd algorithm to solve the regularized discrete
    measures ot dual estimation problem.

    Parameters
    ----------
    a : ndarray, shape (ns,)
        source measure
    b : ndarray, shape (nt,)
        target measure
    M : ndarray, shape (ns, nt)
        cost matrix
    reg1, reg2 : float
        Regularization terms > 0
    numItermax : int
        number of iterations
    batch_size : int
        size of the batch
    lr : float
        learning rate
    maxTime : float or None
        wall-clock budget in seconds; the loop stops once it is exceeded

    Returns
    -------
    alpha_list, beta_list : lists of ndarray
        history of the two dual variables, one snapshot per iteration
    time_list : list of float
        elapsed time after each iteration
    '''
    # Initialize variables
    random_list_a, random_list_b, cur_alpha, cur_beta, cur_S, alpha_list, beta_list, time_list = initialize(a, b, M, numItermax, batch_size)
    # Initialize time counter
    start = timeit.default_timer()
    for cur_iter in range(numItermax):
        # Receive the random batches of indices
        lo = cur_iter * batch_size
        hi = (cur_iter + 1) * batch_size
        batch_a = random_list_a[lo:hi]
        batch_b = random_list_b[lo:hi]
        # Compute the stepsize: decaying, clipped at reg1, averaged over
        # the batch.  (Fixed: the original had a duplicated
        # "stepsize = stepsize =" assignment.)
        stepsize = min(lr / np.sqrt(cur_iter + 1), reg1) / batch_size
        # Compute gradients
        partial_target = computations.partial_target_meas(
            b[batch_b], cur_beta[batch_b], reg2, cur_S)
        grad_alpha, grad_beta = computations.partial_grad_dual(
            b[batch_b], partial_target, M[batch_a, batch_b], reg1,
            cur_alpha[batch_a], cur_beta[batch_b])
        # Update dual variables
        cur_alpha, cur_beta, cur_S = computations.bgd_update(
            b, reg2, cur_alpha, cur_beta, cur_S, grad_alpha, grad_beta,
            batch_a, batch_b, stepsize)
        # Update memory for analysis
        alpha_list.append(np.array(cur_alpha))
        beta_list.append(np.array(cur_beta))
        t = timeit.default_timer() - start
        time_list.append(t)
        # Stopping time
        if maxTime and t > maxTime:
            break
    # Stop time counter
    stop = timeit.default_timer()
    # Print info
    print('Nb iter: ', cur_iter + 1)
    print('Time: ', stop - start)
    print('Average iteration time: ', (stop - start) / numItermax)
    # Return memory of dual variables and time
    return alpha_list, beta_list, time_list
### SEMI-STOCHASTIC GRADIENT DESCENT
def ssgd_entropic_regularization(a, b, M, reg1, reg2, numItermax, lr, maxTime):
    r'''
    Semi-stochastic gradient descent for the regularized discrete-measure
    OT dual estimation problem: one random sample drives the alpha update
    while beta receives a full-gradient step.

    Parameters
    ----------
    a : ndarray, shape (ns,)
        source measure
    b : ndarray, shape (nt,)
        target measure
    M : ndarray, shape (ns, nt)
        cost matrix
    reg1, reg2 : float
        regularization terms > 0
    numItermax : int
        number of iterations
    lr : float
        learning rate
    maxTime : float or None
        wall-clock budget in seconds; the loop stops once it is exceeded

    Returns
    -------
    alpha_list, beta_list : lists of ndarray
        history of the two dual variables, one snapshot per iteration
    time_list : list of float
        elapsed time after each iteration
    '''
    # The second random index list is unused here: beta is updated with
    # its full gradient rather than a sampled one.
    (random_list_a, _unused_random_b, cur_alpha, cur_beta, cur_S,
     alpha_list, beta_list, time_list) = initialize(a, b, M, numItermax, 1)
    start = timeit.default_timer()
    for cur_iter in range(numItermax):
        sample = random_list_a[cur_iter]
        # Decaying stepsize.
        stepsize = lr / np.sqrt(cur_iter + 1)
        # Full target measure from the current beta, then mixed gradients.
        target = computations.dual_to_target(b, reg2, cur_beta)
        grad_alpha, grad_beta = computations.semi_grad_dual(
            b, target, M[sample], reg1, cur_alpha[sample], cur_beta)
        cur_alpha[sample] += stepsize * grad_alpha
        cur_beta += stepsize * grad_beta
        # Keep a full history of the iterates and timings for analysis.
        alpha_list.append(np.array(cur_alpha))
        beta_list.append(np.array(cur_beta))
        elapsed = timeit.default_timer() - start
        time_list.append(elapsed)
        if maxTime and elapsed > maxTime:
            break
    stop = timeit.default_timer()
    print('Nb iter: ', cur_iter + 1)
    print('Time: ', stop - start)
    print('Average iteration time: ', (stop - start) / numItermax)
    return alpha_list, beta_list, time_list
| 32.165829
| 157
| 0.683331
| 903
| 6,401
| 4.654485
| 0.125138
| 0.028313
| 0.029979
| 0.024268
| 0.85201
| 0.82108
| 0.779205
| 0.779205
| 0.758982
| 0.743279
| 0
| 0.006777
| 0.216216
| 6,401
| 199
| 158
| 32.165829
| 0.830975
| 0.381815
| 0
| 0.602941
| 0
| 0
| 0.031258
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044118
| false
| 0
| 0.058824
| 0
| 0.147059
| 0.132353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
21e9a6907f5b3b46acccb7f2baa68e74cb020ad7
| 35
|
py
|
Python
|
tests/test_to_be_removed.py
|
Farama-Foundation/jump
|
31524fb6b5bfdd654c71446a1b3f4aef86955f42
|
[
"Apache-2.0"
] | 2
|
2022-02-25T23:41:13.000Z
|
2022-02-26T13:22:53.000Z
|
tests/test_to_be_removed.py
|
Farama-Foundation/jump
|
31524fb6b5bfdd654c71446a1b3f4aef86955f42
|
[
"Apache-2.0"
] | null | null | null |
tests/test_to_be_removed.py
|
Farama-Foundation/jump
|
31524fb6b5bfdd654c71446a1b3f4aef86955f42
|
[
"Apache-2.0"
] | 2
|
2022-02-25T23:29:50.000Z
|
2022-03-08T05:42:19.000Z
|
def test_to_be_removed():
    """Placeholder test kept until the suite gains real coverage."""
    return None
| 11.666667
| 25
| 0.714286
| 6
| 35
| 3.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 35
| 2
| 26
| 17.5
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
df0e0aed26bec90479f8f5ad09c24410a1024383
| 71
|
py
|
Python
|
sgi/base/tests/__init__.py
|
jorgevilaca82/SGI
|
c3f13d9e3e8f04377d9e23636dc8e35ed5ace35a
|
[
"MIT"
] | null | null | null |
sgi/base/tests/__init__.py
|
jorgevilaca82/SGI
|
c3f13d9e3e8f04377d9e23636dc8e35ed5ace35a
|
[
"MIT"
] | 8
|
2019-12-07T13:13:34.000Z
|
2021-09-02T03:07:25.000Z
|
sgi/base/tests/__init__.py
|
jorgevilaca82/SGI
|
c3f13d9e3e8f04377d9e23636dc8e35ed5ace35a
|
[
"MIT"
] | null | null | null |
from .pessoa_tests import *
from .unidadeorganizacional_tests import *
| 23.666667
| 42
| 0.830986
| 8
| 71
| 7.125
| 0.625
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 43
| 35.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
df3bf899638da406b7123ee13fad1b04c0a2568e
| 59
|
py
|
Python
|
python/dataingest/patents/svc/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/dataingest/patents/svc/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/dataingest/patents/svc/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
from .generate_src_collection import GenerateSrcCollection
| 29.5
| 58
| 0.915254
| 6
| 59
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 59
| 1
| 59
| 59
| 0.945455
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
df493625da55eaea97ae29d15d245add1cb4e501
| 39
|
py
|
Python
|
pipeline/src/enrich/__init__.py
|
wellcomecollection/concepts
|
fcdb23d47ea841bb3b8d2826a8bd8640dc639ccb
|
[
"MIT"
] | null | null | null |
pipeline/src/enrich/__init__.py
|
wellcomecollection/concepts
|
fcdb23d47ea841bb3b8d2826a8bd8640dc639ccb
|
[
"MIT"
] | 52
|
2020-04-21T09:13:45.000Z
|
2022-01-31T09:43:16.000Z
|
pipeline/src/enrich/__init__.py
|
wellcomecollection/concepts
|
fcdb23d47ea841bb3b8d2826a8bd8640dc639ccb
|
[
"MIT"
] | null | null | null |
from ..utils import clean, http_client
| 19.5
| 38
| 0.794872
| 6
| 39
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 1
| 39
| 39
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
df587411f65d34db40c86a1eb7e3c422a977c673
| 229
|
py
|
Python
|
pycopa/result.py
|
advaita-krishna-das/pycopa
|
ed681f86c9c3b418605dbf6b0aee6a63db48458a
|
[
"MIT"
] | null | null | null |
pycopa/result.py
|
advaita-krishna-das/pycopa
|
ed681f86c9c3b418605dbf6b0aee6a63db48458a
|
[
"MIT"
] | null | null | null |
pycopa/result.py
|
advaita-krishna-das/pycopa
|
ed681f86c9c3b418605dbf6b0aee6a63db48458a
|
[
"MIT"
] | null | null | null |
class ParserResult:
def __init__(self, value):
self.__value = value
def get(self, name):
return self.__value.get(name, None)
def __getattr__(self, item):
return self.__value.get(item, None)
| 20.818182
| 43
| 0.633188
| 29
| 229
| 4.517241
| 0.413793
| 0.274809
| 0.229008
| 0.274809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257642
| 229
| 10
| 44
| 22.9
| 0.770588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
df6719fc088a3a0aaec2c43c101efb49c9214882
| 40
|
py
|
Python
|
test/tests/test_environments/python_src/foo.py
|
jithindevasia/fission
|
4a4ba9d36d92c61c9f22b07e10ec08580957b520
|
[
"Apache-2.0"
] | 6,891
|
2016-12-24T05:54:27.000Z
|
2022-03-31T15:55:08.000Z
|
test/tests/test_environments/python_src/foo.py
|
jithindevasia/fission
|
4a4ba9d36d92c61c9f22b07e10ec08580957b520
|
[
"Apache-2.0"
] | 1,840
|
2016-12-24T18:50:37.000Z
|
2022-03-31T08:22:23.000Z
|
test/tests/test_environments/python_src/foo.py
|
jithindevasia/fission
|
4a4ba9d36d92c61c9f22b07e10ec08580957b520
|
[
"Apache-2.0"
] | 839
|
2017-01-13T12:40:55.000Z
|
2022-03-29T14:44:02.000Z
|
def bar():
return 'THIS_IS_FOO_BAR'
| 13.333333
| 28
| 0.675
| 7
| 40
| 3.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 40
| 2
| 29
| 20
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
df8350f47114b38736a30f3514c9d0957c34d333
| 48
|
py
|
Python
|
bc/utils/__init__.py
|
rjgpinel/rlbc
|
55a7499e4ad10182d9a84ce3c2494231db6fd3b5
|
[
"MIT"
] | 43
|
2019-10-16T02:56:13.000Z
|
2022-01-25T02:04:51.000Z
|
bc/utils/__init__.py
|
rjgpinel/rlbc
|
55a7499e4ad10182d9a84ce3c2494231db6fd3b5
|
[
"MIT"
] | 6
|
2019-10-16T03:44:24.000Z
|
2021-06-19T21:59:09.000Z
|
bc/utils/__init__.py
|
rjgpinel/rlbc
|
55a7499e4ad10182d9a84ce3c2494231db6fd3b5
|
[
"MIT"
] | 11
|
2020-03-23T01:47:46.000Z
|
2021-11-25T07:43:25.000Z
|
from .report import Report
from . import videos
| 16
| 26
| 0.791667
| 7
| 48
| 5.428571
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 2
| 27
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8046ad6d9a8257734d7ffc0c589a44d7c637794d
| 250
|
py
|
Python
|
db_connect.py
|
brandon-schabel/web_scraper_playground
|
1ae4e3b96fb5697b3f8cdf8fa03d2c54c43262e0
|
[
"MIT"
] | null | null | null |
db_connect.py
|
brandon-schabel/web_scraper_playground
|
1ae4e3b96fb5697b3f8cdf8fa03d2c54c43262e0
|
[
"MIT"
] | 12
|
2020-09-04T14:35:09.000Z
|
2022-03-08T22:27:48.000Z
|
db_connect.py
|
brandon-schabel/web_scraper_playground
|
1ae4e3b96fb5697b3f8cdf8fa03d2c54c43262e0
|
[
"MIT"
] | null | null | null |
from mongoengine import *
from secrets import db_host, db_port, db_user, db_password
from schemas import Profile
connect('t_database', port=db_port, host=db_host, username=db_user, password=db_password )
def get_profiles():
return Profile.objects
| 31.25
| 90
| 0.808
| 39
| 250
| 4.923077
| 0.512821
| 0.0625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112
| 250
| 8
| 91
| 31.25
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0.039841
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0.333333
| 0.5
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
|
0
| 6
|
338c0d32d73279205660cc399fdf740c48040908
| 180
|
py
|
Python
|
farmers/admin.py
|
qinyanjuidavid/Raki
|
2bca3e6a6f410619c699be8c45cd2cad8aa08e0c
|
[
"MIT"
] | null | null | null |
farmers/admin.py
|
qinyanjuidavid/Raki
|
2bca3e6a6f410619c699be8c45cd2cad8aa08e0c
|
[
"MIT"
] | null | null | null |
farmers/admin.py
|
qinyanjuidavid/Raki
|
2bca3e6a6f410619c699be8c45cd2cad8aa08e0c
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from farmers.models import ProductCategory
from accounts.models import Product
admin.site.register(ProductCategory)
admin.site.register(Product)
| 22.5
| 42
| 0.85
| 23
| 180
| 6.652174
| 0.521739
| 0.156863
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 180
| 7
| 43
| 25.714286
| 0.932927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
339d7b0cb0ef6b311050d45855e1289f86442cca
| 47
|
py
|
Python
|
hello.py
|
ydeath/Hello
|
147d2f36599cfa45dddf621d5b0a6c43a156d72b
|
[
"MIT"
] | null | null | null |
hello.py
|
ydeath/Hello
|
147d2f36599cfa45dddf621d5b0a6c43a156d72b
|
[
"MIT"
] | null | null | null |
hello.py
|
ydeath/Hello
|
147d2f36599cfa45dddf621d5b0a6c43a156d72b
|
[
"MIT"
] | null | null | null |
print("你好")
def add():
return 1+1
add()
| 5.875
| 14
| 0.510638
| 8
| 47
| 3
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.276596
| 47
| 7
| 15
| 6.714286
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0.044444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.5
| 0.25
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
33c55634f10c709ca36cc80a9cac7247cbd479f4
| 1,878
|
py
|
Python
|
synth/tests/input_checks/test_input_checks.py
|
AprilXiaoyanLiu/whitenoise-system
|
0e94d2cc8114b97a61d5d2e45278428f91f1e687
|
[
"MIT"
] | 63
|
2020-03-26T15:26:10.000Z
|
2020-10-22T06:26:38.000Z
|
synth/tests/input_checks/test_input_checks.py
|
AprilXiaoyanLiu/whitenoise-system
|
0e94d2cc8114b97a61d5d2e45278428f91f1e687
|
[
"MIT"
] | 82
|
2020-03-10T17:54:48.000Z
|
2020-10-23T02:11:06.000Z
|
synth/tests/input_checks/test_input_checks.py
|
AprilXiaoyanLiu/whitenoise-system
|
0e94d2cc8114b97a61d5d2e45278428f91f1e687
|
[
"MIT"
] | 15
|
2020-03-10T05:52:14.000Z
|
2020-10-09T09:09:52.000Z
|
import pandas as pd
import numpy as np
from snsynth.pytorch.nn import DPCTGAN, PATECTGAN
from snsynth.preprocessors import DPSSTransformer
eps = 3.0
batch_size = 20
size = 100
pd_data = pd.DataFrame(columns=["A"], data=np.array(np.arange(0.0, size)).T)
np_data = np.array(np.arange(0.0, size)).astype(np.double)
class TestInputChecks:
def test_input_checks_PATECTGAN_np(self):
synth = PATECTGAN(epsilon=eps, batch_size=batch_size)
try:
synth.train(np_data, categorical_columns=[0], transformer=DPSSTransformer)
except ValueError as v:
assert str(v).startswith("It looks like")
return
raise AssertionError("DPCTGAN should have raised a ValueError")
def test_input_checks_PATECTGAN_pd(self):
synth = PATECTGAN(epsilon=eps, batch_size=batch_size)
try:
synth.train(pd_data, categorical_columns=["A"], transformer=DPSSTransformer)
except ValueError as v:
assert str(v).startswith("It looks like")
return
raise AssertionError("DPCTGAN should have raised a ValueError")
def test_input_checks_DPCTGAN_np(self):
synth = DPCTGAN(epsilon=eps, batch_size=batch_size)
try:
synth.train(np_data, categorical_columns=[0], transformer=DPSSTransformer)
except ValueError as v:
assert str(v).startswith("It looks like")
return
raise AssertionError("DPCTGAN should have raised a ValueError")
def test_input_checks_DPCTGAN_pd(self):
synth = DPCTGAN(epsilon=eps, batch_size=batch_size)
try:
synth.train(pd_data, categorical_columns=["A"], transformer=DPSSTransformer)
except ValueError as v:
assert str(v).startswith("It looks like")
return
raise AssertionError("DPCTGAN should have raised a ValueError")
| 37.56
| 88
| 0.677316
| 237
| 1,878
| 5.219409
| 0.244726
| 0.065481
| 0.038804
| 0.058205
| 0.829426
| 0.800323
| 0.800323
| 0.800323
| 0.759903
| 0.759903
| 0
| 0.009015
| 0.232162
| 1,878
| 49
| 89
| 38.326531
| 0.848821
| 0
| 0
| 0.666667
| 0
| 0
| 0.112354
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.095238
| false
| 0
| 0.095238
| 0
| 0.309524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1d134a402fc45bc125b2b0be81bcc29b582b22ee
| 59
|
py
|
Python
|
mangrove/utils/__init__.py
|
mariot/mangrove
|
376bca99818c6cc1d08b1c726f46b364c69b0cc7
|
[
"BSD-3-Clause"
] | null | null | null |
mangrove/utils/__init__.py
|
mariot/mangrove
|
376bca99818c6cc1d08b1c726f46b364c69b0cc7
|
[
"BSD-3-Clause"
] | null | null | null |
mangrove/utils/__init__.py
|
mariot/mangrove
|
376bca99818c6cc1d08b1c726f46b364c69b0cc7
|
[
"BSD-3-Clause"
] | null | null | null |
from google_spreadsheets import GoogleSpreadsheetsClient
| 14.75
| 56
| 0.898305
| 5
| 59
| 10.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 59
| 3
| 57
| 19.666667
| 0.981132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d2d1707eb4e6d8f44377b65afc47847232dfecf
| 30,952
|
py
|
Python
|
jsrows.py
|
codeforamerica/sheltraustin
|
a07ffd4b328a9d961347a85b49c95d8bf5ec1046
|
[
"BSD-3-Clause"
] | null | null | null |
jsrows.py
|
codeforamerica/sheltraustin
|
a07ffd4b328a9d961347a85b49c95d8bf5ec1046
|
[
"BSD-3-Clause"
] | 1
|
2015-08-03T21:27:36.000Z
|
2015-08-03T21:27:36.000Z
|
jsrows.py
|
codeforamerica/sheltraustin
|
a07ffd4b328a9d961347a85b49c95d8bf5ec1046
|
[
"BSD-3-Clause"
] | 1
|
2021-04-17T10:13:29.000Z
|
2021-04-17T10:13:29.000Z
|
location = {loc : [ -97.732517999999999, 30.259606000000002 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'Y', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'Y', 'title': 'Manos De Cristo: Dental Clinic', 'address': '1201 E Cesar Chavez St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.736924400000007, 30.2683207 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'Y', 'private': 'N', 'shelter': 'Y', 'subst_abuse_service': 'N', 'med_facility': 'Y', 'title': 'Salvation Army', 'address': '501 E 8th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.758165000000005, 30.233862999999999 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Aeschbach and Associates', 'address': '2824 S Congress Ave, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.710374700000003, 30.345065900000002 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Austin Drug and Alcohol Abuse Program', 'address': 'Church of Pentecost 7801 N Lamar Blvd # D102, Austin, TX 78752, USA'}
db.austindb.insert(location);
location = {loc : [ -97.676707899999997, 30.338674399999999 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Austin Recovery Inc', 'address': '8402 Cross Park Dr, Austin, TX 78754, USA'}
db.austindb.insert(location);
location = {loc : [ -97.676707899999997, 30.338674399999999 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': , 'address': '8402 Cross Park Dr, Austin, TX 78754, USA'}
db.austindb.insert(location);
location = {loc : [ -97.735171199999996, 30.2756422 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Austin/Travis County Integral Care - ATCIC Residential properties', 'address': '403 E 15th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.726608900000002, 30.258762900000001 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Austin/Travis County Integral Care - CARE Program Journey OTP', 'address': '1631 E 2nd St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.747823999999994, 30.25751 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Austin/Travis County Integral Care - Child and Family properties', 'address': '105 W Riverside Dr, Austin, TX 78704", USA'}
db.austindb.insert(location);
location = {loc : [ -97.726650199999995, 30.369487299999999 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Changes Counseling properties', 'address': 'Accident & Rehab Clinic, 8711 Burnet Rd # A16, Austin, TX 78757", USA'}
db.austindb.insert(location);
location = {loc : [ -97.689818000000002, 30.334695 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Clean Investments Counseling Center', 'address': 'Life Changing Minitries International, 1212 E Anderson Ln, Austin, TX 78752", USA'}
db.austindb.insert(location);
location = {loc : [ -97.790892999999997, 30.229835000000001 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Developmental Counseling Center Inc', 'address': '2101 W Ben White Blvd, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.712445000000002, 30.390044 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'La Haciendas Solutions', 'address': '2100 Kramer Ln, Austin, TX 78758, USA'}
db.austindb.insert(location);
location = {loc : [ -97.686018899999993, 30.3761954 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Maintenance and Recovery properties Inc', 'address': '305 Ferguson Dr, Austin, TX 78753, USA'}
db.austindb.insert(location);
location = {loc : [ -97.803529999999995, 30.225010000000001 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Maintenance and Recovery properties Inc - South', 'address': '2627 Jones Rd, Austin, TX 78745, USA'}
db.austindb.insert(location);
location = {loc : [ -97.795706899999999, 30.455245000000001 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Northwest Counseling and Wellness Ctr', 'address': '12335 Hymeadow Dr, Austin, TX 78750, USA'}
db.austindb.insert(location);
location = {loc : [ -97.756614999999996, 30.244605 ], 'mental_health': 'N', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Phoenix Academy of Austin', 'address': 'Phoenix House, 400 W Live Oak St, Austin, TX 78704", USA'}
db.austindb.insert(location);
location = {loc : [ -97.680945399999999, 30.496313700000002 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Veterans Administration', 'address': '1500 S Ih 35, Round Rock, TX 78681, USA'}
db.austindb.insert(location);
location = {loc : [ -97.748383000000004, 30.306041 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Seton Shoal Creek Hospital', 'address': 'Seton Shoal Creek Hospital: Carchedi Lisa R MD, 3501 Mills Ave, Austin, TX 78731", USA'}
db.austindb.insert(location);
location = {loc : [ -97.736207100000001, 30.305842999999999 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Austin State Hospital', 'address': '4110 Guadalupe St, Austin, TX 78705, USA'}
db.austindb.insert(location);
location = {loc : [ -97.669453000000004, 30.336068999999998 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Lutheran Soc Srvs of the South', 'address': '8305 Cross Park Dr, Austin, TX 78710, USA'}
db.austindb.insert(location);
location = {loc : [ -97.715613099999999, 30.360574700000001 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'HILL COUNTRY COUNSELING', 'address': '1433 Fairfield Dr, Austin, TX 78758, USA'}
db.austindb.insert(location);
location = {loc : [ -97.742264199999994, 30.312096499999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Volunteer Healthcare Clinic', 'address': '4215 Medical Pkwy, Austin, TX 78756, USA'}
db.austindb.insert(location);
location = {loc : [ -97.675267500000004, 30.402934500000001 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care William Cannon', 'address': '10500 N IH 35, Austin, TX 78753, USA'}
db.austindb.insert(location);
location = {loc : [ -97.622220999999996, 30.175242999999998 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': , 'address': '5301 Ross Rd, Del Valle, TX 78617, USA'}
db.austindb.insert(location);
location = {loc : [ -97.6979896, 30.3635187 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Rundberg', 'address': '801-833 W Rundberg Ln, Austin, TX 78753, USA'}
db.austindb.insert(location);
location = {loc : [ -97.734545900000001, 30.252542399999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care properties Department', 'address': '15 Waller St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.760810899999996, 30.239539499999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care South Austin', 'address': 'South Austin Neighborhood Center, 2529 S 1st St, Austin, TX 78704", USA'}
db.austindb.insert(location);
location = {loc : [ -97.710158300000003, 30.2648969 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Rosewood-Zaragosa', 'address': '2802 Webberville Rd, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.742783000000003, 30.276062 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Right To Sight Clinic At The First United Methodist Church Family Life Center', 'address': '1300 Lavaca St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Red River', 'address': 'Heart Smart CPR, 152 Hoot Owl Ln N, Leander, TX 78641", USA'}
db.austindb.insert(location);
location = {loc : [ -97.894009800000006, 30.249758199999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Oak Hill', 'address': '8600 State Highway 71, Austin, TX 78735, USA'}
db.austindb.insert(location);
location = {loc : [ -97.664190700000006, 30.313778200000002 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Northeast', 'address': 'Community Care Northeast, 7112 Ed Bluestein Blvd, Austin, TX 78723", USA'}
db.austindb.insert(location);
location = {loc : [ -97.703492999999995, 30.2248871 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Montopolis', 'address': '6301 E Riverside Dr, Austin, TX 78741, USA'}
db.austindb.insert(location);
location = {loc : [ -97.742464999999996, 30.286525999999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Lifeworks', 'address': '408 W 23rd St, Austin, TX 78705, USA'}
db.austindb.insert(location);
location = {loc : [ -97.727476899999999, 30.259702000000001 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care East Austin', 'address': 'East Austin Neighborhood Center, 211 Comal St, Austin, TX 78702", USA'}
db.austindb.insert(location);
location = {loc : [ -97.713483199999999, 30.305188399999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care David Powell', 'address': '4614 Interstate 35 Frontage Rd, Austin, TX 78751, USA'}
db.austindb.insert(location);
location = {loc : [ -97.737620000000007, 30.267955000000001 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care Austin Resource Center For The Homeless', 'address': '500 E 7th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.778486000000001, 30.227219399999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Ben White Health Clinic', 'address': 'Machen Robert L Dds, 1221 W Ben White Blvd, Austin, TX 78704", USA'}
db.austindb.insert(location);
location = {loc : [ -97.732953199999997, 30.2599199 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Travis County Healthcare District', 'address': '1111 E Cesar Chavez St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -71.955223399999994, 42.376135400000003 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': '', 'shelter': 'Y', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Safe Place', 'address': 'Safe Place, 285 Main St, Rutland, MA 01543", USA'}
db.austindb.insert(location);
location = {loc : [ -97.7618188, 30.255495499999999 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Austin Stress Clinic', 'address': '1002-1108 S Lamar Blvd, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.4866423, 35.444530800000003 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': , 'address': '1983-2227 S I-35 Service Rd, Oklahoma City, OK 73129, USA'}
db.austindb.insert(location);
location = {loc : [ -97.802170599999997, 30.199120499999999 ], 'mental_health': 'Y', 'med_service': 'Y', 'food': 'Y', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'El Buen Samaritano Episcopal Mission', 'address': '7000 Woodhue Dr, Austin, TX 78745, USA'}
db.austindb.insert(location);
location = {loc : [ -97.675409400000007, 30.404841099999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': , 'address': '12520 N IH 35, Austin, TX 78753, USA'}
db.austindb.insert(location);
location = {loc : [ -97.778486000000001, 30.227219399999999 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Behavioral Health at Ben White Health Clinic', 'address': 'Machen Robert L Dds, 1221 W Ben White Blvd, Austin, TX 78704", USA'}
db.austindb.insert(location);
location = {loc : [ -97.733926699999998, 30.274405600000001 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Brackenridge Specialty Clinic', 'address': '601 E 15th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.733711999999997, 30.272894999999998 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Blackstock Family Health Center', 'address': '1313 Red River St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': 'Y', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Paul Bass Clinic', 'address': 'Heart Smart CPR, 152 Hoot Owl Ln N, Leander, TX 78641", USA'}
db.austindb.insert(location);
location = {loc : [ -95.646563, 29.7278673 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'Austin Lakes Hospital - Inpatient', 'address': '32, Houston, TX 77082, USA'}
db.austindb.insert(location);
location = {loc : [ -97.719537000000003, 30.296693699999999 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Austin Lakes Hospital - Outpatient', 'address': 'William E. Mccaleb, MD, 1009 E 40th St, Austin", TX 78751, USA'},
db.austindb.insert(location);
location = {loc : [ -97.733926699999998, 30.274405600000001 ], 'mental_health': 'Y', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': 'University Medical Center Brackenridge', 'address': '601 E 15th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.7456739, 30.298745 ], 'mental_health': 'Y', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': , 'address': '900 W 30th St, Austin, TX 78705, USA'}
db.austindb.insert(location);
location = {loc : [ -97.774368100000004, 30.227244200000001 ], 'mental_health': 'Y', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'Y', 'med_facility': 'N', 'title': , 'address': '901 W Ben White Blvd, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.742783000000003, 30.276062 ], 'mental_health': '-', 'med_service': '-', 'food': 'N - breakfast ', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'First United Methodist Church and Foundation for the Homeless ', 'address': '1300 Lavaca St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.737920900000006, 30.2674056 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Caritas of Austin', 'address': '611 Neches St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.7399901, 30.268265499999998 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Trinity Center ', 'address': '304 E 7th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.736924400000007, 30.2683207 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': 'Y', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Salvation Army', 'address': '501 E 8th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.712749599999995, 30.308918800000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': 'Y- dental clinic', 'title': 'Manos de Cristo', 'address': '4911 Harmon Ave, Austin, TX 78751, USA'}
db.austindb.insert(location);
location = {loc : [ -97.845413899999997, 30.217801900000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Abiding Love Lutheran Church', 'address': '7210 Brush Country Rd, Austin, TX 78749, USA'}
db.austindb.insert(location);
location = {loc : [ -97.682717999999994, 30.296621999999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Alpha Seventh Day Adventist', 'address': 'Alpha Seventh-Day Adventist, 3016 E 51st St, Austin, TX 78723", USA'}
db.austindb.insert(location);
location = {loc : [ -97.707926999999998, 30.344186000000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Austin First Seveneth Day Adventist Chruch', 'address': 'Austin Adventist Jr Academy, 301 W Anderson Ln, Austin, TX 78752", USA'}
db.austindb.insert(location);
location = {loc : [ -97.693098300000003, 30.331810999999998 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'St. John Community Center', 'address': '7500-7598 Blessing Ave, Austin, TX 78752, USA'}
db.austindb.insert(location);
location = {loc : [ -97.784239200000002, 30.1760631 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Capital Area Food Bank', 'address': '8201 S Congress Ave, Austin, TX 78745, USA'}
db.austindb.insert(location);
location = {loc : [ -97.775845599999997, 30.249256200000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Community of Christ', 'address': '2132 Bluebonnet Ln, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.7322068, 30.448119800000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Dolores Catholic Church', 'address': '5720 McNeil Dr, Austin, TX 78729, USA'}
db.austindb.insert(location);
location = {loc : [ -97.693916000000002, 30.312491000000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Eastside Community Connection', 'address': '5810 Berkman Dr, Austin, TX 78723, USA'}
db.austindb.insert(location);
// Fix: stray quote characters inside the coordinate array made this statement a
// syntax error ( -84.29...007', '33.35...998 ). The numbers themselves match the
// Hampton, GA address, so only the quotes are removed.
location = {loc : [ -84.296052900000007, 33.352277999999998 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Hope Lutheran Church', 'address': '4852-4898 Bear Creek Blvd, Hampton, GA 30228, USA'}
db.austindb.insert(location);
location = {loc : [ -97.732231999999996, 30.302424999999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Hype Park Baptist Church/Church Under the Bridge ', 'address': '3901 Speedway, Austin, TX 78751, USA'}
db.austindb.insert(location);
location = {loc : [ -97.761820999999998, 30.220492 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'La Iglesia Del Senor', 'address': '209 E Ben White Blvd, Austin, TX 78704, USA'}
db.austindb.insert(location);
// Fix: the original had `'title': ,` (missing value), a syntax error. An empty
// string is used as placeholder, matching how other rows in this script mark
// unknown fields (e.g. 'med_facility': ''). TODO: fill in the real facility name.
location = {loc : [ -97.699969899999999, 30.353883 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': '', 'address': '202 W Elliott St, Austin, TX 78753, USA'}
db.austindb.insert(location);
location = {loc : [ -97.706166899999999, 30.254745499999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Melas on Wheels and More H.O.P.E. Food Pantries', 'address': '3227 E 5th St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.727554999999995, 30.267005000000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Our Lady of Guadalupe Catholic Church', 'address': '1206 E 9th St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.708208999999997, 30.362287999999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Principe de Paz', 'address': '1204 Payton Gin Rd, Austin, TX 78758, USA'}
db.austindb.insert(location);
location = {loc : [ -97.601757199999994, 30.252694000000002 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'River Road Baptist Church', 'address': '12825-13103 Farm to Market 969, Austin, TX 78724, USA'}
db.austindb.insert(location);
location = {loc : [ -97.719566900000004, 30.269553200000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Rosewood Avenue Missionary Baptist Church', 'address': '1807 Rosewood Ave, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.736924400000007, 30.2683207 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': 'Y', 'title': 'Salvation Army', 'address': '501 E 8th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
// Fix: the original had `'title': ,` (missing value), a syntax error. An empty
// string is used as placeholder, consistent with other unknown fields in this
// script. TODO: fill in the real facility name for 4413 Tannehill Ln.
location = {loc : [ -97.671834099999998, 30.282588499999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': 'Y', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': '', 'address': '4413 Tannehill Ln, Austin, TX 78721, USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': '-', 'med_service': '-', 'food': '', 'private': 'Y', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'SafePlace', 'address': 'Heart Smart CPR, 152 Hoot Owl Ln N, Leander, TX 78641", USA'}
db.austindb.insert(location);
location = {loc : [ -97.701123699999997, 30.259935299999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': 'Y', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Casa Marianella', 'address': '821 Gunter St, Austin, TX 78702, USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': '-', 'med_service': '-', 'food': '', 'private': 'Y', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Posada Esperanza', 'address': 'Heart Smart CPR, 152 Hoot Owl Ln N, Leander, TX 78641", USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': 'Y', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Foundation for the Homeless', 'address': 'Heart Smart CPR, 152 Hoot Owl Ln N, Leander, TX 78641", USA'}
db.austindb.insert(location);
location = {loc : [ -97.767988399999993, 30.228786899999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Lifeworks', 'address': '3700 S 1st St, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.764717000000005, 30.2342333 ], 'mental_health': '-', 'med_service': '-', 'food': '-', 'private': 'Y', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Foundation Communities', 'address': '3036 S 1st St, Austin, TX 78704, USA'}
db.austindb.insert(location);
location = {loc : [ -97.7416068, 30.323559400000001 ], 'mental_health': '-', 'med_service': '-', 'food': '-', 'private': 'Y', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Family Eldercare', 'address': '2210 Hancock Dr, Austin, TX 78756, USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': '-', 'med_service': '-', 'food': '-', 'private': 'Y', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'St. Louise House', 'address': 'Heart Smart CPR, 152 Hoot Owl Ln N, Leander, TX 78641", USA'}
db.austindb.insert(location);
location = {loc : [ -97.7399901, 30.268265499999998 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Trinity Center ', 'address': '304 E 7th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
location = {loc : [ -97.722403900000003, 30.280997200000002 ], 'mental_health': '-', 'med_service': '-', 'food': '-', 'private': 'Y', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '-', 'title': 'Blackland Communtiy Development ', 'address': '2005 Salina St, Austin, TX 78722, USA'}
db.austindb.insert(location);
location = {loc : [ -97.698698399999998, 30.333442300000002 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Community care AK Black', 'address': '928 Blackson Ave, Austin, TX 78752, USA'}
db.austindb.insert(location);
location = {loc : [ -97.711173000000002, 30.283055000000001 ], 'mental_health': 'Y', 'med_service': 'N', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'Capital Area Mental Health', 'address': '2824 Real St, Austin, TX 78722, USA'}
db.austindb.insert(location);
location = {loc : [ -97.8631946, 30.228828499999999 ], 'mental_health': 'N', 'med_service': 'Y', 'food': 'N', 'private': 'N', 'shelter': 'N', 'subst_abuse_service': 'N', 'med_facility': 'N', 'title': 'NextCare Urgent Care', 'address': '6001 W William Cannon Dr, Austin, TX 78749, USA'}
db.austindb.insert(location);
location = {loc : [ -97.762900000000002, 30.436649500000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Gateway Community Church', 'address': '7104 McNeil Dr, Austin, TX 78729, USA'}
db.austindb.insert(location);
location = {loc : [ -97.742580000000004, 30.285502000000001 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'Micah 6 Food Pantries', 'address': 'University Presbyterian Church, 2203 San Antonio St, Austin, TX 78705", USA'}
db.austindb.insert(location);
location = {loc : [ -97.710788300000004, 30.375915899999999 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '-', 'shelter': '-', 'subst_abuse_service': '-', 'med_facility': '', 'title': 'North Austin Christian Church ', 'address': '1734 Rutland Dr, Austin, TX 78758, USA'}
db.austindb.insert(location);
location = {loc : [ -97.743060799999995, 30.267153 ], 'mental_health': '-', 'med_service': '-', 'food': 'Y', 'private': '', 'shelter': 'Y', 'subst_abuse_service': '-', 'med_facility': 'Y - there is a separate CommUnity Care health Clinic', 'title': 'Austin Resource Center for the Homeless (ARCH) - managed by Front Steps ', 'address': '500 E 7th St, Austin, TX 78701, USA'}
db.austindb.insert(location);
| 162.052356
| 374
| 0.655111
| 3,937
| 30,952
| 5.029718
| 0.153924
| 0.058176
| 0.081557
| 0.091152
| 0.715483
| 0.710686
| 0.707201
| 0.703515
| 0.671245
| 0.669427
| 0
| 0.138404
| 0.115986
| 30,952
| 191
| 375
| 162.052356
| 0.585301
| 0
| 0
| 0.5
| 0
| 0
| 0.505008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1d857b7a050e752f38249ab3964c756e0e4c6ecd
| 41
|
py
|
Python
|
accounts/admin.py
|
sahilr05/sitechecker
|
9c5d99109cfe2cbf46eb9f3cb397fc0f3505f039
|
[
"MIT"
] | 2
|
2021-02-21T09:24:54.000Z
|
2021-05-12T18:42:37.000Z
|
accounts/admin.py
|
sahilr05/sitechecker
|
9c5d99109cfe2cbf46eb9f3cb397fc0f3505f039
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
sahilr05/sitechecker
|
9c5d99109cfe2cbf46eb9f3cb397fc0f3505f039
|
[
"MIT"
] | null | null | null |
from django.contrib import admin # NOQA
| 20.5
| 40
| 0.780488
| 6
| 41
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 41
| 1
| 41
| 41
| 0.941176
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d54c6db149f6401a788e181c4dc27753be8b1fc7
| 2,967
|
py
|
Python
|
sdk/python/pulumi_azure_native/web/v20150801/__init__.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/web/v20150801/__init__.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/web/v20150801/__init__.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from ... import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .certificate import *
from .certificate_csr import *
from .get_certificate import *
from .get_certificate_csr import *
from .get_hosting_environment import *
from .get_managed_hosting_environment import *
from .get_server_farm import *
from .get_site import *
from .get_site_deployment import *
from .get_site_deployment_slot import *
from .get_site_host_name_binding import *
from .get_site_host_name_binding_slot import *
from .get_site_instance_deployment import *
from .get_site_instance_deployment_slot import *
from .get_site_logs_config import *
from .get_site_relay_service_connection import *
from .get_site_relay_service_connection_slot import *
from .get_site_slot import *
from .get_site_slot_config_names import *
from .get_site_source_control import *
from .get_site_source_control_slot import *
from .get_site_vnet_connection import *
from .get_site_vnet_connection_slot import *
from .hosting_environment import *
from .list_site_app_settings import *
from .list_site_app_settings_slot import *
from .list_site_auth_settings import *
from .list_site_auth_settings_slot import *
from .list_site_backup_configuration import *
from .list_site_backup_configuration_slot import *
from .list_site_backup_status_secrets import *
from .list_site_backup_status_secrets_slot import *
from .list_site_connection_strings import *
from .list_site_connection_strings_slot import *
from .list_site_metadata import *
from .list_site_metadata_slot import *
from .list_site_publishing_credentials import *
from .list_site_publishing_credentials_slot import *
from .managed_hosting_environment import *
from .server_farm import *
from .server_farm_route_for_vnet import *
from .site import *
from .site_app_settings import *
from .site_app_settings_slot import *
from .site_auth_settings import *
from .site_auth_settings_slot import *
from .site_backup_configuration import *
from .site_backup_configuration_slot import *
from .site_connection_strings import *
from .site_connection_strings_slot import *
from .site_deployment import *
from .site_deployment_slot import *
from .site_host_name_binding import *
from .site_host_name_binding_slot import *
from .site_instance_deployment import *
from .site_instance_deployment_slot import *
from .site_logs_config import *
from .site_metadata import *
from .site_metadata_slot import *
from .site_relay_service_connection import *
from .site_relay_service_connection_slot import *
from .site_slot import *
from .site_slot_config_names import *
from .site_source_control import *
from .site_source_control_slot import *
from .site_vnet_connection import *
from .site_vnet_connection_slot import *
from ._inputs import *
from . import outputs
| 38.038462
| 80
| 0.834176
| 433
| 2,967
| 5.286374
| 0.17552
| 0.301442
| 0.159021
| 0.118829
| 0.810398
| 0.570118
| 0.138488
| 0
| 0
| 0
| 0
| 0.000379
| 0.109875
| 2,967
| 77
| 81
| 38.532468
| 0.866339
| 0.068419
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d58a3c01fcf5f3a56967acb56a0aa33122e205fa
| 78
|
py
|
Python
|
nordestao/tests/fixtures.py
|
intelivix/pyne-workshop-scraping-web
|
c0696b669934eef2dbda81da3b7c058810041fa5
|
[
"MIT"
] | null | null | null |
nordestao/tests/fixtures.py
|
intelivix/pyne-workshop-scraping-web
|
c0696b669934eef2dbda81da3b7c058810041fa5
|
[
"MIT"
] | null | null | null |
nordestao/tests/fixtures.py
|
intelivix/pyne-workshop-scraping-web
|
c0696b669934eef2dbda81da3b7c058810041fa5
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.fixture
def fixture_exemplo():
    """Example fixture: provides a constant, non-None string to tests."""
    sentinel = 'not none'
    return sentinel
| 9.75
| 22
| 0.717949
| 10
| 78
| 5.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 78
| 7
| 23
| 11.142857
| 0.873016
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
638c0be826b6716fae5d37c58132d62b7412ca00
| 151
|
py
|
Python
|
unecorn/admin.py
|
anast239/unecorn
|
8a874f8cced013668dd29398176a8fb60cc04057
|
[
"MIT"
] | null | null | null |
unecorn/admin.py
|
anast239/unecorn
|
8a874f8cced013668dd29398176a8fb60cc04057
|
[
"MIT"
] | null | null | null |
unecorn/admin.py
|
anast239/unecorn
|
8a874f8cced013668dd29398176a8fb60cc04057
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from unecorn.models import *
# Register the app's models with the default admin site so they become
# editable through Django's admin interface. Registered with no custom
# ModelAdmin, so the default change-list/change-form behavior is used.
admin.site.register(Discount)
admin.site.register(Category)
admin.site.register(Company)
| 25.166667
| 32
| 0.827815
| 21
| 151
| 5.952381
| 0.571429
| 0.216
| 0.408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072848
| 151
| 6
| 33
| 25.166667
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
63a01e092685c700f3b8dba69aaebf5daf1ae324
| 67
|
py
|
Python
|
supplychainpy/reporting/blueprints/contact/templates/__init__.py
|
luisccalves/supplychainpy
|
63a10b77ffdcc5bca71e815c70667c819d8f9af0
|
[
"BSD-3-Clause"
] | 231
|
2016-05-30T02:34:45.000Z
|
2022-03-28T17:00:29.000Z
|
supplychainpy/reporting/blueprints/contact/templates/__init__.py
|
luisccalves/supplychainpy
|
63a10b77ffdcc5bca71e815c70667c819d8f9af0
|
[
"BSD-3-Clause"
] | 77
|
2016-03-23T16:28:34.000Z
|
2021-09-30T22:08:03.000Z
|
supplychainpy/reporting/blueprints/contact/templates/__init__.py
|
luisccalves/supplychainpy
|
63a10b77ffdcc5bca71e815c70667c819d8f9af0
|
[
"BSD-3-Clause"
] | 103
|
2016-08-10T19:53:09.000Z
|
2022-03-16T16:34:38.000Z
|
from supplychainpy.reporting.blueprints.contact.views import about
| 33.5
| 66
| 0.880597
| 8
| 67
| 7.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 67
| 1
| 67
| 67
| 0.936508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
63a73b69fb793b77e0ad6523894415a1d427ed75
| 172
|
py
|
Python
|
any_response_redirect_middleware/tests/__init__.py
|
hzdg/django-any-response-redirect-middleware
|
1b832a35b14566b0c7aa99e17cfcdbe4835a269c
|
[
"MIT"
] | null | null | null |
any_response_redirect_middleware/tests/__init__.py
|
hzdg/django-any-response-redirect-middleware
|
1b832a35b14566b0c7aa99e17cfcdbe4835a269c
|
[
"MIT"
] | null | null | null |
any_response_redirect_middleware/tests/__init__.py
|
hzdg/django-any-response-redirect-middleware
|
1b832a35b14566b0c7aa99e17cfcdbe4835a269c
|
[
"MIT"
] | null | null | null |
from .basic import BasicRedirectMiddlewareTests
from .django_cms import RedirectMiddlewareTestsWithDjangoCMS
from .wagtail_cms import RedirectMiddlewareTestsWithWagtailCMS
| 43
| 62
| 0.912791
| 14
| 172
| 11.071429
| 0.642857
| 0.116129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 172
| 3
| 63
| 57.333333
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
63ba13266acfe77823f64100997d3843e282a41e
| 9,342
|
py
|
Python
|
features_extractions.py
|
AIKICo/Steganalysis-By-Frame
|
c2e1a20664056eb723c694949119a26f7fb6cfbc
|
[
"Apache-2.0"
] | 1
|
2019-03-25T07:20:05.000Z
|
2019-03-25T07:20:05.000Z
|
features_extractions.py
|
MohMehrnia/Steganalysis-By-Frame
|
c2e1a20664056eb723c694949119a26f7fb6cfbc
|
[
"Apache-2.0"
] | 1
|
2020-01-29T07:12:02.000Z
|
2020-01-29T07:12:02.000Z
|
features_extractions.py
|
MohMehrnia/Steganalysis-By-Frame
|
c2e1a20664056eb723c694949119a26f7fb6cfbc
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import math as ma
import os
from pywt import wavedec
from pyeeg import hfd, pfd
from scipy.io import wavfile as wav
from python_speech_features.sigproc import framesig
from python_speech_features import mfcc, fbank, logfbank
from python_speech_features.base import delta
from pandas import DataFrame
from audiolazy.lazy_lpc import lpc
def katz(data, n):
    """Katz-style fractal dimension estimate of a 1-D signal.

    `data` is a 1-D numeric sequence; `n` is the scalar fed to the log
    terms of the formula. Returns log10(n) / (log10(d/L) + log10(n)),
    where L is the total path length of the signal (unit x-steps) and
    d is the maximum distance from the first sample.
    """
    # Total curve length: sum of segment lengths with unit horizontal step.
    path_length = np.sum(np.hypot(np.diff(data), 1))
    # Diameter: farthest Euclidean distance from the first point.
    diameter = np.max(np.hypot(data - data[0], np.arange(len(data))))
    log_n = ma.log10(n)
    return log_n / (ma.log10(diameter / path_length) + log_n)
def get_fractalfeatues(path, csv, label):
    """Extract fractal-dimension features from every WAV file in `path`.

    Each file is split into frames (length 1000, step 500); per frame,
    Higuchi FD, Katz FD and Petrosian FD values are collected, the class
    `label` is appended as the last column, and all rows are appended
    (mode='a') to the CSV at `csv` with no header or index.
    """
    root, dirs, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        # Windows-style path join; assumes `path` has no trailing backslash.
        rate, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            # Higuchi fractal dimension for k_max = 2..11.
            for i in range(2,12):
                frame_features.append(hfd(frame,i))
            # Katz fractal dimension for n = 2..11.
            for i in range(2,12):
                frame_features.append(katz(frame,i))
            # NOTE(review): pfd's second argument here is `frame+i` (an
            # element-wise shifted copy of the frame), unlike hfd/katz which
            # receive the scalar i — confirm this is intended and not a typo.
            for i in range(2, 12):
                frame_features.append(pfd(frame, frame+i))
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
def get_mfccfeatues(path, csv, label):
    """Extract MFCC features from every WAV file in `path`.

    Frames each file (length 1000, step 500), computes 30 cepstral
    coefficients per frame, appends `label` as the last column, and
    appends the rows (mode='a', no header/index) to the CSV at `csv`.
    """
    root, dirs, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        rate, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            mfcc_features = mfcc(frame, rate, numcep=30)
            # NOTE(review): only row [1] of the MFCC matrix (the second
            # sub-frame mfcc() produces for this window) is kept — confirm
            # that discarding the other rows is intentional.
            for i in range(0, len(mfcc_features[1])):
                frame_features.append(mfcc_features[1][i])
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
    print(np.asarray(features_vector).shape)
def get_deltamfccfeatues(path, csv, label):
    """Extract delta-MFCC features from every WAV file in `path`.

    Same pipeline as get_mfccfeatues, but the MFCC matrix is passed
    through delta() (window parameter 33) before row [1] is kept.
    Rows, with `label` appended, are appended to the CSV at `csv`.
    """
    root, dirs, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        rate, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            mfcc_features = mfcc(frame, rate, numcep=30)
            # Differential (delta) coefficients over the MFCC matrix; 33 is
            # the preceding/following-frame span given to delta().
            mfcc_features = delta(mfcc_features, 33)
            # NOTE(review): as in get_mfccfeatues, only row [1] is kept.
            for i in range(0, len(mfcc_features[1])):
                frame_features.append(mfcc_features[1][i])
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
    print(np.asarray(features_vector).shape)
def get_fbankfeatues(path, csv, label):
    """Extract Mel filter-bank energy features from every WAV in `path`.

    Frames each file (length 1000, step 500); per frame, fbank() is
    called and one row of its feature matrix is kept. Rows, with
    `label` appended, are appended (mode='a') to the CSV at `csv`.
    """
    root, dirs, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        rate, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            # fbank() returns (features, energies); FBank[0] is the feature
            # matrix, of which only row [1] is kept here.
            FBank= fbank(frame, rate)
            # NOTE(review): keeping only FBank[0][1] mirrors the MFCC
            # functions — confirm discarding other rows is intentional.
            for i in range(0, len(FBank[0][1])):
                frame_features.append(FBank[0][1][i])
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
    print(np.asarray(features_vector).shape)
def get_logfbankfeatues(path, csv, label):
    """Extract log Mel filter-bank features from every WAV in `path`.

    Frames each file (length 1000, step 500); per frame, logfbank() is
    called and its first row is kept. Rows, with `label` appended, are
    appended (mode='a', no header/index) to the CSV at `csv`.
    """
    root, dirs, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        rate, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            LogFBank = logfbank(frame, rate)
            # Only the first row [0] of the log filter-bank matrix is kept
            # (note: row [1] is used in the other feature functions).
            for i in range(0, len(LogFBank[0])):
                frame_features.append(LogFBank[0][i])
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
    print(np.asarray(features_vector).shape)
def get_wavelet(path, csv, label):
    """Extract level-5 'db1' wavelet approximation features from WAVs in `path`.

    Frames each file (length 1000, step 500); per frame, a 5-level
    Daubechies-1 wavelet decomposition is computed and only the final
    approximation coefficients (cA) are kept — the detail coefficients
    cD1..cD5 are unpacked but unused. Rows, with `label` appended, are
    appended (mode='a', no header/index) to the CSV at `csv`.
    """
    root, _, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        # Sample rate from wav.read is discarded; only samples are needed.
        _, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            (cA, cD5, cD4, cD3, cD2, cD1) = wavedec(frame,'db1', level=5)
            for i in range(0, len(cA)):
                frame_features.append(cA[i])
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
    print(np.asarray(features_vector).shape)
def get_lpc(path, csv, label):
    """Extract LPC residual features from every WAV file in `path`.

    Frames each file (length 1000, step 500); per frame, an order-5 LPC
    analysis filter is fitted (covariance method) and applied back to
    the frame, and the first 32 residual samples are kept. Rows, with
    `label` appended, are appended (mode='a') to the CSV at `csv`.
    """
    root, _, files = next(os.walk(path))
    fractal_features =[]
    features_vector = DataFrame()
    audio_counter = 1  # 1-based progress counter printed per file
    for file in files:
        _, data = wav.read(root + '\\' + file)
        frames = framesig(data, 1000, 500)
        for frame in frames:
            frame_features = []
            # Order-5 LPC analysis filter (covariance method) from audiolazy.
            analysis_filt = lpc.covar(frame, 5)
            # Filtering the frame with its own analysis filter yields the
            # prediction residual (lazy stream, materialized via list()).
            residual = list(analysis_filt(frame))
            # Only the first 32 residual samples are kept; assumes frames
            # always have at least 32 samples — TODO confirm for short tails.
            for i in range(0, 32):
                frame_features.append(residual[i])
            frame_features.append(label)
            fractal_features.append(frame_features)
        print(str(audio_counter) + '==>' + file)
        audio_counter += 1
    features_vector = DataFrame(fractal_features)
    features_vector.to_csv(csv, index=False, header=False, mode='a')
    print(np.asarray(features_vector).shape)
if __name__ == '__main__':
    # Build wavelet-feature CSVs: the Normal (clean) corpus is written with
    # label 0, and each StegHide embedding-rate corpus (7/21/42/71/100) with
    # label 1, appended into the matching per-rate CSV.
    #
    # Fix: the output paths originally contained a lone backslash before
    # 'Fractal' ('...\Dataset\Fractal\\...'). '\F' is an invalid escape that
    # Python currently preserves literally (so the value is unchanged) but
    # warns about and will eventually reject; it is now written as '\\F'.
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Normal',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-7.csv', 0)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Normal',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-21.csv', 0)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Normal',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-42.csv', 0)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Normal',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-71.csv', 0)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Normal',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-100.csv', 0)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\NoiseData\\StegHide\\7',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-7.csv', 1)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\NoiseData\\StegHide\\21',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-21.csv', 1)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\NoiseData\\StegHide\\42',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-42.csv', 1)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\NoiseData\\StegHide\\71',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-71.csv', 1)
    get_wavelet('D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\NoiseData\\StegHide\\100',
                'D:\\MySourceCodes\\Projects-Python\\Steganalysis-By-Frame\\SteganalysisDatasets\\Dataset\\Fractal\\noisywavlet-Features-steghide-100.csv', 1)
| 45.130435
| 165
| 0.652644
| 1,104
| 9,342
| 5.384964
| 0.115036
| 0.065601
| 0.074012
| 0.094197
| 0.866274
| 0.864256
| 0.861733
| 0.861733
| 0.853659
| 0.848108
| 0
| 0.020759
| 0.216228
| 9,342
| 206
| 166
| 45.349515
| 0.791177
| 0
| 0
| 0.694444
| 0
| 0.083333
| 0.251392
| 0.245717
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0
| 0.061111
| 0
| 0.111111
| 0.072222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8934c9215b91bf26ad0e501197779b64c284834c
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_poppy/na_poppy_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_poppy/na_poppy_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_poppy/na_poppy_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# One empty placeholder rating model per NA Poppy (Jungle) matchup.
# The original generated file spelled out an identical two-line
# ``class NA_Poppy_Jng_X(Ratings): pass`` statement for every champion
# below; this loop creates the exact same set of module-level classes
# (same names, same ``Ratings`` base) without ~280 lines of duplication.
_MATCHUP_PREFIX = "NA_Poppy_Jng_"
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie",
    "Ashe", "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum",
    "Caitlyn", "Camille", "Cassiopeia", "Chogath", "Corki", "Darius",
    "Diana", "Draven", "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal",
    "Fiddlesticks", "Fiora", "Fizz", "Galio", "Gangplank", "Garen", "Gnar",
    "Gragas", "Graves", "Hecarim", "Heimerdinger", "Illaoi", "Irelia",
    "Ivern", "Janna", "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx",
    "Kalista", "Karma", "Karthus", "Kassadin", "Katarina", "Kayle", "Kayn",
    "Kennen", "Khazix", "Kindred", "Kled", "KogMaw", "Leblanc", "LeeSin",
    "Leona", "Lissandra", "Lucian", "Lulu", "Lux", "Malphite", "Malzahar",
    "Maokai", "MasterYi", "MissFortune", "MonkeyKing", "Mordekaiser",
    "Morgana", "Nami", "Nasus", "Nautilus", "Nidalee", "Nocturne", "Nunu",
    "Olaf", "Orianna", "Ornn", "Pantheon", "Poppy", "Quinn", "Rakan",
    "Rammus", "RekSai", "Renekton", "Rengar", "Riven", "Rumble", "Ryze",
    "Sejuani", "Shaco", "Shen", "Shyvana", "Singed", "Sion", "Sivir",
    "Skarner", "Sona", "Soraka", "Swain", "Syndra", "TahmKench", "Taliyah",
    "Talon", "Taric", "Teemo", "Thresh", "Tristana", "Trundle",
    "Tryndamere", "TwistedFate", "Twitch", "Udyr", "Urgot", "Varus",
    "Vayne", "Veigar", "Velkoz", "Vi", "Viktor", "Vladimir", "Volibear",
    "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo", "Yorick", "Zac",
    "Zed", "Ziggs", "Zilean", "Zyra",
)

def _define_matchup_classes():
    """Create one empty ``Ratings`` subclass per champion at module level."""
    for _champion in _CHAMPIONS:
        _cls_name = _MATCHUP_PREFIX + _champion
        # __module__ is set explicitly so the generated classes are
        # indistinguishable (for pickling/introspection) from the
        # statically declared originals.
        globals()[_cls_name] = type(_cls_name, (Ratings,), {"__module__": __name__})

_define_matchup_classes()
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
897a254918d1fb162762d25214144081e7c6e2d7
| 39
|
py
|
Python
|
androidframer/__init__.py
|
abc22413/androidframer
|
67cb721964f1a27ecbda5b6e84ccb49e75b0c458
|
[
"Apache-2.0"
] | 80
|
2019-08-03T12:26:22.000Z
|
2020-06-13T11:43:55.000Z
|
androidframer/__init__.py
|
abc22413/androidframer
|
67cb721964f1a27ecbda5b6e84ccb49e75b0c458
|
[
"Apache-2.0"
] | 7
|
2019-08-03T09:50:28.000Z
|
2020-07-01T17:10:10.000Z
|
androidframer/__init__.py
|
abc22413/androidframer
|
67cb721964f1a27ecbda5b6e84ccb49e75b0c458
|
[
"Apache-2.0"
] | 7
|
2020-07-06T11:27:08.000Z
|
2021-02-24T11:45:47.000Z
|
from androidframer.framer import Framer
| 39
| 39
| 0.897436
| 5
| 39
| 7
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9846cb63a0389b2e69e89ec1122292cd314266af
| 43
|
py
|
Python
|
pykutils/__init__.py
|
pyokagan/pykutils
|
339a4e4db12f0ce44fd5ca3a94d385dbae7f4c34
|
[
"MIT"
] | null | null | null |
pykutils/__init__.py
|
pyokagan/pykutils
|
339a4e4db12f0ce44fd5ca3a94d385dbae7f4c34
|
[
"MIT"
] | null | null | null |
pykutils/__init__.py
|
pyokagan/pykutils
|
339a4e4db12f0ce44fd5ca3a94d385dbae7f4c34
|
[
"MIT"
] | null | null | null |
"pyokagan's personal utilities collection"
| 21.5
| 42
| 0.837209
| 5
| 43
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0.930233
| 0
| 0
| 0
| 0
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9846d7a929dfc7206aaef3926e2eb249dc72eda7
| 30
|
py
|
Python
|
notebooks/utils/tui/__init__.py
|
k4t0mono/ipln
|
ba71860bc38df52780903f647fb2404c61a6b3f2
|
[
"BSD-2-Clause"
] | 1
|
2021-03-15T11:53:40.000Z
|
2021-03-15T11:53:40.000Z
|
python/progress/__init__.py
|
pedromxavier/cookbook
|
243532f893651c34e70fbba8a52f3f129dbc8dd3
|
[
"MIT"
] | 2
|
2020-03-24T17:06:03.000Z
|
2020-03-31T02:16:40.000Z
|
python/progress/__init__.py
|
pedromxavier/cookbook
|
243532f893651c34e70fbba8a52f3f129dbc8dd3
|
[
"MIT"
] | null | null | null |
from .progress import Progress
| 30
| 30
| 0.866667
| 4
| 30
| 6.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
985c54343672eceee6a9d1dd94eac7c468981170
| 42
|
py
|
Python
|
secrets/__init__.py
|
kskewes-sf/spinnakerbot
|
32a7ad75a0c12e09498d24fe0cc8dd1b810bd6f7
|
[
"Apache-2.0"
] | 6
|
2020-03-18T10:33:53.000Z
|
2021-05-02T01:58:10.000Z
|
secrets/__init__.py
|
kskewes-sf/spinnakerbot
|
32a7ad75a0c12e09498d24fe0cc8dd1b810bd6f7
|
[
"Apache-2.0"
] | 8
|
2020-03-28T23:01:00.000Z
|
2020-12-01T06:52:00.000Z
|
secrets/__init__.py
|
kskewes-sf/spinnakerbot
|
32a7ad75a0c12e09498d24fe0cc8dd1b810bd6f7
|
[
"Apache-2.0"
] | 10
|
2020-02-05T16:24:28.000Z
|
2022-03-17T21:37:36.000Z
|
from .gcp_secrets import GcpSecretsManager
| 42
| 42
| 0.904762
| 5
| 42
| 7.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7f3e63c1fb6018a2c9b775eebf7066a5b9766177
| 80
|
py
|
Python
|
2017-03-07-Introduction-to-Python/examples/11-dict.py
|
s3rvac/talks
|
469ea5d2d3d90527f77863b85746bbc2d7236cb1
|
[
"BSD-3-Clause"
] | 2
|
2019-05-15T06:42:32.000Z
|
2020-08-01T11:48:40.000Z
|
2017-03-07-Introduction-to-Python/examples/11-dict.py
|
s3rvac/talks
|
469ea5d2d3d90527f77863b85746bbc2d7236cb1
|
[
"BSD-3-Clause"
] | null | null | null |
2017-03-07-Introduction-to-Python/examples/11-dict.py
|
s3rvac/talks
|
469ea5d2d3d90527f77863b85746bbc2d7236cb1
|
[
"BSD-3-Clause"
] | 1
|
2017-03-28T21:14:37.000Z
|
2017-03-28T21:14:37.000Z
|
# Mapping of person name -> score ('Laura' deliberately holds an int).
d = dict(John=2.5, Paul=1.5, Laura=1)
print(d['John'])  # 2.5
| 11.428571
| 22
| 0.3875
| 14
| 80
| 2.214286
| 0.571429
| 0.322581
| 0.387097
| 0.451613
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 0.3125
| 80
| 6
| 23
| 13.333333
| 0.436364
| 0.0375
| 0
| 0
| 0
| 0
| 0.226667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7f761293833e609b7440a654c809853058049750
| 306
|
py
|
Python
|
Files/Exceptions.py
|
MPCodeWriter21/Member21
|
1db91e24a1e48ea8dbe0b59e0493a6e67a0acc70
|
[
"MIT"
] | 2
|
2021-01-31T04:11:20.000Z
|
2021-03-01T05:47:34.000Z
|
Files/Exceptions.py
|
MPCodeWriter21/Member21
|
1db91e24a1e48ea8dbe0b59e0493a6e67a0acc70
|
[
"MIT"
] | null | null | null |
Files/Exceptions.py
|
MPCodeWriter21/Member21
|
1db91e24a1e48ea8dbe0b59e0493a6e67a0acc70
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# Exceptions
# CodeWriter21
class Member21Error(Exception):
    """Common base for all Member21 bot exceptions.

    New code can catch this single type; the concrete subclasses below
    keep their original names (and still derive from ``Exception``) so
    every existing ``except`` clause continues to work unchanged.
    """

class APIConfigNotFoundException(Member21Error):
    """Signals that the API configuration could not be found."""

class APIConfigDamagedException(Member21Error):
    """Signals that the API configuration exists but is damaged/unusable."""

class NoBotConfigFoundException(Member21Error):
    """Signals that no bot configuration was found."""

class NoBotEnabledConfigFoundException(Member21Error):
    """Signals that no *enabled* bot configuration was found."""

class ButtonsNotSetException(Member21Error):
    """Signals that buttons were used before being set/configured."""
| 17
| 50
| 0.833333
| 25
| 306
| 10.2
| 0.52
| 0.254902
| 0.282353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01083
| 0.094771
| 306
| 18
| 51
| 17
| 0.909747
| 0.133987
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
f6abd2bb70c830656d6dec419339541fc37671cc
| 97
|
py
|
Python
|
office365/sharepoint/lists/list_rule.py
|
rikeshtailor/Office365-REST-Python-Client
|
ca7bfa1b22212137bb4e984c0457632163e89a43
|
[
"MIT"
] | null | null | null |
office365/sharepoint/lists/list_rule.py
|
rikeshtailor/Office365-REST-Python-Client
|
ca7bfa1b22212137bb4e984c0457632163e89a43
|
[
"MIT"
] | null | null | null |
office365/sharepoint/lists/list_rule.py
|
rikeshtailor/Office365-REST-Python-Client
|
ca7bfa1b22212137bb4e984c0457632163e89a43
|
[
"MIT"
] | null | null | null |
from office365.sharepoint.base_entity import BaseEntity
class SPListRule(BaseEntity):
    """Client-side representation of a SharePoint list rule.

    Empty placeholder: inherits everything from ``BaseEntity`` and adds
    no members of its own (presumably mirrors the server-side
    ``SP.ListRule`` type -- TODO confirm against the SharePoint schema).
    """
    pass
| 16.166667
| 55
| 0.814433
| 11
| 97
| 7.090909
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.134021
| 97
| 5
| 56
| 19.4
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
f6e75ecad1ceab57bbff84d8d994897e463d2b9c
| 99
|
py
|
Python
|
tests/test_empty.py
|
ErikBjare/eeg-notebooks
|
73fd7ae7a723aafb41c36036da332b6e7e2a2b7b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_empty.py
|
ErikBjare/eeg-notebooks
|
73fd7ae7a723aafb41c36036da332b6e7e2a2b7b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_empty.py
|
ErikBjare/eeg-notebooks
|
73fd7ae7a723aafb41c36036da332b6e7e2a2b7b
|
[
"BSD-3-Clause"
] | null | null | null |
def test_empty():
    """Sanity check: passes iff the ``eegnb`` package is importable."""
    # Empty test that only imports eegnb, to make `pytest` pass
    import eegnb  # noqa: F401 -- the import itself is the assertion
| 24.75
| 63
| 0.69697
| 15
| 99
| 4.533333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.232323
| 99
| 3
| 64
| 33
| 0.894737
| 0.575758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
63fd4bd54b5d3ab935a4584d74cf20a2fb417b19
| 38
|
py
|
Python
|
tests/functional/modules/a_hidden_import/__init__.py
|
hawkhai/pyinstaller
|
016a24479b34de161792c72dde455a81ad4c78ae
|
[
"Apache-2.0"
] | 9,267
|
2015-01-01T04:08:45.000Z
|
2022-03-31T11:42:38.000Z
|
tests/functional/modules/a_hidden_import/__init__.py
|
hawkhai/pyinstaller
|
016a24479b34de161792c72dde455a81ad4c78ae
|
[
"Apache-2.0"
] | 5,150
|
2015-01-01T12:09:56.000Z
|
2022-03-31T18:06:12.000Z
|
tests/functional/modules/a_hidden_import/__init__.py
|
hawkhai/pyinstaller
|
016a24479b34de161792c72dde455a81ad4c78ae
|
[
"Apache-2.0"
] | 2,101
|
2015-01-03T10:25:27.000Z
|
2022-03-30T11:04:42.000Z
|
from . import submodule # noqa: F401
| 19
| 37
| 0.710526
| 5
| 38
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.210526
| 38
| 1
| 38
| 38
| 0.8
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
12447d579d2c9381064673a0bdaf590d555c5609
| 280
|
py
|
Python
|
pygarlic_validator/__init__.py
|
bybatkhuu/pygarlic_validator
|
8ea42545dbcede3faee553e02a5f7e560f9179c1
|
[
"MIT"
] | 1
|
2022-03-31T06:37:43.000Z
|
2022-03-31T06:37:43.000Z
|
pygarlic_validator/__init__.py
|
bybatkhuu/pygarlic_validator
|
8ea42545dbcede3faee553e02a5f7e560f9179c1
|
[
"MIT"
] | null | null | null |
pygarlic_validator/__init__.py
|
bybatkhuu/pygarlic_validator
|
8ea42545dbcede3faee553e02a5f7e560f9179c1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
try:
from pygarlic_validator.validator import validators, checkers, errors
from pygarlic_validator.__version__ import __version__
except ImportError:
from .validator import validators, checkers, errors
from .__version__ import __version__
| 31.111111
| 73
| 0.771429
| 30
| 280
| 6.6
| 0.466667
| 0.121212
| 0.212121
| 0.333333
| 0.434343
| 0.434343
| 0
| 0
| 0
| 0
| 0
| 0.004255
| 0.160714
| 280
| 8
| 74
| 35
| 0.838298
| 0.075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
125e4ce6af2e9208f22b7dc0cf1dfcc12caa2872
| 178
|
py
|
Python
|
src/__init__.py
|
mldev-ai/ML-Text-Classification
|
fc3d64aaa56796c4ed22b7842df66d59eb90b8ab
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
mldev-ai/ML-Text-Classification
|
fc3d64aaa56796c4ed22b7842df66d59eb90b8ab
|
[
"MIT"
] | null | null | null |
src/__init__.py
|
mldev-ai/ML-Text-Classification
|
fc3d64aaa56796c4ed22b7842df66d59eb90b8ab
|
[
"MIT"
] | null | null | null |
from src.FeatureExtractor import Extractor
from src.DataLoader import Dataset
from src.Trainer import TrainClf
from src.Inference import PredictClf
from src.Models import MLModel
| 35.6
| 42
| 0.865169
| 25
| 178
| 6.16
| 0.52
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106742
| 178
| 5
| 43
| 35.6
| 0.968553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
12695b68be997681bd60160adfb2433e6ae6d592
| 363
|
py
|
Python
|
stko/calculators/results/rmsd_results.py
|
stevenbennett96/stko
|
ee340af4fc549d5a2c3e9cba8360661335efe0fd
|
[
"MIT"
] | null | null | null |
stko/calculators/results/rmsd_results.py
|
stevenbennett96/stko
|
ee340af4fc549d5a2c3e9cba8360661335efe0fd
|
[
"MIT"
] | null | null | null |
stko/calculators/results/rmsd_results.py
|
stevenbennett96/stko
|
ee340af4fc549d5a2c3e9cba8360661335efe0fd
|
[
"MIT"
] | 2
|
2020-05-08T17:51:25.000Z
|
2020-05-11T09:03:24.000Z
|
"""
RMSD Results
=============
#. :class:`.RmsdResults`
Results class for extracting RMSD of two molecules.
"""
from .results import Results
class RmsdResults(Results):
    """
    Results class containing RMSD measures.

    The RMSD value is read eagerly from the calculator's generator when
    the instance is constructed, then cached for later retrieval.
    """
    def __init__(self, generator):
        # Pull exactly one value out of the generator and cache it.
        rmsd_value = next(generator)
        self._value = rmsd_value
    def get_rmsd(self):
        """Return the cached RMSD value."""
        return self._value
| 13.961538
| 51
| 0.630854
| 39
| 363
| 5.692308
| 0.538462
| 0.216216
| 0.207207
| 0.27027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231405
| 363
| 25
| 52
| 14.52
| 0.795699
| 0.399449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
126a6a2bd916dd0fdaa7da08e0f5d55a3f54c764
| 235
|
py
|
Python
|
section_5_price_alert/src/models/stores/views.py
|
sagarnildass/The-complete-python-web-course
|
d5c21eebeeea466a0fa46c9dfac203c0e5b01433
|
[
"MIT"
] | null | null | null |
section_5_price_alert/src/models/stores/views.py
|
sagarnildass/The-complete-python-web-course
|
d5c21eebeeea466a0fa46c9dfac203c0e5b01433
|
[
"MIT"
] | null | null | null |
section_5_price_alert/src/models/stores/views.py
|
sagarnildass/The-complete-python-web-course
|
d5c21eebeeea466a0fa46c9dfac203c0e5b01433
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# Blueprint named 'stores'; registered with the Flask app elsewhere.
store_blueprint = Blueprint('stores', __name__)
@store_blueprint.route('/')
def index():
    """Index endpoint of the stores blueprint; returns a placeholder string."""
    return "This is the stores index"
@store_blueprint.route('store/<string:name>')
def store_page(name):
    """Single-store page for *name*; stub only (body is just ``pass``)."""
    # NOTE(review): a Flask view returning None errors at request time --
    # presumably a stub awaiting implementation.
    pass
| 19.583333
| 47
| 0.731915
| 31
| 235
| 5.290323
| 0.548387
| 0.256098
| 0.231707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13617
| 235
| 11
| 48
| 21.363636
| 0.807882
| 0
| 0
| 0
| 0
| 0
| 0.212766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.125
| 0.125
| 0.125
| 0.5
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
|
0
| 6
|
89d96a6a92f05825ae249ab68a0f0c9119c3e39c
| 142
|
py
|
Python
|
azure-vote/azure-vote/mathlib.py
|
Naven360/azure-voting-app-redis
|
4db4ec14d3dc7e6973cf15abd35991e7b808f8ea
|
[
"MIT"
] | null | null | null |
azure-vote/azure-vote/mathlib.py
|
Naven360/azure-voting-app-redis
|
4db4ec14d3dc7e6973cf15abd35991e7b808f8ea
|
[
"MIT"
] | null | null | null |
azure-vote/azure-vote/mathlib.py
|
Naven360/azure-voting-app-redis
|
4db4ec14d3dc7e6973cf15abd35991e7b808f8ea
|
[
"MIT"
] | null | null | null |
def calc_addition(a, b):
    """Return the sum of ``a`` and ``b``."""
    total = a + b
    return total
def calc_multiply(a, b):
    """Return the product of ``a`` and ``b``."""
    product = a * b
    return product
def calc_substraction(a, b):
    """Return ``a`` minus ``b``.

    The public name keeps the original (misspelled) spelling so existing
    callers continue to work; ``calc_subtraction`` below is a correctly
    spelled, backward-compatible alias.
    """
    return a - b

# Correctly spelled alias for new code; same function object.
calc_subtraction = calc_substraction
| 12.909091
| 28
| 0.577465
| 24
| 142
| 3.291667
| 0.333333
| 0.151899
| 0.303797
| 0.341772
| 0.556962
| 0.43038
| 0.43038
| 0
| 0
| 0
| 0
| 0
| 0.316901
| 142
| 10
| 29
| 14.2
| 0.814433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d6108461167974069c65c6a572584dc99e5f0b27
| 65
|
py
|
Python
|
src/oceansat/commandline.py
|
brorfred/oceansat
|
5b336e385ca5efee5e8c59da97e8f9435b0fd3de
|
[
"MIT"
] | null | null | null |
src/oceansat/commandline.py
|
brorfred/oceansat
|
5b336e385ca5efee5e8c59da97e8f9435b0fd3de
|
[
"MIT"
] | null | null | null |
src/oceansat/commandline.py
|
brorfred/oceansat
|
5b336e385ca5efee5e8c59da97e8f9435b0fd3de
|
[
"MIT"
] | null | null | null |
import oceansat
def version():
    """Print the installed ``oceansat`` package version to stdout."""
    print(oceansat.__version__)
| 10.833333
| 31
| 0.738462
| 7
| 65
| 6.285714
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169231
| 65
| 5
| 32
| 13
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d61c05a3a19fdc413a5ccfadf754e3b2889f7b2e
| 7,588
|
py
|
Python
|
apps/fifth_edition/tests/test_NPC_view_get.py
|
tylerfrenchx13/django-dnd
|
b0c78c51aebeed4195fd91a3e55c313c645f9c3b
|
[
"MIT"
] | null | null | null |
apps/fifth_edition/tests/test_NPC_view_get.py
|
tylerfrenchx13/django-dnd
|
b0c78c51aebeed4195fd91a3e55c313c645f9c3b
|
[
"MIT"
] | null | null | null |
apps/fifth_edition/tests/test_NPC_view_get.py
|
tylerfrenchx13/django-dnd
|
b0c78c51aebeed4195fd91a3e55c313c645f9c3b
|
[
"MIT"
] | null | null | null |
from apps.fifth_edition.models import NPC
from django.test import TestCase
from rest_framework.test import APIClient
class TestNPCViewGET(TestCase):
    """
    Test class to verify functionality of the NPCViewGET API view.

    setUp seeds five NPCs whose name, level, class and race all vary, so
    every supported query parameter has both matching and non-matching
    values to exercise.
    """
    def setUp(self):
        """
        Method to create required test data (five NPCs, levels 2-10)
        :return: None
        """
        class_lookup = {1: "Rogue", 2: "Paladin", 3: "Cleric", 4: "Barbarian", 5: "Warlock"}
        race_lookup = {1: "Human", 2: "Dragonborn", 3: "Dwarf", 4: "Gnome", 5: "Tiefling"}
        # Create a set of NPC, with varying data
        for num in range(1, 6):
            NPC.objects.create(
                name="NPC {}".format(num),
                level=num * 2,
                npc_class=class_lookup[num],
                background="Test Background",
                race=race_lookup[num],
                alignment="Chaotic Good",
            )
    def tearDown(self):
        """
        Method to remove extraneous test data generated as a side effect of individual tests
        :return: None
        """
        pass
    def _get_results(self, url):
        """
        GET *url* via APIClient, assert HTTP 200, and return the paginated
        "results" payload. Factored out because every test repeated this
        exact request / status-assert / extract sequence.
        """
        response = APIClient().get(url, format="json")
        self.assertEqual(response.status_code, 200)
        return response.data["results"]
    def test_NPC_get_view_no_params_successful(self):
        """
        Unit test to verify that a direct request, with no query parameters works properly
        :return: None
        """
        # Results are ordered by level, descending (10 first, 2 last)
        results = self._get_results("/api/NPC")
        self.assertEqual(results[0]["level"], 10)
        self.assertEqual(results[-1]["level"], 2)
    def test_NPC_get_view_query_by_name_successful(self):
        """
        Unit test to verify that querying by a full name returns exactly that NPC
        :return: None
        """
        results = self._get_results("/api/NPC?name=NPC 1")
        self.assertTrue(len(results) == 1)
        self.assertEqual(results[0]["name"], "NPC 1")
    def test_NPC_get_view_query_by_name_multiple_results_successful(self):
        """
        Unit test to verify that querying by a shared name prefix returns several NPCs
        :return: None
        """
        results = self._get_results("/api/NPC?name=NPC")
        self.assertTrue(len(results) > 1)
    def test_NPC_get_view_query_by_name_no_results(self):
        """
        Unit test to verify that querying by a name that doesn't exist works correctly
        :return: None
        """
        results = self._get_results("/api/NPC?name=NPC 6")
        self.assertTrue(len(results) == 0)
    def test_NPC_get_view_query_by_class_no_results(self):
        """
        Unit test to verify that querying by a class that doesn't exist works correctly
        :return: None
        """
        results = self._get_results("/api/NPC?class=Monk")
        self.assertTrue(len(results) == 0)
    def test_NPC_get_view_query_by_race_successful(self):
        """
        Unit test to verify that querying by race returns the single matching NPC
        :return: None
        """
        results = self._get_results("/api/NPC?race=Dwarf")
        self.assertTrue(len(results) == 1)
        self.assertEqual(results[0]["race"], "Dwarf")
    def test_NPC_get_view_query_by_race_no_results(self):
        """
        Unit test to verify that querying by a race that doesn't exist works correctly
        :return: None
        """
        results = self._get_results("/api/NPC?race=Half Orc")
        self.assertTrue(len(results) == 0)
    def test_NPC_get_view_query_by_level_above_successful(self):
        """
        Unit test to verify that level_above=9 matches only the level-10 NPC
        :return: None
        """
        results = self._get_results("/api/NPC?level_above=9")
        self.assertTrue(len(results) == 1)
        self.assertEqual(results[0]["level"], 10)
    def test_NPC_get_view_query_by_level_above_no_results(self):
        """
        Unit test to verify that a level_above past the maximum level matches nothing
        :return: None
        """
        results = self._get_results("/api/NPC?level_above=11")
        self.assertTrue(len(results) == 0)
    def test_NPC_get_view_query_by_level_below_successful(self):
        """
        Unit test to verify that level_below=3 matches only the level-2 NPC
        :return: None
        """
        results = self._get_results("/api/NPC?level_below=3")
        self.assertTrue(len(results) == 1)
        self.assertEqual(results[0]["level"], 2)
    def test_NPC_get_view_query_by_level_below_no_results(self):
        """
        Unit test to verify that a level_below under the minimum level matches nothing
        :return: None
        """
        results = self._get_results("/api/NPC?level_below=1")
        self.assertTrue(len(results) == 0)
| 30.845528
| 93
| 0.6136
| 902
| 7,588
| 5.034368
| 0.13082
| 0.048447
| 0.024224
| 0.031491
| 0.82625
| 0.822506
| 0.81634
| 0.808853
| 0.800925
| 0.74477
| 0
| 0.014098
| 0.289536
| 7,588
| 245
| 94
| 30.971429
| 0.828232
| 0.306537
| 0
| 0.431373
| 0
| 0
| 0.106202
| 0.018461
| 0
| 0
| 0
| 0
| 0.264706
| 1
| 0.127451
| false
| 0.009804
| 0.029412
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d6402fece1eb7235af268a48c88cea7e7009af96
| 1,778
|
py
|
Python
|
pirates/leveleditor/worldData/pvpTestv2.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 3
|
2021-02-25T06:38:13.000Z
|
2022-03-22T07:00:15.000Z
|
pirates/leveleditor/worldData/pvpTestv2.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | null | null | null |
pirates/leveleditor/worldData/pvpTestv2.py
|
itsyaboyrocket/pirates
|
6ca1e7d571c670b0d976f65e608235707b5737e3
|
[
"BSD-3-Clause"
] | 1
|
2021-02-25T06:38:17.000Z
|
2021-02-25T06:38:17.000Z
|
# uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.pvpTestv2
from pandac.PandaModules import Point3, VBase3
# Decompiled level-editor world data: a nested dict describing one PvP
# region ('pvpCTLw00') containing an island (with an LOD sphere) and two
# 'Merchant' ship spawn nodes; 'ObjectIds' maps each timestamped editor
# id to its lookup path inside 'Objects'. Auto-generated output -- do not
# hand-edit beyond what the level editor would emit.
objectStruct = {'Objects': {'1128540775.81jubutler': {'Type': 'Region', 'Name': 'pvpCTLw00', 'Objects': {'1128540801.88jubutler': {'Type': 'Island', 'File': 'pvpTestIslev2', 'Hpr': VBase3(0.0, 0.0, 0.0), 'Objects': {'1128541283.64jubutler': {'Type': 'LOD Sphere', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(98.71, 144.17, 137.56), 'Radi': [1973, 2273, 2573], 'Scale': VBase3(1.0, 1.0, 1.0)}}, 'Pos': Point3(-20.329, 208.386, 0.348), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/islands/pir_m_are_isl_pvpSpanish'}}, '1128545304.45jubutler': {'Type': 'Ship Spawn Node', 'Hpr': VBase3(174.311, 0.0, 0.0), 'Level': '3', 'Pos': Point3(-340.74, 879.055, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Merchant', 'Team': '0'}, '1128545309.55jubutler': {'Type': 'Ship Spawn Node', 'Hpr': VBase3(-166.14, 0.0, 0.0), 'Level': '3', 'Pos': Point3(306.496, 929.333, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Merchant', 'Team': '0'}}}}, 'Node Links': [], 'Layers': {}, 'ObjectIds': {'1128540775.81jubutler': '["Objects"]["1128540775.81jubutler"]', '1128540801.88jubutler': '["Objects"]["1128540775.81jubutler"]["Objects"]["1128540801.88jubutler"]', '1128541283.64jubutler': '["Objects"]["1128540775.81jubutler"]["Objects"]["1128540801.88jubutler"]["Objects"]["1128541283.64jubutler"]', '1128545304.45jubutler': '["Objects"]["1128540775.81jubutler"]["Objects"]["1128545304.45jubutler"]', '1128545309.55jubutler': '["Objects"]["1128540775.81jubutler"]["Objects"]["1128545309.55jubutler"]'}}
| 296.333333
| 1,507
| 0.662542
| 243
| 1,778
| 4.831276
| 0.436214
| 0.030664
| 0.030664
| 0.027257
| 0.279387
| 0.279387
| 0.132879
| 0.132879
| 0.07155
| 0.07155
| 0
| 0.263319
| 0.081552
| 1,778
| 6
| 1,507
| 296.333333
| 0.455603
| 0.120922
| 0
| 0
| 0
| 0
| 0.560616
| 0.390635
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d649dcfccb6ac131b6a60ee86fa69ca48bf06995
| 1,911
|
py
|
Python
|
tests/commands/test_render_workflow_graph.py
|
SebastianKapunkt/joeflow
|
fbec6685d9d5fb2a4e3db92a9ca6e58cf5a4bce8
|
[
"BSD-3-Clause"
] | 46
|
2019-01-29T18:23:38.000Z
|
2022-02-03T12:47:16.000Z
|
tests/commands/test_render_workflow_graph.py
|
codingjoe/galahad
|
ee52b81a8df868e5ab050345550596dbdcd23d5f
|
[
"BSD-3-Clause"
] | 34
|
2019-10-11T03:42:56.000Z
|
2022-03-11T15:51:28.000Z
|
tests/commands/test_render_workflow_graph.py
|
codingjoe/galahad
|
ee52b81a8df868e5ab050345550596dbdcd23d5f
|
[
"BSD-3-Clause"
] | 11
|
2020-04-22T07:17:16.000Z
|
2022-02-15T08:44:47.000Z
|
import os
import tempfile
from pathlib import Path
from django.core.management import call_command
def test_call_no_args():
    """Without arguments, every workflow graph is rendered into the target dir."""
    out_dir = Path(tempfile.mkdtemp())
    call_command("render_workflow_graph", "-d", out_dir)
    assert (out_dir / "testapp_simpleworkflow.svg").exists()
    assert (out_dir / "testapp_simpleworkflow").exists()
def test_call_cleanup():
    """With -c the intermediate graph source is removed; the .svg is kept."""
    out_dir = Path(tempfile.mkdtemp())
    call_command("render_workflow_graph", "-d", out_dir, "-c")
    assert (out_dir / "testapp_simpleworkflow.svg").exists()
    assert not (out_dir / "testapp_simpleworkflow").exists()
def test_call_format_pdf():
    """-f pdf renders the workflow graph as a PDF file."""
    out_dir = Path(tempfile.mkdtemp())
    call_command("render_workflow_graph", "-d", out_dir, "-f", "pdf")
    assert (out_dir / "testapp_simpleworkflow.pdf").exists()
def test_call_format_png():
    """-f png renders the workflow graph as a PNG file."""
    out_dir = Path(tempfile.mkdtemp())
    call_command("render_workflow_graph", "-d", out_dir, "-f", "png")
    assert (out_dir / "testapp_simpleworkflow.png").exists()
def test_call_explicit_workflow():
    """Naming workflows on the command line restricts rendering to exactly those."""
    tmp_dir = Path(tempfile.mkdtemp())
    call_command(
        "render_workflow_graph",
        "-d",
        tmp_dir,
        "testapp.loopworkflow",
        "testapp.splitjoinworkflow",
    )
    # Path.exists() replaces the redundant os.path.exists(str(...)) round-trip.
    assert not (tmp_dir / "testapp_simpleworkflow.svg").exists()
    assert (tmp_dir / "testapp_loopworkflow.svg").exists()
    assert (tmp_dir / "testapp_splitjoinworkflow.svg").exists()
def test_call_explicit_workflow_invalid():
    """A non-workflow model (auth.user) is skipped; valid workflows still render."""
    tmp_dir = Path(tempfile.mkdtemp())
    call_command(
        "render_workflow_graph", "-d", tmp_dir, "auth.user", "testapp.splitjoinworkflow"
    )
    # Path.exists() replaces the redundant os.path.exists(str(...)) round-trip.
    assert not (tmp_dir / "testapp_simpleworkflow.svg").exists()
    assert not (tmp_dir / "auth_user.svg").exists()
    assert (tmp_dir / "testapp_splitjoinworkflow.svg").exists()
| 34.125
| 88
| 0.710623
| 258
| 1,911
| 4.98062
| 0.158915
| 0.112062
| 0.112062
| 0.140078
| 0.841245
| 0.799222
| 0.799222
| 0.799222
| 0.799222
| 0.724514
| 0
| 0
| 0.151753
| 1,911
| 55
| 89
| 34.745455
| 0.792721
| 0
| 0
| 0.333333
| 0
| 0
| 0.274202
| 0.239665
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.142857
| false
| 0
| 0.095238
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d6587d85df1f828c5ddbfdc7e6f01ce7674777d7
| 10,750
|
py
|
Python
|
tests/test_transfer.py
|
ivomastre/python-swift-cloud-tools
|
1edf440d1bf96c423b0a349bc8a0215897427fa5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_transfer.py
|
ivomastre/python-swift-cloud-tools
|
1edf440d1bf96c423b0a349bc8a0215897427fa5
|
[
"Apache-2.0"
] | null | null | null |
tests/test_transfer.py
|
ivomastre/python-swift-cloud-tools
|
1edf440d1bf96c423b0a349bc8a0215897427fa5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Grupo Globo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from unittest import TestCase
from unittest.mock import Mock, MagicMock, patch
from swift_cloud_tools.client import SCTClient
class TestTransfer(TestCase):
    """Tests for the SCTClient transfer endpoints (create / get / status).

    Every HTTP call is intercepted by patching ``requests`` inside
    ``swift_cloud_tools.client``, so no real network traffic occurs.
    The heavy copy-paste of the original (headers dict, mock-response
    setup, status-test body) is factored into private helpers.
    """

    def setUp(self):
        self.sct_host = 'http://swift-cloud-tools-dev.gcloud.dev.globoi.com'
        self.sct_api_key = 'd003d7dc6e2a48e99aed5082160de1fa'
        self.client = SCTClient(
            self.sct_host,
            self.sct_api_key
        )

    def _headers(self, token=None):
        """Headers the client is expected to send; *token* overrides the API key."""
        return {
            'Content-type': 'application/json',
            'X-Auth-Token': self.sct_api_key if token is None else token
        }

    @staticmethod
    def _response(status, **attrs):
        """Fake ``requests`` response with the given status code and attributes."""
        mock = Mock()
        mock.status_code = status
        for attr, value in attrs.items():
            setattr(mock, attr, value)
        return mock

    def _assert_status_call(self, mock_request, project_id, payload):
        """Drive transfer_status() and verify the URL, headers and JSON payload."""
        mock_request.return_value = self._response(200, json=payload)
        response = self.client.transfer_status(project_id)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/status/{}'.format(self.sct_host, project_id),
            headers=self._headers()
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json, payload)

    @patch('swift_cloud_tools.client.requests.post')
    def test_transfer_create(self, mock_request):
        """A valid create request POSTs the project data and returns 201."""
        project_id = '64b10d56454c4b1eb91b46b62d27c8b2'
        project_name = 'alan'
        environment = 'dev'
        data = {
            "project_id": project_id,
            "project_name": project_name,
            "environment": environment
        }
        status = 201
        content = "Transfer project '{}' environment '{}' created".format(project_name, environment)
        mock_request.return_value = self._response(status, content=content)
        response = self.client.transfer_create(project_id, project_name, environment)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/'.format(self.sct_host),
            data=json.dumps(data),
            headers=self._headers()
        )
        self.assertEqual(response.status_code, status)
        self.assertEqual(response.content, content)

    @patch('swift_cloud_tools.client.requests.post')
    def test_transfer_create_unauthenticated(self, mock_request):
        """A client built with a bad API key sends that key and gets 401."""
        project_id = '64b10d56454c4b1eb91b46b62d27c8b2'
        project_name = 'alan'
        environment = 'dev'
        data = {
            "project_id": project_id,
            "project_name": project_name,
            "environment": environment
        }
        status = 401
        content = 'Unauthenticated'
        mock_request.return_value = self._response(status, content=content)
        client = SCTClient(
            self.sct_host,
            '123456789'
        )
        response = client.transfer_create(project_id, project_name, environment)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/'.format(self.sct_host),
            data=json.dumps(data),
            headers=self._headers('123456789')
        )
        self.assertEqual(response.status_code, status)
        self.assertEqual(response.content, content)

    @patch('swift_cloud_tools.client.requests.post')
    def test_transfer_create_incorrect_parameters(self, mock_request):
        """An empty environment is rejected with 422."""
        project_id = '64b10d56454c4b1eb91b46b62d27c8b2'
        project_name = 'alan'
        environment = ''
        data = {
            "project_id": project_id,
            "project_name": project_name,
            "environment": environment
        }
        status = 422
        content = 'incorrect parameters'
        mock_request.return_value = self._response(status, content=content)
        response = self.client.transfer_create(project_id, project_name, environment)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/'.format(self.sct_host),
            data=json.dumps(data),
            headers=self._headers()
        )
        self.assertEqual(response.status_code, status)
        self.assertEqual(response.content, content)

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_get(self, mock_request):
        """Fetching an existing transfer returns its full JSON record."""
        project_id = '64b10d56454c4b1eb91b46b62d27c8b2'
        payload = {
            'id': 22,
            'project_id': project_id,
            'project_name': 'alan',
            'environment': 'dev',
            'container_count_swift': 0,
            'object_count_swift': 0,
            'bytes_used_swift': 0,
            'last_object': '',
            'count_error': 0,
            'container_count_gcp': 0,
            'object_count_gcp': 0,
            'bytes_used_gcp': 0,
            'initial_date': '2021-10-07 11:05:00',
            'final_date': '2021-10-07 11:29:00'
        }
        mock_request.return_value = self._response(200, json=payload)
        response = self.client.transfer_get(project_id)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/{}'.format(self.sct_host, project_id),
            headers=self._headers()
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json, payload)

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_get_unauthenticated(self, mock_request):
        """A 401 from the service is passed straight through to the caller."""
        project_id = '64b10d56454c4b1eb91b46b62d27c8b2'
        status = 401
        content = 'Unauthenticated'
        mock_request.return_value = self._response(status, content=content)
        response = self.client.transfer_get(project_id)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/{}'.format(self.sct_host, project_id),
            headers=self._headers()
        )
        self.assertEqual(response.status_code, status)
        self.assertEqual(response.content, content)

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_get_not_found(self, mock_request):
        """An unknown transfer yields the service's HTML 404 page unchanged."""
        project_id = '64b10d56454c4b1eb91b46b62d27c8b2'
        status = 404
        content = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n<title>404 Not Found</title>\n<h1>Not Found</h1>\n<p>The requested URL was not found on the server. If you entered the URL manually please check your spelling and try again.</p>\n'
        mock_request.return_value = self._response(status, content=content)
        response = self.client.transfer_get(project_id)
        mock_request.assert_called_once_with(
            '{}/v1/transfer/{}'.format(self.sct_host, project_id),
            headers=self._headers()
        )
        self.assertEqual(response.status_code, status)
        self.assertEqual(response.content, content)

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_status_uninitialized(self, mock_request):
        """An unknown project id reports the not-initialized status."""
        self._assert_status_call(
            mock_request,
            '64b10d56454c4b1eb91b46b62d27c8b_',
            {'status': 'Migração não inicializada'}
        )

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_status_completed(self, mock_request):
        """A finished migration reports the completed status."""
        self._assert_status_call(
            mock_request,
            '64b10d56454c4b1eb91b46b62d27c8b2',
            {'status': 'Migração concluída'}
        )

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_status_waiting(self, mock_request):
        """A queued migration reports the waiting status."""
        self._assert_status_call(
            mock_request,
            '64b10d56454c4b1eb91b46b62d27c8b2',
            {'status': 'Aguardando migração'}
        )

    @patch('swift_cloud_tools.client.requests.get')
    def test_transfer_status_progress(self, mock_request):
        """A running migration reports its status together with a progress value."""
        self._assert_status_call(
            mock_request,
            '64b10d56454c4b1eb91b46b62d27c8b2',
            {'status': 'Migrando', 'progress': 93}
        )
| 32.379518
| 257
| 0.619628
| 1,156
| 10,750
| 5.541522
| 0.16263
| 0.049173
| 0.049953
| 0.03606
| 0.795348
| 0.779113
| 0.779113
| 0.77443
| 0.77443
| 0.77443
| 0
| 0.043865
| 0.278977
| 10,750
| 331
| 258
| 32.477341
| 0.782609
| 0.050977
| 0
| 0.700389
| 0
| 0.003891
| 0.212505
| 0.087063
| 0
| 0
| 0
| 0
| 0.116732
| 1
| 0.042802
| false
| 0
| 0.015564
| 0
| 0.062257
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c38bc899014dd58543369ec2535371a13a21d780
| 129
|
py
|
Python
|
sapi_app/admin.py
|
calixo888/sapi
|
a2ff327795a7ea088cb158f7738af9121e465a08
|
[
"MIT"
] | null | null | null |
sapi_app/admin.py
|
calixo888/sapi
|
a2ff327795a7ea088cb158f7738af9121e465a08
|
[
"MIT"
] | 7
|
2020-06-06T01:14:59.000Z
|
2022-02-10T09:21:28.000Z
|
sapi_app/admin.py
|
calixo888/sapi
|
a2ff327795a7ea088cb158f7738af9121e465a08
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from . import models
# Register the app's models with the Django admin site so their records
# can be browsed and edited through the default admin interface.
admin.site.register(models.APIKey)
admin.site.register(models.JSONRecord)
| 21.5
| 38
| 0.821705
| 18
| 129
| 5.888889
| 0.555556
| 0.169811
| 0.320755
| 0.433962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085271
| 129
| 5
| 39
| 25.8
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.