Each record pairs the buggy (`before_merge`) and fixed (`after_merge`) versions of a Python function with the URL of the GitHub issue that reported the bug, the captured traceback (`full_traceback`), and the raised exception class (`traceback_type`, 555 distinct values). The code fields range from roughly 20 to 79.6k characters, URLs from 38 to 71, and tracebacks from 43 to 922k; code and tracebacks in the preview below are truncated with `...`.

| after_merge | before_merge | url | full_traceback | traceback_type |
|---|---|---|---|---|
def plot(result_pickle_file_path, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_pickle_file_path)
plot_result(result_dict, show, plot_save_file)
| def plot(result_dict_file, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_dict_file)
plot_result(result_dict, show, plot_save_file)
| https://github.com/ricequant/rqalpha/issues/109 | Traceback (most recent call last):
File "c:\programdata\anaconda2\lib\runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "c:\programdata\anaconda2\lib\runpy.py", line 72, in _run_code
exec code in run_globals
│ └ {'__builtins__': <module '__builtin__' (built-in)>, '__file__': 'C... | TypeError |
def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
# verify that the line is JSON
line = line.decode("utf-8")
try:
json.loads(line)
except ValueEr... | def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
self.progress("log", line.decode("utf-8"))
| https://github.com/jupyterhub/binderhub/issues/164 | / # jupyter-repo2docker https://github.com/yuvipanda/example-requirements --json-logs
Traceback (most recent call last):
File "/usr/local/bin/jupyter-repo2docker", line 11, in <module>
load_entry_point('jupyter-repo2docker==0.4.1', 'console_scripts', 'jupyter-repo2docker')()
File "/usr/local/lib/python3.6/site-packages... | FileNotFoundError |
def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).toLocalFile()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# ... | def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).path()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# remove ... | https://github.com/alicevision/meshroom/issues/912 | [2020-05-23 16:12:48,660][ERROR] Traceback (most recent call last):
File "D:\Meshroom_Src\meshroom\meshroom\ui\reconstruction.py", line 432, in load
super(Reconstruction, self).load(filepath, setupProjectFile)
File "D:\Meshroom_Src\meshroom\meshroom\ui\graph.py", line 314, in load
g.load(filepath, setupProjectFile)
Fil... | OSError |
def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return in... | def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return in... | https://github.com/alicevision/meshroom/issues/127 | Traceback (most recent call last):
File "C:\Users\andre\work\meshroom\meshroom\ui\reconstruction.py", line 72, in start
raise RuntimeError("Invalid folder provided: {}".format(folder))
RuntimeError: Invalid folder provided: /F:/ai-ml-models/images/live | RuntimeError |
def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
... | def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
... | https://github.com/mathics/Mathics/issues/906 | $ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home... | KeyError |
def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the mo... | def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the mo... | https://github.com/mathics/Mathics/issues/906 | $ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home... | KeyError |
def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if not key.startswith("mathics."):
print(f'removing module "{key... | def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if key[:8] != "mathics.":
print("removing module ", key, " not i... | https://github.com/mathics/Mathics/issues/836 | Mathics 1.1.dev0
on CPython 3.6.9 (default, Jul 17 2020, 12:50:27)
using SymPy 1.6.2, mpmath 1.1.0
Copyright (C) 2011-2020 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license... | ValueError |
def apply(self, evaluation):
"Exit"
exit()
| def apply(self, evaluation):
"Exit[]"
sys.exit()
| https://github.com/mathics/Mathics/issues/813 | Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/M... | NameError |
def apply_n(self, n, evaluation):
"Exit[n_Integer]"
exit(n.get_int_value())
| def apply_n(self, n, evaluation):
"Exit[n_Integer]"
sys.exit(n.get_int_value())
| https://github.com/mathics/Mathics/issues/813 | Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/M... | NameError |
def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
f = urllib2.urlopen(py_url)
try:
if sys.version_info >= (3, 0):
... | def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
with urllib2.urlopen(py_url) as f:
content_type = f.info().get_content_type()
... | https://github.com/mathics/Mathics/issues/562 | In[1]:= Import["https://upload.wikimedia.org/wikipedia/en/2/24/Lenna.png"]
Traceback (most recent call last):
File "/home/angus/venv_pypy/bin/mathics", line 11, in <module>
load_entry_point('Mathics', 'console_scripts', 'mathics')()
File "/home/angus/Mathics/mathics/main.py", line 286, in main
result = evaluation.evalu... | AttributeError |
def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
try:
return {
"backend_cpu_percentage": "{}%".format(
backend_data["system"]["cpu_percentage"]
),
"number... | def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
return {
"backend_cpu_percentage": backend_data["system"]["cpu_percentage"],
"number_of_running_analyses": len(backend_data["analysis"]["current_ana... | https://github.com/fkie-cad/FACT_core/issues/448 | [2020-07-07 09:46:38,595] ERROR in app: Exception on /ajax/stats/system [GET]
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1952, in full_dispa... | KeyError |
def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
[
... | def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
["unzip 3.3.9",... | https://github.com/fkie-cad/FACT_core/issues/392 | [2020-04-16 10:42:50][frontend][INFO]: Install static jstree content
Traceback (most recent call last):
File "src/install.py", line 173, in <module>
install()
File "src/install.py", line 157, in install
frontend(not args.no_radare, args.nginx)
File "/home/weidenba/FACT_core/src/install/frontend.py", line 165, in main
_... | helperFunctions.install.InstallationError |
def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
[pie_off, pie_on, pie_partial, pie_invalid]
)
self.append_pie_stats_to_result_dict(
pi... | def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
pie_off, pie_on, pie_partial, pie_invalid
)
self.append_pie_stats_to_result_dict(
pie_... | https://github.com/fkie-cad/FACT_core/issues/88 | [2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats... | IndexError |
def calculate_total_files_for_pie(pie_stats):
total_amount_of_files = 0
for item in pie_stats:
with suppress(IndexError):
total_amount_of_files += item[0][1]
return total_amount_of_files
| def calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid):
if (
len(pie_on) > 0
or len(pie_off) > 0
or len(pie_partial) > 0
or len(pie_invalid) > 0
):
total_amount_of_files = (
pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0... | https://github.com/fkie-cad/FACT_core/issues/88 | [2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats... | IndexError |
def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_fe... | def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_fe... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
redY_unique = np.unique(redY)
classifier = None
if len(re... | def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
classifier = pickle.loads(self.pickled_estimator)
oracle_call_... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights =... | def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights =... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_fea... | def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_fea... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.n... | def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.n... | https://github.com/fairlearn/fairlearn/issues/395 | from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueE... | ValueError |
def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(se... | def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(se... | https://github.com/activeloopai/Hub/issues/316 | Traceback (most recent call last):
File "examples/upload_mpi.py", line 52, in <module>
res_ds = out_ds.store(tag)
File "/Hub/hub/compute/transform.py", line 372, in store
n_results = self.store_shard(ds_in_shard, ds_out, start, token=token)
File "/Hub/hub/compute/transform.py", line 288, in store_shard
self.upload(
Fil... | AttributeError |
def __init__(
self,
url: str,
mode: str = "a",
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = defaults.DEFAULT_MEMORY_CACHE_SIZE,
storage_cache: int = defaults.DEFAULT_STORAGE_CACHE_SIZE,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or ... | def __init__(
self,
url: str,
mode: str = "a",
safe_mode: bool = False,
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = 2**26,
storage_cache: int = 2**28,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or existing dataset for read/writ... | https://github.com/activeloopai/Hub/issues/318 | Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumen... | hub.exceptions.ShapeArgumentNotFoundException |
def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True dataset needs to be created opposed to read.
"""
fs, path, mode = self._fs, self._path, self._mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.acti... | def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True dataset needs to be created opposed to read.
"""
fs, path, mode = self._fs, self._path, self.mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.activ... | https://github.com/activeloopai/Hub/issues/318 | Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumen... | hub.exceptions.ShapeArgumentNotFoundException |
def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self._shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._u... | def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self.shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._up... | https://github.com/activeloopai/Hub/issues/318 | Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumen... | hub.exceptions.ShapeArgumentNotFoundException |
def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(sha... | def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(sha... | https://github.com/activeloopai/Hub/issues/298 | ➜ feature_testing python upload_animals.py
26180
{'labels': ClassLabel(shape=(), dtype='int64', names=['pecora', 'mucca', 'cane', 'ragno', 'cavallo', 'elefante', 'gallina', 'gatto', 'scoiattolo', 'farfalla'], num_classes=10), 'image': Image(shape=(120, 120, 3), dtype='uint8', max_shape=(120, 120, 4))}
ClassLabel(shape... | AssertionError |
def verify_cli_version():
os.environ["OUTDATED_IGNORE"] = 1
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out ... | def verify_cli_version():
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out of date. Please upgrade the package by... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["description"]
except Exception:
message = " "
... | def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["error"]
except Exception:
message = " "
log... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
A... | def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
A... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of ge... | def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of ge... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
... | def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
keys = [sorted(dataset._tensors.keys()) for dataset in datasets]
for key in keys:
assert key == keys[0]
keys = keys[0]
... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
... | def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings th... | def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings th... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except Import... | def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except Import... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
logger.log(e)
return tf.variant
| def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
return tf.variant
| https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
... | def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, ... | def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, ... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daska... | def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daskarray)
self._meta = meta
self._array = daskarray
self._delayed_objs = delayed_objs
self._shape = _dask_shape_backward(daskarray.shape)
self._dt... | https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super(PermissionException, self).__init__(message=message)
| def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super().__init__(message=message)
| https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def _flatten(list_):
"""
Helper function to flatten the list
"""
return [item for sublist in list_ for item in sublist]
| def _flatten(l):
"""
Helper function to flatten the list
"""
return [item for sublist in l for item in sublist]
| https://github.com/activeloopai/Hub/issues/216 | Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upl... | ValueError |
def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(f... | def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(f... | https://github.com/falconry/falcon/issues/1254 | (falcon-bug-repro) falcon-bug-repro » python main.py
Traceback (most recent call last):
File "main.py", line 19, in <module>
MyMiddleware(),
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/api.py", line 156, in __init__
middleware, independent_middleware=independent_middleware)
... | IndexError |
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restr... | def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restr... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
... | def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and ... | def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and ... | https://github.com/falconry/falcon/issues/413 | Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string | TypeError |
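Each row above is one record. To see what a fix changed, the two code fields can be diffed directly; below is a minimal sketch using only the standard library, with a stand-in record built from the first preview row's function signatures (the real fields hold the full, untruncated source).

```python
# Diff the buggy and fixed versions of one record.
# `record` is a stand-in built from the first preview row; field names
# follow the table header above.
import difflib

record = {
    "before_merge": "def plot(result_dict_file, show, plot_save_file):\n",
    "after_merge": "def plot(result_pickle_file_path, show, plot_save_file):\n",
    "traceback_type": "TypeError",
}

diff = difflib.unified_diff(
    record["before_merge"].splitlines(keepends=True),
    record["after_merge"].splitlines(keepends=True),
    fromfile="before_merge",
    tofile="after_merge",
)
print(record["traceback_type"])
print("".join(diff))
```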
# PyTraceBugs
Link to GitHub Repo: https://github.com/acheshkov/pytracebugs
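To work with the full dataset rather than this preview, the records can be loaded programmatically. A minimal sketch with the `datasets` library, assuming the data is published on the Hugging Face Hub under an ID matching the repo name; the Hub ID and split name are assumptions, not confirmed by this card:

```python
# Hypothetical Hub ID and split name -- adjust to the actual published dataset.
from datasets import load_dataset

ds = load_dataset("acheshkov/pytracebugs", split="train")

record = ds[0]
print(record["traceback_type"])   # exception class, e.g. "TypeError"
print(record["url"])              # GitHub issue behind the fix
print(record["before_merge"])     # buggy version of the function
print(record["after_merge"])      # fixed version of the function

# Narrowing to one exception class is a one-liner:
key_errors = ds.filter(lambda r: r["traceback_type"] == "KeyError")
print(len(key_errors))
```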