id
int64 0
843k
| repository_name
stringlengths 7
55
| file_path
stringlengths 9
332
| class_name
stringlengths 3
290
| human_written_code
stringlengths 12
4.36M
| class_skeleton
stringlengths 19
2.2M
| total_program_units
int64 1
9.57k
| total_doc_str
int64 0
4.2k
| AvgCountLine
float64 0
7.89k
| AvgCountLineBlank
float64 0
300
| AvgCountLineCode
float64 0
7.89k
| AvgCountLineComment
float64 0
7.89k
| AvgCyclomatic
float64 0
130
| CommentToCodeRatio
float64 0
176
| CountClassBase
float64 0
48
| CountClassCoupled
float64 0
589
| CountClassCoupledModified
float64 0
581
| CountClassDerived
float64 0
5.37k
| CountDeclInstanceMethod
float64 0
4.2k
| CountDeclInstanceVariable
float64 0
299
| CountDeclMethod
float64 0
4.2k
| CountDeclMethodAll
float64 0
4.2k
| CountLine
float64 1
115k
| CountLineBlank
float64 0
9.01k
| CountLineCode
float64 0
94.4k
| CountLineCodeDecl
float64 0
46.1k
| CountLineCodeExe
float64 0
91.3k
| CountLineComment
float64 0
27k
| CountStmt
float64 1
93.2k
| CountStmtDecl
float64 0
46.1k
| CountStmtExe
float64 0
90.2k
| MaxCyclomatic
float64 0
759
| MaxInheritanceTree
float64 0
16
| MaxNesting
float64 0
34
| SumCyclomatic
float64 0
6k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
140,948 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/education/types/education_service.py
|
src.kagglesdk.education.types.education_service.LearnNudge
|
class LearnNudge(KaggleObject):
r"""
Attributes:
course_index (int)
course_name (str)
course_slug (str)
next_item_name (str)
next_item_url (str)
next_item_type (LearnNudgeType)
"""
def __init__(self):
self._course_index = 0
self._course_name = ""
self._course_slug = ""
self._next_item_name = ""
self._next_item_url = ""
self._next_item_type = LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS
self._freeze()
@property
def course_index(self) -> int:
return self._course_index
@course_index.setter
def course_index(self, course_index: int):
if course_index is None:
del self.course_index
return
if not isinstance(course_index, int):
raise TypeError('course_index must be of type int')
self._course_index = course_index
@property
def course_name(self) -> str:
return self._course_name
@course_name.setter
def course_name(self, course_name: str):
if course_name is None:
del self.course_name
return
if not isinstance(course_name, str):
raise TypeError('course_name must be of type str')
self._course_name = course_name
@property
def course_slug(self) -> str:
return self._course_slug
@course_slug.setter
def course_slug(self, course_slug: str):
if course_slug is None:
del self.course_slug
return
if not isinstance(course_slug, str):
raise TypeError('course_slug must be of type str')
self._course_slug = course_slug
@property
def next_item_name(self) -> str:
return self._next_item_name
@next_item_name.setter
def next_item_name(self, next_item_name: str):
if next_item_name is None:
del self.next_item_name
return
if not isinstance(next_item_name, str):
raise TypeError('next_item_name must be of type str')
self._next_item_name = next_item_name
@property
def next_item_url(self) -> str:
return self._next_item_url
@next_item_url.setter
def next_item_url(self, next_item_url: str):
if next_item_url is None:
del self.next_item_url
return
if not isinstance(next_item_url, str):
raise TypeError('next_item_url must be of type str')
self._next_item_url = next_item_url
@property
def next_item_type(self) -> 'LearnNudgeType':
return self._next_item_type
@next_item_type.setter
def next_item_type(self, next_item_type: 'LearnNudgeType'):
if next_item_type is None:
del self.next_item_type
return
if not isinstance(next_item_type, LearnNudgeType):
raise TypeError('next_item_type must be of type LearnNudgeType')
self._next_item_type = next_item_type
|
class LearnNudge(KaggleObject):
'''
Attributes:
course_index (int)
course_name (str)
course_slug (str)
next_item_name (str)
next_item_url (str)
next_item_type (LearnNudgeType)
'''
def __init__(self):
pass
@property
def course_index(self) -> int:
pass
@course_index.setter
def course_index(self) -> int:
pass
@property
def course_name(self) -> str:
pass
@course_name.setter
def course_name(self) -> str:
pass
@property
def course_slug(self) -> str:
pass
@course_slug.setter
def course_slug(self) -> str:
pass
@property
def next_item_name(self) -> str:
pass
@next_item_name.setter
def next_item_name(self) -> str:
pass
@property
def next_item_url(self) -> str:
pass
@next_item_url.setter
def next_item_url(self) -> str:
pass
@property
def next_item_type(self) -> 'LearnNudgeType':
pass
@next_item_type.setter
def next_item_type(self) -> 'LearnNudgeType':
pass
| 26 | 1 | 5 | 0 | 5 | 0 | 2 | 0.12 | 1 | 4 | 1 | 0 | 13 | 6 | 13 | 30 | 97 | 13 | 75 | 32 | 49 | 9 | 63 | 20 | 49 | 3 | 2 | 1 | 25 |
140,949 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/education/types/education_service.py
|
src.kagglesdk.education.types.education_service.LearnNudgeType
|
class LearnNudgeType(enum.Enum):
COURSE_COMPLETE_NO_BONUS_LESSONS = 0
COURSE_COMPLETE_WITH_BONUS_LESSONS = 1
COURSE_INCOMPLETE = 2
DO_EXERCISE = 3
DO_TUTORIAL = 4
|
class LearnNudgeType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 4 | 0 | 0 |
140,950 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kaggle_client.py
|
src.kagglesdk.kaggle_client.KaggleClient
|
class KaggleClient(object):
class Kernels(object):
def __init__(self, http_client: KaggleHttpClient):
self.kernels_api_client = KernelsApiClient(http_client)
class Blobs(object):
def __init__(self, http_client: KaggleHttpClient):
self.blob_api_client = BlobApiClient(http_client)
class Education(object):
def __init__(self, http_client: KaggleHttpClient):
self.education_api_client = EducationApiClient(http_client)
class Models(object):
def __init__(self, http_client: KaggleHttpClient):
self.model_api_client = ModelApiClient(http_client)
self.model_client = ModelClient(http_client)
class Competitions(object):
def __init__(self, http_client: KaggleHttpClient):
self.competition_api_client = CompetitionApiClient(http_client)
class Datasets(object):
def __init__(self, http_client: KaggleHttpClient):
self.dataset_api_client = DatasetApiClient(http_client)
class Admin(object):
def __init__(self, http_client: KaggleHttpClient):
self.inbox_file_client = InboxFileClient(http_client)
def __init__(self, env: KaggleEnv = None, verbose: bool = False, username: str = None, password: str = None):
self._http_client = http_client = KaggleHttpClient(env, verbose, self._renew_iap_token, username=username, password=password)
self.kernels = KaggleClient.Kernels(http_client)
self.blobs = KaggleClient.Blobs(http_client)
self.education = KaggleClient.Education(http_client)
self.models = KaggleClient.Models(http_client)
self.competitions = KaggleClient.Competitions(http_client)
self.datasets = KaggleClient.Datasets(http_client)
self.admin = KaggleClient.Admin(http_client)
self.username = username
self.password = password
def http_client(self):
return self._http_client
def _renew_iap_token(self):
return self.admin.admin_client.renew_iap_token()
def __enter__(self):
self._http_client.__enter__()
return self
def __exit__(self, exc_type, exc_value, tb):
self._http_client.__exit__(exc_type, exc_value, tb)
|
class KaggleClient(object):
class Kernels(object):
def __init__(self, http_client: KaggleHttpClient):
pass
class Blobs(object):
def __init__(self, http_client: KaggleHttpClient):
pass
class Education(object):
def __init__(self, http_client: KaggleHttpClient):
pass
class Models(object):
def __init__(self, http_client: KaggleHttpClient):
pass
class Competitions(object):
def __init__(self, http_client: KaggleHttpClient):
pass
class Datasets(object):
def __init__(self, http_client: KaggleHttpClient):
pass
class Admin(object):
def __init__(self, http_client: KaggleHttpClient):
pass
def __init__(self, http_client: KaggleHttpClient):
pass
def http_client(self):
pass
def _renew_iap_token(self):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, tb):
pass
| 20 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 11 | 9 | 0 | 5 | 10 | 5 | 5 | 54 | 11 | 43 | 38 | 23 | 0 | 43 | 38 | 23 | 1 | 1 | 0 | 12 |
140,951 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kaggle_env.py
|
src.kagglesdk.kaggle_env.KaggleEnv
|
class KaggleEnv(Enum):
LOCAL = 0 # localhost
STAGING = 1 # staging.kaggle.com
ADMIN = 2 # admin.kaggle.com
QA = 3 # qa.kaggle.com
# Direct prod access is not allowed to have IAP protection during testing, but we support basic auth.
PROD = 4
|
class KaggleEnv(Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 7 | 0 | 6 | 6 | 5 | 6 | 6 | 6 | 5 | 0 | 4 | 0 | 0 |
140,952 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kaggle_http_client.py
|
src.kagglesdk.kaggle_http_client.KaggleHttpClient
|
class KaggleHttpClient(object):
_xsrf_cookie_name = 'XSRF-TOKEN'
_csrf_cookie_name = "CSRF-TOKEN"
_xsrf_cookies = (_xsrf_cookie_name, _csrf_cookie_name)
_xsrf_header_name = 'X-XSRF-TOKEN'
def __init__(self,
env: KaggleEnv = None,
verbose: bool = False,
renew_iap_token=None,
username=None,
password=None):
self._env = env or get_env()
self._signed_in = None
self._endpoint = get_endpoint(self._env)
self._verbose = verbose
self._session = None
self._username = username
self._password = password
def call(self, service_name: str, request_name: str, request: KaggleObject,
response_type: Type[KaggleObject]):
self._init_session()
http_request = self._prepare_request(service_name, request_name, request)
http_response = self._session.send(http_request)
response = self._prepare_response(response_type, http_response)
return response
def _prepare_request(self, service_name: str, request_name: str,
request: KaggleObject):
request_url = self._get_request_url(request)
method = request.method()
data= ''
if method == 'GET':
data = request.__class__.to_dict(request, ignore_defaults=False)
if request.endpoint_path():
words = find_words(request.endpoint_path())
list(map(data.pop, [to_lower_camel_case(w) for w in words]))
if len(data) == 0:
data = None
if data:
request_url = f'{request_url}?{urllib.parse.urlencode(clean_data(data))}'
data = ''
self._session.headers.update({
'Accept': 'application/json',
'Content-Type': 'text/plain',
})
elif method == 'POST':
data = request.to_field_map(request, ignore_defaults=True)
if isinstance(data, dict):
fields = request.body_fields()
if fields is not None:
if fields != '*':
data = data[fields]
data = clean_data(data)
if self.requires_form(request):
data, content_type = self.make_form(data)
else:
content_type = 'application/json'
data = json.dumps(data)
self._session.headers.update({
'Accept': 'application/json',
'Content-Type': content_type,
})
http_request = requests.Request(
method=method,
url=request_url,
data=data,
headers=self._session.headers,
# cookies=self._get_xsrf_cookies(),
auth=self._session.auth)
prepared_request = http_request.prepare()
self._print_request(prepared_request)
return prepared_request
def _get_xsrf_cookies(self):
cookies = requests.cookies.RequestsCookieJar()
for cookie in self._session.cookies:
if cookie.name in KaggleHttpClient._xsrf_cookies:
cookies[cookie.name] = cookie.value
return cookies
def _prepare_response(self, response_type, http_response):
self._print_response(http_response)
http_response.raise_for_status()
if 'application/json' in http_response.headers['Content-Type']:
resp = http_response.json()
if 'code' in resp and resp['code'] >= 400:
raise requests.exceptions.HTTPError(
resp['message'], response=http_response)
if response_type is None: # Method doesn't have a return type
return None
return response_type.prepare_from(http_response)
def _print_request(self, request):
if not self._verbose:
return
self._print('---------------------Request----------------------')
self._print(
f'{request.method} {request.url}\n{_headers_to_str(request.headers)}\n\n{request.body}'
)
self._print('--------------------------------------------------')
def _print_response(self, response, body=True):
if not self._verbose:
return
self._print('---------------------Response---------------------')
self._print(f'{response.status_code}\n{_headers_to_str(response.headers)}')
if body:
self._print(f'\n{response.text}')
self._print('--------------------------------------------------')
def _print(self, message: str):
if self._verbose:
print(message)
def __enter__(self):
self._init_session()
return self
def __exit__(self, exc_type, exc_value, tb):
if self._session is not None:
self._session.close()
def _init_session(self):
if self._session is not None:
return self._session
self._session = requests.Session()
self._session.headers.update({
'User-Agent': 'kaggle-api/v1.7.0', # Was: V2
'Content-Type': 'application/x-www-form-urlencoded', # Was: /json
})
iap_token = self._get_iap_token_if_required()
if iap_token is not None:
self._session.headers.update({
# https://cloud.google.com/iap/docs/authentication-howto#authenticating_from_proxy-authorization_header
'Proxy-Authorization': f'Bearer {iap_token}',
})
self._try_fill_auth()
# self._fill_xsrf_token(iap_token) # TODO Make this align with original handler.
def _get_iap_token_if_required(self):
if self._env not in (KaggleEnv.STAGING, KaggleEnv.ADMIN):
return None
iap_token = os.getenv('KAGGLE_IAP_TOKEN')
if iap_token is None:
raise Exception(f'Must set KAGGLE_IAP_TOKEN to access "{self._endpoint}"')
return iap_token
def _fill_xsrf_token(self, iap_token):
initial_get_request = requests.Request(
method='GET',
url=self._endpoint,
headers=self._session.headers,
auth=self._session.auth)
prepared_request = initial_get_request.prepare()
self._print_request(prepared_request)
http_response = self._session.send(prepared_request)
self._print_response(http_response, body=False)
if iap_token is not None and http_response.status_code in (401, 403):
raise requests.exceptions.HTTPError('IAP token invalid or expired')
http_response.raise_for_status()
self._session.headers.update({
KaggleHttpClient._xsrf_header_name:
self._session.cookies[KaggleHttpClient._xsrf_cookie_name],
})
class BearerAuth(requests.auth.AuthBase):
def __init__(self, token):
self.token = token
def __call__(self, r):
r.headers["Authorization"] = f"Bearer {self.token}"
return r
def _try_fill_auth(self):
if self._signed_in is not None:
return
api_token = os.getenv('KAGGLE_API_TOKEN')
if api_token is not None:
self._session.auth = KaggleHttpClient.BearerAuth(api_token)
self._signed_in = True
return
if self._username and self._password:
apikey_creds = self._username, self._password
else:
apikey_creds = _get_apikey_creds()
if apikey_creds is not None:
self._session.auth = apikey_creds
self._signed_in = True
return
self._signed_in = False
def _get_request_url(self, request):
return f'{self._endpoint}{request.endpoint()}'
@staticmethod
def make_form(fields):
body = BytesIO()
boundary = binascii.hexlify(os.urandom(16)).decode()
writer = codecs.lookup("utf-8")[3]
for field in fields.items():
field = RequestField.from_tuples(*field)
body.write(f"--{boundary}\r\n".encode("latin-1"))
writer(body).write(field.render_headers())
data = field.data
if isinstance(data, int):
data = str(data)
if isinstance(data, str):
writer(body).write(data)
else:
body.write(data)
body.write(b"\r\n")
body.write(f"--{boundary}--\r\n".encode("latin-1"))
content_type = f"multipart/form-data; boundary={boundary}"
return body.getvalue(), content_type
@staticmethod
def requires_form(request):
return type(request).__name__ in REQUESTS_REQUIRING_FORMS
|
class KaggleHttpClient(object):
def __init__(self,
env: KaggleEnv = None,
verbose: bool = False,
renew_iap_token=None,
username=None,
password=None):
pass
def call(self, service_name: str, request_name: str, request: KaggleObject,
response_type: Type[KaggleObject]):
pass
def _prepare_request(self, service_name: str, request_name: str,
request: KaggleObject):
pass
def _get_xsrf_cookies(self):
pass
def _prepare_response(self, response_type, http_response):
pass
def _print_request(self, request):
pass
def _print_response(self, response, body=True):
pass
def _print_request(self, request):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, tb):
pass
def _init_session(self):
pass
def _get_iap_token_if_required(self):
pass
def _fill_xsrf_token(self, iap_token):
pass
class BearerAuth(requests.auth.AuthBase):
def __init__(self,
env: KaggleEnv = None,
verbose: bool = False,
renew_iap_token=None,
username=None,
password=None):
pass
def __call__(self, r):
pass
def _try_fill_auth(self):
pass
def _get_request_url(self, request):
pass
@staticmethod
def make_form(fields):
pass
@staticmethod
def requires_form(request):
pass
| 23 | 0 | 11 | 1 | 10 | 0 | 3 | 0.03 | 1 | 14 | 3 | 0 | 15 | 7 | 17 | 17 | 240 | 39 | 198 | 69 | 168 | 6 | 159 | 60 | 138 | 10 | 1 | 4 | 50 |
140,953 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kaggle_object.py
|
src.kagglesdk.kaggle_object.DateTimeSerializer
|
class DateTimeSerializer(ObjectSerializer):
def __init__(self):
"""Date times are serialized/deserialized as a string in iso format"""
ObjectSerializer.__init__(self,
lambda cls, dt, _: DateTimeSerializer._to_str(dt),
lambda _, v: DateTimeSerializer._from_str(v))
@staticmethod
def _to_str(dt):
return dt.isoformat(timespec='milliseconds') + 'Z'
@staticmethod
def _from_str(v):
v = v.rstrip('Z')
fields = v.rsplit('.', maxsplit=1)
if len(fields) == 1:
return datetime.fromisoformat(v)
(dt, nanos) = fields
millis = nanos[:3]
try:
return datetime.fromisoformat(f'{dt}.{millis}')
except ValueError:
return datetime.fromisoformat(dt)
|
class DateTimeSerializer(ObjectSerializer):
def __init__(self):
'''Date times are serialized/deserialized as a string in iso format'''
pass
@staticmethod
def _to_str(dt):
pass
@staticmethod
def _from_str(v):
pass
| 6 | 1 | 6 | 0 | 6 | 1 | 2 | 0.1 | 1 | 2 | 0 | 0 | 1 | 0 | 3 | 4 | 23 | 2 | 20 | 9 | 14 | 2 | 16 | 7 | 12 | 3 | 2 | 1 | 5 |
140,954 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kaggle_object.py
|
src.kagglesdk.kaggle_object.EnumSerializer
|
class EnumSerializer(ObjectSerializer):
def __init__(self):
"""
Enum objects are serialized using their ".name" field and deserialized by indexing the string in the Enum type.
Example:
class Foo(Enum):
TEST = 1
foo = Foo.TEST
foo.name # => returns "TEST"
Foo["TEST"] # => returns Foo.TEST enum value.
"""
ObjectSerializer.__init__(self,
lambda cls, v, _: EnumSerializer._to_str(cls, v),
lambda cls, v: EnumSerializer._from_str(cls, v))
@staticmethod
def _to_str(cls, v):
# "v" corresponds to an enum instance: Example foo or Foo.Test above.
# "cls" corresponds to the enum type Foo above.
#enum_prefix = f'{_pascal_case_to_upper_snake_case(cls.__name__)}_'
#if v.name.startswith(enum_prefix):
# return v.name
#return f'{enum_prefix}{v.name}'
enum_prefix = f'{_pascal_case_to_upper_snake_case(cls.__name__)}_'
if v.name.find(enum_prefix) == 0:
return v.name[len(enum_prefix):].lower()
return v.name
@staticmethod
def _from_str(cls, v):
# "v" corresponds to enum string: Example "TEST" above.
# "cls" corresponds to the enum type Foo above.
# enum_items = {item.name: item for item in cls}
# if v in enum_items:
# return enum_items[v]
#
# # Try with enum prefix. Example: EnvironmentType.JSON -> "ENVIRONMENT_TYPE_JSON"
# enum_prefix = _pascal_case_to_upper_snake_case(cls.__name__)
# if v.startswith(enum_prefix):
# ix_start = len(enum_prefix) + 1
# return enum_items[v[ix_start:]]
#
# return enum_items[f'{enum_prefix}_{v}']
try:
return cls[v]
except KeyError:
dct = vars(cls)
n = v.lower()
nn = _convert(v).lower()
enum_prefix = _pascal_case_to_upper_snake_case(cls.__name__).lower()
for key in dct.keys():
k = key.lower()
if k == n:
return dct[key]
if k.startswith(enum_prefix) and k.endswith(n) or k.endswith(nn):
return dct[key]
raise
|
class EnumSerializer(ObjectSerializer):
def __init__(self):
'''
Enum objects are serialized using their ".name" field and deserialized by indexing the string in the Enum type.
Example:
class Foo(Enum):
TEST = 1
foo = Foo.TEST
foo.name # => returns "TEST"
Foo["TEST"] # => returns Foo.TEST enum value.
'''
pass
@staticmethod
def _to_str(cls, v):
pass
@staticmethod
def _from_str(cls, v):
pass
| 6 | 1 | 17 | 0 | 8 | 9 | 3 | 1.04 | 1 | 1 | 0 | 0 | 1 | 0 | 3 | 4 | 57 | 2 | 27 | 13 | 21 | 28 | 23 | 11 | 19 | 5 | 2 | 3 | 8 |
140,955 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/education/services/education_api_service.py
|
src.kagglesdk.education.services.education_api_service.EducationApiClient
|
class EducationApiClient(object):
def __init__(self, client: KaggleHttpClient):
self._client = client
def track_exercise_interaction(self, request: ApiTrackExerciseInteractionRequest = None) -> ApiTrackExerciseInteractionResponse:
r"""
Args:
request (ApiTrackExerciseInteractionRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiTrackExerciseInteractionRequest()
return self._client.call("education.EducationApiService", "ApiTrackExerciseInteraction", request, ApiTrackExerciseInteractionResponse)
|
class EducationApiClient(object):
def __init__(self, client: KaggleHttpClient):
pass
def track_exercise_interaction(self, request: ApiTrackExerciseInteractionRequest = None) -> ApiTrackExerciseInteractionResponse:
'''
Args:
request (ApiTrackExerciseInteractionRequest):
The request object; initialized to empty instance if not specified.
'''
pass
| 3 | 1 | 7 | 1 | 3 | 3 | 2 | 0.71 | 1 | 3 | 3 | 0 | 2 | 1 | 2 | 2 | 16 | 4 | 7 | 4 | 4 | 5 | 7 | 4 | 4 | 2 | 1 | 1 | 3 |
140,956 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kernels/types/kernels_enums.py
|
src.kagglesdk.kernels.types.kernels_enums.KernelsListSortType
|
class KernelsListSortType(enum.Enum):
HOTNESS = 0
COMMENT_COUNT = 1
DATE_CREATED = 2
DATE_RUN = 3
RELEVANCE = 4
SCORE_ASCENDING = 5
SCORE_DESCENDING = 6
VIEW_COUNT = 7
VOTE_COUNT = 8
|
class KernelsListSortType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 10 | 0 | 10 | 10 | 9 | 0 | 10 | 10 | 9 | 0 | 4 | 0 | 0 |
140,957 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/kernels/types/kernels_enums.py
|
src.kagglesdk.kernels.types.kernels_enums.KernelsListViewType
|
class KernelsListViewType(enum.Enum):
KERNELS_LIST_VIEW_TYPE_UNSPECIFIED = 0
PROFILE = 1
UPVOTED = 2
EVERYONE = 3
COLLABORATION = 4
FORK = 5
BOOKMARKED = 6
RECENTLY_VIEWED = 7
PUBLIC_AND_USERS_PRIVATE = 8
|
class KernelsListViewType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 10 | 0 | 10 | 10 | 9 | 0 | 10 | 10 | 9 | 0 | 4 | 0 | 0 |
140,958 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/services/model_api_service.py
|
src.kagglesdk.models.services.model_api_service.ModelApiClient
|
class ModelApiClient(object):
def __init__(self, client: KaggleHttpClient):
self._client = client
def create_model(self, request: ApiCreateModelRequest = None) -> ApiCreateModelResponse:
r"""
Args:
request (ApiCreateModelRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiCreateModelRequest()
return self._client.call("models.ModelApiService", "ApiCreateModel", request, ApiCreateModelResponse)
def create_model_instance(self, request: ApiCreateModelInstanceRequest = None) -> ApiCreateModelResponse:
r"""
Args:
request (ApiCreateModelInstanceRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiCreateModelInstanceRequest()
return self._client.call("models.ModelApiService", "ApiCreateModelInstance", request, ApiCreateModelResponse)
def create_model_instance_version(self, request: ApiCreateModelInstanceVersionRequest = None) -> ApiCreateModelResponse:
r"""
Args:
request (ApiCreateModelInstanceVersionRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiCreateModelInstanceVersionRequest()
return self._client.call("models.ModelApiService", "ApiCreateModelInstanceVersion", request, ApiCreateModelResponse)
def delete_model(self, request: ApiDeleteModelRequest = None) -> ApiDeleteModelResponse:
r"""
Args:
request (ApiDeleteModelRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiDeleteModelRequest()
return self._client.call("models.ModelApiService", "ApiDeleteModel", request, ApiDeleteModelResponse)
def delete_model_instance(self, request: ApiDeleteModelInstanceRequest = None) -> ApiDeleteModelResponse:
r"""
Args:
request (ApiDeleteModelInstanceRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiDeleteModelInstanceRequest()
return self._client.call("models.ModelApiService", "ApiDeleteModelInstance", request, ApiDeleteModelResponse)
def delete_model_instance_version(self, request: ApiDeleteModelInstanceVersionRequest = None) -> ApiDeleteModelResponse:
r"""
Args:
request (ApiDeleteModelInstanceVersionRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiDeleteModelInstanceVersionRequest()
return self._client.call("models.ModelApiService", "ApiDeleteModelInstanceVersion", request, ApiDeleteModelResponse)
def get_model(self, request: ApiGetModelRequest = None) -> ApiModel:
r"""
Args:
request (ApiGetModelRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiGetModelRequest()
return self._client.call("models.ModelApiService", "ApiGetModel", request, ApiModel)
def get_model_instance(self, request: ApiGetModelInstanceRequest = None) -> ApiModelInstance:
r"""
Args:
request (ApiGetModelInstanceRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiGetModelInstanceRequest()
return self._client.call("models.ModelApiService", "ApiGetModelInstance", request, ApiModelInstance)
def download_model_instance_version(self, request: ApiDownloadModelInstanceVersionRequest = None) -> HttpRedirect:
r"""
Args:
request (ApiDownloadModelInstanceVersionRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiDownloadModelInstanceVersionRequest()
return self._client.call("models.ModelApiService", "ApiDownloadModelInstanceVersion", request, HttpRedirect)
def list_models(self, request: ApiListModelsRequest = None) -> ApiListModelsResponse:
r"""
Args:
request (ApiListModelsRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiListModelsRequest()
return self._client.call("models.ModelApiService", "ApiListModels", request, ApiListModelsResponse)
def list_model_instance_version_files(self, request: ApiListModelInstanceVersionFilesRequest = None) -> ApiListModelInstanceVersionFilesResponse:
r"""
Args:
request (ApiListModelInstanceVersionFilesRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiListModelInstanceVersionFilesRequest()
return self._client.call("models.ModelApiService", "ApiListModelInstanceVersionFiles", request, ApiListModelInstanceVersionFilesResponse)
def update_model(self, request: ApiUpdateModelRequest = None) -> ApiUpdateModelResponse:
r"""
Args:
request (ApiUpdateModelRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiUpdateModelRequest()
return self._client.call("models.ModelApiService", "ApiUpdateModel", request, ApiUpdateModelResponse)
def update_model_instance(self, request: ApiUpdateModelInstanceRequest = None) -> ApiUpdateModelResponse:
r"""
Args:
request (ApiUpdateModelInstanceRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiUpdateModelInstanceRequest()
return self._client.call("models.ModelApiService", "ApiUpdateModelInstance", request, ApiUpdateModelResponse)
def upload_model_file(self, request: ApiUploadModelFileRequest = None) -> ApiUploadModelFileResponse:
r"""
Deprecated. Use the new unified BlobApiService#StartBlobUpload rpc.
Args:
request (ApiUploadModelFileRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiUploadModelFileRequest()
return self._client.call("models.ModelApiService", "ApiUploadModelFile", request, ApiUploadModelFileResponse)
def create_model_signing_token(self, request: CreateModelSigningTokenRequest = None) -> CreateModelSigningTokenResponse:
r"""
Creates an Kaggle issued identity token. The token is signed using a
private key held in KMS that is only accessible by Kaggle model-signer
service account.
Args:
request (CreateModelSigningTokenRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = CreateModelSigningTokenRequest()
return self._client.call("models.ModelApiService", "CreateModelSigningToken", request, CreateModelSigningTokenResponse)
def well_know_endpoint(self, request: WellKnowEndpointRequest = None) -> WellKnowEndpointResponse:
r"""
see spec -
https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig.
Must support CORS. The service will have a path component.
Args:
request (WellKnowEndpointRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = WellKnowEndpointRequest()
return self._client.call("models.ModelApiService", "WellKnowEndpoint", request, WellKnowEndpointResponse)
def keys(self, request: KeysRequest = None) -> KeysResponse:
r"""
The JWKS endpoint containing the keys to validate the signature of a kaggle
issued signing identity token.
Args:
request (KeysRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = KeysRequest()
return self._client.call("models.ModelApiService", "Keys", request, KeysResponse)
def list_model_gating_user_consents(self, request: ApiListModelGatingUserConsentsRequest = None) -> ApiListModelGatingUserConsentsResponse:
r"""
Model gating
List the user consents for a gated model, under the model's current active
agreement.
Args:
request (ApiListModelGatingUserConsentsRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiListModelGatingUserConsentsRequest()
return self._client.call("models.ModelApiService", "ApiListModelGatingUserConsents", request, ApiListModelGatingUserConsentsResponse)
def review_gating_user_consent(self, request: ApiReviewGatingUserConsentRequest = None):
r"""
Review the user consents for an agreement.
Args:
request (ApiReviewGatingUserConsentRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiReviewGatingUserConsentRequest()
self._client.call("models.ModelApiService", "ApiReviewGatingUserConsent", request, None)
|
class ModelApiClient(object):
def __init__(self, client: KaggleHttpClient):
pass
def create_model(self, request: ApiCreateModelRequest = None) -> ApiCreateModelResponse:
'''
Args:
request (ApiCreateModelRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def create_model_instance(self, request: ApiCreateModelInstanceRequest = None) -> ApiCreateModelResponse:
'''
Args:
request (ApiCreateModelInstanceRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def create_model_instance_version(self, request: ApiCreateModelInstanceVersionRequest = None) -> ApiCreateModelResponse:
'''
Args:
request (ApiCreateModelInstanceVersionRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def delete_model(self, request: ApiDeleteModelRequest = None) -> ApiDeleteModelResponse:
'''
Args:
request (ApiDeleteModelRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def delete_model_instance(self, request: ApiDeleteModelInstanceRequest = None) -> ApiDeleteModelResponse:
'''
Args:
request (ApiDeleteModelInstanceRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def delete_model_instance_version(self, request: ApiDeleteModelInstanceVersionRequest = None) -> ApiDeleteModelResponse:
'''
Args:
request (ApiDeleteModelInstanceVersionRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def get_model(self, request: ApiGetModelRequest = None) -> ApiModel:
'''
Args:
request (ApiGetModelRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def get_model_instance(self, request: ApiGetModelInstanceRequest = None) -> ApiModelInstance:
'''
Args:
request (ApiGetModelInstanceRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def download_model_instance_version(self, request: ApiDownloadModelInstanceVersionRequest = None) -> HttpRedirect:
'''
Args:
request (ApiDownloadModelInstanceVersionRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def list_models(self, request: ApiListModelsRequest = None) -> ApiListModelsResponse:
'''
Args:
request (ApiListModelsRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def list_model_instance_version_files(self, request: ApiListModelInstanceVersionFilesRequest = None) -> ApiListModelInstanceVersionFilesResponse:
'''
Args:
request (ApiListModelInstanceVersionFilesRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def update_model(self, request: ApiUpdateModelRequest = None) -> ApiUpdateModelResponse:
'''
Args:
request (ApiUpdateModelRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def update_model_instance(self, request: ApiUpdateModelInstanceRequest = None) -> ApiUpdateModelResponse:
'''
Args:
request (ApiUpdateModelInstanceRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def upload_model_file(self, request: ApiUploadModelFileRequest = None) -> ApiUploadModelFileResponse:
'''
Deprecated. Use the new unified BlobApiService#StartBlobUpload rpc.
Args:
request (ApiUploadModelFileRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def create_model_signing_token(self, request: CreateModelSigningTokenRequest = None) -> CreateModelSigningTokenResponse:
'''
Creates an Kaggle issued identity token. The token is signed using a
private key held in KMS that is only accessible by Kaggle model-signer
service account.
Args:
request (CreateModelSigningTokenRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def well_know_endpoint(self, request: WellKnowEndpointRequest = None) -> WellKnowEndpointResponse:
'''
see spec -
https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig.
Must support CORS. The service will have a path component.
Args:
request (WellKnowEndpointRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def keys(self, request: KeysRequest = None) -> KeysResponse:
'''
The JWKS endpoint containing the keys to validate the signature of a kaggle
issued signing identity token.
Args:
request (KeysRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def list_model_gating_user_consents(self, request: ApiListModelGatingUserConsentsRequest = None) -> ApiListModelGatingUserConsentsResponse:
'''
Model gating
List the user consents for a gated model, under the model's current active
agreement.
Args:
request (ApiListModelGatingUserConsentsRequest):
The request object; initialized to empty instance if not specified.
'''
pass
def review_gating_user_consent(self, request: ApiReviewGatingUserConsentRequest = None):
'''
Review the user consents for an agreement.
Args:
request (ApiReviewGatingUserConsentRequest):
The request object; initialized to empty instance if not specified.
'''
pass
| 21 | 19 | 12 | 2 | 4 | 5 | 2 | 1.37 | 1 | 33 | 33 | 0 | 20 | 1 | 20 | 20 | 251 | 64 | 79 | 22 | 58 | 108 | 79 | 22 | 58 | 2 | 1 | 1 | 39 |
140,959 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.CreateModelSigningTokenRequest
|
class CreateModelSigningTokenRequest(KaggleObject):
  r"""Request for minting a Kaggle-issued model signing identity token.

  Attributes:
    owner_slug (str)
    model_slug (str)
  """

  def __init__(self):
    self._owner_slug = ""
    self._model_slug = ""
    self._freeze()

  @property
  def owner_slug(self) -> str:
    return self._owner_slug

  @owner_slug.setter
  def owner_slug(self, owner_slug: str):
    # None resets the field; any non-str value is rejected.
    if owner_slug is None:
      del self.owner_slug
    elif not isinstance(owner_slug, str):
      raise TypeError('owner_slug must be of type str')
    else:
      self._owner_slug = owner_slug

  @property
  def model_slug(self) -> str:
    return self._model_slug

  @model_slug.setter
  def model_slug(self, model_slug: str):
    if model_slug is None:
      del self.model_slug
    elif not isinstance(model_slug, str):
      raise TypeError('model_slug must be of type str')
    else:
      self._model_slug = model_slug

  def endpoint(self):
    """REST path for this request (no placeholders to substitute)."""
    return '/api/v1/models/signing/token'.format_map(self.to_field_map(self))

  @staticmethod
  def method():
    return 'POST'

  @staticmethod
  def body_fields():
    # '*' presumably selects all fields for the request body — matches the
    # other generated request types in this SDK.
    return '*'
|
class CreateModelSigningTokenRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 15 | 1 | 4 | 0 | 4 | 0 | 2 | 0.14 | 1 | 2 | 0 | 0 | 6 | 2 | 8 | 25 | 50 | 9 | 36 | 18 | 21 | 5 | 30 | 12 | 21 | 3 | 2 | 1 | 12 |
140,960 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.CreateModelSigningTokenResponse
|
class CreateModelSigningTokenResponse(KaggleObject):
  r"""Response carrying the minted signing identity token.

  Attributes:
    id_token (str)
  """

  def __init__(self):
    self._id_token = ""
    self._freeze()

  @property
  def id_token(self) -> str:
    return self._id_token

  @id_token.setter
  def id_token(self, id_token: str):
    # None clears the token; otherwise only str is accepted.
    if id_token is None:
      del self.id_token
    elif not isinstance(id_token, str):
      raise TypeError('id_token must be of type str')
    else:
      self._id_token = id_token

  @property
  def idToken(self):
    # camelCase read-only alias of id_token.
    return self.id_token
|
class CreateModelSigningTokenResponse(KaggleObject):
'''
Attributes:
id_token (str)
'''
def __init__(self):
pass
@property
def id_token(self) -> str:
pass
@id_token.setter
def id_token(self) -> str:
pass
@property
def idToken(self):
pass
| 8 | 1 | 4 | 0 | 4 | 0 | 2 | 0.22 | 1 | 2 | 0 | 0 | 4 | 1 | 4 | 21 | 26 | 4 | 18 | 9 | 10 | 4 | 15 | 6 | 10 | 3 | 2 | 1 | 6 |
140,961 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.JWK
|
class JWK(KaggleObject):
  r"""A single JSON Web Key entry (members per RFC 7517).

  Attributes:
    kty (str)
    alg (str)
    use (str)
    kid (str)
    n (str)
      modulus
    e (str)
      public exponent
  """

  def __init__(self):
    # All string fields start empty until populated.
    self._kty = ""
    self._alg = ""
    self._use = ""
    self._kid = ""
    self._n = ""
    self._e = ""
    self._freeze()

  # Every setter below: None deletes (resets) the field, wrong types raise.

  @property
  def kty(self) -> str:
    return self._kty

  @kty.setter
  def kty(self, kty: str):
    if kty is None:
      del self.kty
    elif not isinstance(kty, str):
      raise TypeError('kty must be of type str')
    else:
      self._kty = kty

  @property
  def alg(self) -> str:
    return self._alg

  @alg.setter
  def alg(self, alg: str):
    if alg is None:
      del self.alg
    elif not isinstance(alg, str):
      raise TypeError('alg must be of type str')
    else:
      self._alg = alg

  @property
  def use(self) -> str:
    return self._use

  @use.setter
  def use(self, use: str):
    if use is None:
      del self.use
    elif not isinstance(use, str):
      raise TypeError('use must be of type str')
    else:
      self._use = use

  @property
  def kid(self) -> str:
    return self._kid

  @kid.setter
  def kid(self, kid: str):
    if kid is None:
      del self.kid
    elif not isinstance(kid, str):
      raise TypeError('kid must be of type str')
    else:
      self._kid = kid

  @property
  def n(self) -> str:
    """modulus"""
    return self._n

  @n.setter
  def n(self, n: str):
    if n is None:
      del self.n
    elif not isinstance(n, str):
      raise TypeError('n must be of type str')
    else:
      self._n = n

  @property
  def e(self) -> str:
    """public exponent"""
    return self._e

  @e.setter
  def e(self, e: str):
    if e is None:
      del self.e
    elif not isinstance(e, str):
      raise TypeError('e must be of type str')
    else:
      self._e = e
|
class JWK(KaggleObject):
'''
Attributes:
kty (str)
alg (str)
use (str)
kid (str)
n (str)
modulus
e (str)
public exponent
'''
def __init__(self):
pass
@property
def kty(self) -> str:
pass
@kty.setter
def kty(self) -> str:
pass
@property
def alg(self) -> str:
pass
@alg.setter
def alg(self) -> str:
pass
@property
def use(self) -> str:
pass
@use.setter
def use(self) -> str:
pass
@property
def kid(self) -> str:
pass
@kid.setter
def kid(self) -> str:
pass
@property
def n(self) -> str:
'''modulus'''
pass
@n.setter
def n(self) -> str:
pass
@property
def e(self) -> str:
'''public exponent'''
pass
@e.setter
def e(self) -> str:
pass
| 26 | 3 | 5 | 0 | 5 | 0 | 2 | 0.17 | 1 | 2 | 0 | 0 | 13 | 6 | 13 | 30 | 101 | 13 | 75 | 32 | 49 | 13 | 63 | 20 | 49 | 3 | 2 | 1 | 25 |
140,962 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.KeysRequest
|
class KeysRequest(KaggleObject):
  r"""Empty request for the JWKS `Keys` endpoint (no fields)."""
  # Removed a leftover `pass` that sat between the docstring and the method:
  # it was dead code once the class gained a body.

  def endpoint(self):
    """Return the REST path for this request."""
    path = '/api/v1/models/signing/keys'
    return path.format_map(self.to_field_map(self))
|
class KeysRequest(KaggleObject):
'''
'''
def endpoint(self):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 1 | 0.4 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 18 | 8 | 1 | 5 | 3 | 3 | 2 | 5 | 3 | 3 | 1 | 2 | 0 | 1 |
140,963 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.KeysResponse
|
class KeysResponse(KaggleObject):
  r"""JWKS document listing the keys for validating Kaggle-signed tokens.

  JWKS specification can be found:
  https://openid.net/specs/draft-jones-json-web-key-03.html//
  https://datatracker.ietf.org/doc/html/rfc7517

  Attributes:
    keys (JWK)
  """

  def __init__(self):
    self._keys = []
    self._freeze()

  @property
  def keys(self) -> Optional[List[Optional['JWK']]]:
    return self._keys

  @keys.setter
  def keys(self, keys: Optional[List[Optional['JWK']]]):
    # None resets the repeated field; otherwise require a list of JWK items.
    if keys is None:
      del self.keys
    elif not isinstance(keys, list):
      raise TypeError('keys must be of type list')
    elif not all([isinstance(t, JWK) for t in keys]):
      raise TypeError('keys must contain only items of type JWK')
    else:
      self._keys = keys
|
class KeysResponse(KaggleObject):
'''
JWKS specification can be found:
https://openid.net/specs/draft-jones-json-web-key-03.html//
https://datatracker.ietf.org/doc/html/rfc7517
Attributes:
keys (JWK)
'''
def __init__(self):
pass
@property
def keys(self) -> Optional[List[Optional['JWK']]]:
pass
@keys.setter
def keys(self) -> Optional[List[Optional['JWK']]]:
pass
| 6 | 1 | 5 | 0 | 5 | 0 | 2 | 0.41 | 1 | 3 | 1 | 0 | 3 | 1 | 3 | 20 | 28 | 4 | 17 | 7 | 11 | 7 | 15 | 5 | 11 | 4 | 2 | 1 | 6 |
140,964 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.WellKnowEndpointRequest
|
class WellKnowEndpointRequest(KaggleObject):
  r"""Empty request for the OpenID Connect discovery document (no fields).

  NOTE(review): "WellKnow" looks like a typo for "WellKnown", but the class
  mirrors the RPC name, so renaming would break the public API surface.
  """
  # Removed a leftover `pass` that sat between the docstring and the method:
  # it was dead code once the class gained a body.

  def endpoint(self):
    """Return the REST path for this request."""
    path = '/api/v1/models/signing/.well-known/openid-configuration'
    return path.format_map(self.to_field_map(self))
|
class WellKnowEndpointRequest(KaggleObject):
'''
'''
def endpoint(self):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 1 | 0.4 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 18 | 8 | 1 | 5 | 3 | 3 | 2 | 5 | 3 | 3 | 1 | 2 | 0 | 1 |
140,965 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.WellKnowEndpointResponse
|
class WellKnowEndpointResponse(KaggleObject):
  r"""OpenID Connect provider metadata served from the well-known endpoint.

  More details about the response can be found at
  https://openid.net/specs/openid-connect-discovery-1_0.html#MetadataContents

  NOTE(review): "WellKnow" looks like a typo for "WellKnown", but the class
  mirrors the RPC name, so renaming would break the public API surface.

  Attributes:
    issuer (str)
    jwks_uri (str)
      The keys endpoint from rpc `ModelApiService.Keys`
    token_endpoint (str)
      The token endpoint from rpc `ModelApiService.CreateModelSigningToken`
    id_token_signing_alg_values_supported (str)
    claims_supported (str)
    response_types_supported (str)
    subject_types_supported (str)
  """
  def __init__(self):
    # Scalar fields default to empty strings, repeated fields to empty
    # lists, matching the other generated types in this file.
    self._issuer = ""
    self._jwks_uri = ""
    self._token_endpoint = ""
    self._id_token_signing_alg_values_supported = []
    self._claims_supported = []
    self._response_types_supported = []
    self._subject_types_supported = []
    self._freeze()  # _freeze() comes from KaggleObject — see base class
  # Setters below follow the file-wide generated pattern: assigning None
  # deletes (resets) the field; any other wrongly-typed value raises
  # TypeError.
  @property
  def issuer(self) -> str:
    return self._issuer
  @issuer.setter
  def issuer(self, issuer: str):
    if issuer is None:
      del self.issuer
      return
    if not isinstance(issuer, str):
      raise TypeError('issuer must be of type str')
    self._issuer = issuer
  @property
  def jwks_uri(self) -> str:
    """The keys endpoint from rpc `ModelApiService.Keys`"""
    return self._jwks_uri
  @jwks_uri.setter
  def jwks_uri(self, jwks_uri: str):
    if jwks_uri is None:
      del self.jwks_uri
      return
    if not isinstance(jwks_uri, str):
      raise TypeError('jwks_uri must be of type str')
    self._jwks_uri = jwks_uri
  @property
  def token_endpoint(self) -> str:
    """The token endpoint from rpc `ModelApiService.CreateModelSigningToken`"""
    return self._token_endpoint
  @token_endpoint.setter
  def token_endpoint(self, token_endpoint: str):
    if token_endpoint is None:
      del self.token_endpoint
      return
    if not isinstance(token_endpoint, str):
      raise TypeError('token_endpoint must be of type str')
    self._token_endpoint = token_endpoint
  @property
  def id_token_signing_alg_values_supported(self) -> Optional[List[str]]:
    return self._id_token_signing_alg_values_supported
  @id_token_signing_alg_values_supported.setter
  def id_token_signing_alg_values_supported(self, id_token_signing_alg_values_supported: Optional[List[str]]):
    if id_token_signing_alg_values_supported is None:
      del self.id_token_signing_alg_values_supported
      return
    if not isinstance(id_token_signing_alg_values_supported, list):
      raise TypeError('id_token_signing_alg_values_supported must be of type list')
    if not all([isinstance(t, str) for t in id_token_signing_alg_values_supported]):
      raise TypeError('id_token_signing_alg_values_supported must contain only items of type str')
    self._id_token_signing_alg_values_supported = id_token_signing_alg_values_supported
  @property
  def claims_supported(self) -> Optional[List[str]]:
    return self._claims_supported
  @claims_supported.setter
  def claims_supported(self, claims_supported: Optional[List[str]]):
    if claims_supported is None:
      del self.claims_supported
      return
    if not isinstance(claims_supported, list):
      raise TypeError('claims_supported must be of type list')
    if not all([isinstance(t, str) for t in claims_supported]):
      raise TypeError('claims_supported must contain only items of type str')
    self._claims_supported = claims_supported
  @property
  def response_types_supported(self) -> Optional[List[str]]:
    return self._response_types_supported
  @response_types_supported.setter
  def response_types_supported(self, response_types_supported: Optional[List[str]]):
    if response_types_supported is None:
      del self.response_types_supported
      return
    if not isinstance(response_types_supported, list):
      raise TypeError('response_types_supported must be of type list')
    if not all([isinstance(t, str) for t in response_types_supported]):
      raise TypeError('response_types_supported must contain only items of type str')
    self._response_types_supported = response_types_supported
  @property
  def subject_types_supported(self) -> Optional[List[str]]:
    return self._subject_types_supported
  @subject_types_supported.setter
  def subject_types_supported(self, subject_types_supported: Optional[List[str]]):
    if subject_types_supported is None:
      del self.subject_types_supported
      return
    if not isinstance(subject_types_supported, list):
      raise TypeError('subject_types_supported must be of type list')
    if not all([isinstance(t, str) for t in subject_types_supported]):
      raise TypeError('subject_types_supported must contain only items of type str')
    self._subject_types_supported = subject_types_supported
  # camelCase read-only aliases of the snake_case properties above.
  @property
  def jwksUri(self):
    return self.jwks_uri
  @property
  def tokenEndpoint(self):
    return self.token_endpoint
  @property
  def idTokenSigningAlgValuesSupported(self):
    return self.id_token_signing_alg_values_supported
  @property
  def claimsSupported(self):
    return self.claims_supported
  @property
  def responseTypesSupported(self):
    return self.response_types_supported
  @property
  def subjectTypesSupported(self):
    return self.subject_types_supported
|
class WellKnowEndpointResponse(KaggleObject):
'''
More details about the response can be found at
https://openid.net/specs/openid-connect-discovery-1_0.html#MetadataContents
Attributes:
issuer (str)
jwks_uri (str)
The keys endpoint from rpc `ModelApiService.Keys`
token_endpoint (str)
The token endpoint from rpc `ModelApiService.CreateModelSigningToken`
id_token_signing_alg_values_supported (str)
claims_supported (str)
response_types_supported (str)
subject_types_supported (str)
'''
def __init__(self):
pass
@property
def issuer(self) -> str:
pass
@issuer.setter
def issuer(self) -> str:
pass
@property
def jwks_uri(self) -> str:
'''The keys endpoint from rpc `ModelApiService.Keys`'''
pass
@jwks_uri.setter
def jwks_uri(self) -> str:
pass
@property
def token_endpoint(self) -> str:
'''The token endpoint from rpc `ModelApiService.CreateModelSigningToken`'''
pass
@token_endpoint.setter
def token_endpoint(self) -> str:
pass
@property
def id_token_signing_alg_values_supported(self) -> Optional[List[str]]:
pass
@id_token_signing_alg_values_supported.setter
def id_token_signing_alg_values_supported(self) -> Optional[List[str]]:
pass
@property
def claims_supported(self) -> Optional[List[str]]:
pass
@claims_supported.setter
def claims_supported(self) -> Optional[List[str]]:
pass
@property
def response_types_supported(self) -> Optional[List[str]]:
pass
@response_types_supported.setter
def response_types_supported(self) -> Optional[List[str]]:
pass
@property
def subject_types_supported(self) -> Optional[List[str]]:
pass
@subject_types_supported.setter
def subject_types_supported(self) -> Optional[List[str]]:
pass
@property
def jwksUri(self):
pass
@property
def tokenEndpoint(self):
pass
@property
def idTokenSigningAlgValuesSupported(self):
pass
@property
def claimsSupported(self):
pass
@property
def responseTypesSupported(self):
pass
@property
def subjectTypesSupported(self):
pass
| 42 | 3 | 4 | 0 | 4 | 0 | 2 | 0.14 | 1 | 3 | 0 | 0 | 21 | 7 | 21 | 38 | 151 | 22 | 113 | 49 | 71 | 16 | 93 | 29 | 71 | 4 | 2 | 1 | 39 |
140,966 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_enums.py
|
src.kagglesdk.models.types.model_enums.GatingAgreementRequestsExpiryStatus
|
class GatingAgreementRequestsExpiryStatus(enum.Enum):
  """Expiry state of a user's gating-agreement consent request.

  Numeric values look like stable wire tags (0 = unset default) —
  do not renumber or reorder.
  """
  GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED = 0
  GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_NOT_EXPIRED = 1
  GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_IS_EXPIRED = 2
|
class GatingAgreementRequestsExpiryStatus(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 4 | 0 | 0 |
140,967 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_enums.py
|
src.kagglesdk.models.types.model_enums.GatingAgreementRequestsReviewStatus
|
class GatingAgreementRequestsReviewStatus(enum.Enum):
  """Review lifecycle state of a gating-agreement consent request."""
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED = 0
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_PENDING = 1
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_ACCEPTED = 2
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_REJECTED = 3
|
class GatingAgreementRequestsReviewStatus(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 5 | 0 | 5 | 5 | 4 | 0 | 5 | 5 | 4 | 0 | 4 | 0 | 0 |
140,968 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_enums.py
|
src.kagglesdk.models.types.model_enums.ListModelsOrderBy
|
class ListModelsOrderBy(enum.Enum):
  """Sort-order options for model listing requests."""
  LIST_MODELS_ORDER_BY_UNSPECIFIED = 0
  LIST_MODELS_ORDER_BY_HOTNESS = 1
  LIST_MODELS_ORDER_BY_DOWNLOAD_COUNT = 2
  LIST_MODELS_ORDER_BY_VOTE_COUNT = 3
  LIST_MODELS_ORDER_BY_NOTEBOOK_COUNT = 4
  LIST_MODELS_ORDER_BY_PUBLISH_TIME = 5
  LIST_MODELS_ORDER_BY_CREATE_TIME = 6
  LIST_MODELS_ORDER_BY_UPDATE_TIME = 7
  LIST_MODELS_ORDER_BY_VIEW_TIME_DESC = 8
|
class ListModelsOrderBy(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 10 | 0 | 10 | 10 | 9 | 0 | 10 | 10 | 9 | 0 | 4 | 0 | 0 |
140,969 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_enums.py
|
src.kagglesdk.models.types.model_enums.ModelFramework
|
class ModelFramework(enum.Enum):
  """ML framework/format a model instance is packaged for.

  Numbering is intentionally non-sequential: values look like stable wire
  tags where later additions take the next free tag rather than positional
  order — do not renumber.
  """
  MODEL_FRAMEWORK_UNSPECIFIED = 0
  MODEL_FRAMEWORK_TENSOR_FLOW_1 = 1
  MODEL_FRAMEWORK_TENSOR_FLOW_2 = 2
  MODEL_FRAMEWORK_TF_LITE = 3
  MODEL_FRAMEWORK_TF_JS = 4
  MODEL_FRAMEWORK_PY_TORCH = 5
  MODEL_FRAMEWORK_JAX = 6
  MODEL_FRAMEWORK_FLAX = 14
  MODEL_FRAMEWORK_PAX = 15
  MODEL_FRAMEWORK_MAX_TEXT = 17
  MODEL_FRAMEWORK_GEMMA_CPP = 18
  MODEL_FRAMEWORK_GGML = 19
  MODEL_FRAMEWORK_GGUF = 21
  MODEL_FRAMEWORK_CORAL = 7
  MODEL_FRAMEWORK_SCIKIT_LEARN = 8
  MODEL_FRAMEWORK_MXNET = 9
  MODEL_FRAMEWORK_ONNX = 10
  MODEL_FRAMEWORK_KERAS = 11
  MODEL_FRAMEWORK_TRANSFORMERS = 16
  MODEL_FRAMEWORK_API = 12
  MODEL_FRAMEWORK_OTHER = 13
  MODEL_FRAMEWORK_TENSOR_RT_LLM = 20
  MODEL_FRAMEWORK_TRITON = 22
|
class ModelFramework(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 24 | 0 | 24 | 24 | 23 | 0 | 24 | 24 | 23 | 0 | 4 | 0 | 0 |
140,970 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_enums.py
|
src.kagglesdk.models.types.model_enums.ModelInstanceType
|
class ModelInstanceType(enum.Enum):
  """Provenance of a model instance: base model vs. Kaggle/external variant."""
  MODEL_INSTANCE_TYPE_UNSPECIFIED = 0
  MODEL_INSTANCE_TYPE_BASE_MODEL = 1
  MODEL_INSTANCE_TYPE_KAGGLE_VARIANT = 2
  MODEL_INSTANCE_TYPE_EXTERNAL_VARIANT = 3
|
class ModelInstanceType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 5 | 0 | 5 | 5 | 4 | 0 | 5 | 5 | 4 | 0 | 4 | 0 | 0 |
140,971 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_enums.py
|
src.kagglesdk.models.types.model_enums.ModelVersionLinkType
|
class ModelVersionLinkType(enum.Enum):
  """Kind of external link attached to a model version (Vertex open/deploy)."""
  MODEL_VERSION_LINK_TYPE_UNSPECIFIED = 0
  MODEL_VERSION_LINK_TYPE_VERTEX_OPEN = 1
  MODEL_VERSION_LINK_TYPE_VERTEX_DEPLOY = 2
|
class ModelVersionLinkType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 4 | 0 | 0 |
140,972 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_service.py
|
src.kagglesdk.models.types.model_service.GetModelMetricsRequest
|
class GetModelMetricsRequest(KaggleObject):
  r"""Request for the usage-metric time series of a model.

  Attributes:
    owner_slug (str)
    model_slug (str)
    start_time (datetime)
      Optional start time for the time series. A year ago by default.
  """
  def __init__(self):
    self._owner_slug = ""
    self._model_slug = ""
    self._start_time = None
    self._freeze()
  # Setters follow the file-wide generated pattern: assigning None deletes
  # (resets) the field; wrongly-typed values raise TypeError.
  @property
  def owner_slug(self) -> str:
    return self._owner_slug
  @owner_slug.setter
  def owner_slug(self, owner_slug: str):
    if owner_slug is None:
      del self.owner_slug
      return
    if not isinstance(owner_slug, str):
      raise TypeError('owner_slug must be of type str')
    self._owner_slug = owner_slug
  @property
  def model_slug(self) -> str:
    return self._model_slug
  @model_slug.setter
  def model_slug(self, model_slug: str):
    if model_slug is None:
      del self.model_slug
      return
    if not isinstance(model_slug, str):
      raise TypeError('model_slug must be of type str')
    self._model_slug = model_slug
  @property
  def start_time(self) -> datetime:
    """Optional start time for the time series. A year ago by default."""
    # `or None` normalizes any falsy placeholder to None; datetime objects
    # are always truthy, so a set value passes through unchanged.
    return self._start_time or None
  @start_time.setter
  def start_time(self, start_time: datetime):
    if start_time is None:
      del self.start_time
      return
    if not isinstance(start_time, datetime):
      raise TypeError('start_time must be of type datetime')
    self._start_time = start_time
  def endpoint(self):
    # owner_slug / model_slug are substituted into the path; start_time is
    # carried as a query parameter by the HTTP layer.
    path = '/api/v1/models/{owner_slug}/{model_slug}/metrics'
    return path.format_map(self.to_field_map(self))
  @staticmethod
  def endpoint_path():
    return '/models/{owner_slug}/{model_slug}/metrics'
|
class GetModelMetricsRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
start_time (datetime)
Optional start time for the time series. A year ago by default.
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def start_time(self) -> datetime:
'''Optional start time for the time series. A year ago by default.'''
pass
@start_time.setter
def start_time(self) -> datetime:
pass
def endpoint(self):
pass
@staticmethod
def endpoint_path():
pass
| 17 | 2 | 4 | 0 | 4 | 0 | 2 | 0.18 | 1 | 3 | 0 | 0 | 8 | 3 | 9 | 26 | 62 | 9 | 45 | 21 | 28 | 8 | 38 | 14 | 28 | 3 | 2 | 1 | 15 |
140,973 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_service.py
|
src.kagglesdk.models.types.model_service.GetModelMetricsResponse
|
class GetModelMetricsResponse(KaggleObject):
  r"""Usage-metric time series returned for a model.

  Attributes:
    metrics (ModelMetric)
  """

  def __init__(self):
    self._metrics = []
    self._freeze()

  @property
  def metrics(self) -> Optional[List[Optional['ModelMetric']]]:
    return self._metrics

  @metrics.setter
  def metrics(self, metrics: Optional[List[Optional['ModelMetric']]]):
    # None resets the repeated field; otherwise require a list of
    # ModelMetric items.
    if metrics is None:
      del self.metrics
    elif not isinstance(metrics, list):
      raise TypeError('metrics must be of type list')
    elif not all([isinstance(t, ModelMetric) for t in metrics]):
      raise TypeError('metrics must contain only items of type ModelMetric')
    else:
      self._metrics = metrics
|
class GetModelMetricsResponse(KaggleObject):
'''
Attributes:
metrics (ModelMetric)
'''
def __init__(self):
pass
@property
def metrics(self) -> Optional[List[Optional['ModelMetric']]]:
pass
@metrics.setter
def metrics(self) -> Optional[List[Optional['ModelMetric']]]:
pass
| 6 | 1 | 5 | 0 | 5 | 0 | 2 | 0.24 | 1 | 3 | 1 | 0 | 3 | 1 | 3 | 20 | 24 | 3 | 17 | 7 | 11 | 4 | 15 | 5 | 11 | 4 | 2 | 1 | 6 |
140,974 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_service.py
|
src.kagglesdk.models.types.model_service.ModelInstanceMetric
|
class ModelInstanceMetric(KaggleObject):
  r"""Per-variation usage counts for a single model instance.

  Attributes:
    variation (str)
    framework (ModelFramework)
    downloads (int)
    notebooks (int)
  """
  def __init__(self):
    self._variation = ""
    self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
    self._downloads = 0
    self._notebooks = 0
    self._freeze()
  # Setters follow the file-wide generated pattern: assigning None deletes
  # (resets) the field; wrongly-typed values raise TypeError.
  @property
  def variation(self) -> str:
    return self._variation
  @variation.setter
  def variation(self, variation: str):
    if variation is None:
      del self.variation
      return
    if not isinstance(variation, str):
      raise TypeError('variation must be of type str')
    self._variation = variation
  @property
  def framework(self) -> 'ModelFramework':
    return self._framework
  @framework.setter
  def framework(self, framework: 'ModelFramework'):
    if framework is None:
      del self.framework
      return
    if not isinstance(framework, ModelFramework):
      raise TypeError('framework must be of type ModelFramework')
    self._framework = framework
  @property
  def downloads(self) -> int:
    return self._downloads
  @downloads.setter
  def downloads(self, downloads: int):
    if downloads is None:
      del self.downloads
      return
    if not isinstance(downloads, int):
      raise TypeError('downloads must be of type int')
    self._downloads = downloads
  @property
  def notebooks(self) -> int:
    return self._notebooks
  @notebooks.setter
  def notebooks(self, notebooks: int):
    if notebooks is None:
      del self.notebooks
      return
    if not isinstance(notebooks, int):
      raise TypeError('notebooks must be of type int')
    self._notebooks = notebooks
|
class ModelInstanceMetric(KaggleObject):
'''
Attributes:
variation (str)
framework (ModelFramework)
downloads (int)
notebooks (int)
'''
def __init__(self):
pass
@property
def variation(self) -> str:
pass
@variation.setter
def variation(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def downloads(self) -> int:
pass
@downloads.setter
def downloads(self) -> int:
pass
@property
def notebooks(self) -> int:
pass
@notebooks.setter
def notebooks(self) -> int:
pass
| 18 | 1 | 5 | 0 | 5 | 0 | 2 | 0.14 | 1 | 4 | 1 | 0 | 9 | 4 | 9 | 26 | 67 | 9 | 51 | 22 | 33 | 7 | 43 | 14 | 33 | 3 | 2 | 1 | 17 |
140,975 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_service.py
|
src.kagglesdk.models.types.model_service.ModelMetric
|
class ModelMetric(KaggleObject):
  r"""One day's usage metrics for a model, with a per-instance breakdown.

  Attributes:
    date (str)
    views (int)
    downloads (int)
    notebooks (int)
    instances (ModelInstanceMetric)
  """
  def __init__(self):
    self._date = ""
    self._views = 0
    self._downloads = 0
    self._notebooks = 0
    self._instances = []
    self._freeze()
  # Setters follow the file-wide generated pattern: assigning None deletes
  # (resets) the field; wrongly-typed values raise TypeError.
  @property
  def date(self) -> str:
    return self._date
  @date.setter
  def date(self, date: str):
    if date is None:
      del self.date
      return
    if not isinstance(date, str):
      raise TypeError('date must be of type str')
    self._date = date
  @property
  def views(self) -> int:
    return self._views
  @views.setter
  def views(self, views: int):
    if views is None:
      del self.views
      return
    if not isinstance(views, int):
      raise TypeError('views must be of type int')
    self._views = views
  @property
  def downloads(self) -> int:
    return self._downloads
  @downloads.setter
  def downloads(self, downloads: int):
    if downloads is None:
      del self.downloads
      return
    if not isinstance(downloads, int):
      raise TypeError('downloads must be of type int')
    self._downloads = downloads
  @property
  def notebooks(self) -> int:
    return self._notebooks
  @notebooks.setter
  def notebooks(self, notebooks: int):
    if notebooks is None:
      del self.notebooks
      return
    if not isinstance(notebooks, int):
      raise TypeError('notebooks must be of type int')
    self._notebooks = notebooks
  @property
  def instances(self) -> Optional[List[Optional['ModelInstanceMetric']]]:
    return self._instances
  @instances.setter
  def instances(self, instances: Optional[List[Optional['ModelInstanceMetric']]]):
    if instances is None:
      del self.instances
      return
    if not isinstance(instances, list):
      raise TypeError('instances must be of type list')
    # Repeated field: every element must itself be a ModelInstanceMetric.
    if not all([isinstance(t, ModelInstanceMetric) for t in instances]):
      raise TypeError('instances must contain only items of type ModelInstanceMetric')
    self._instances = instances
|
class ModelMetric(KaggleObject):
'''
Attributes:
date (str)
views (int)
downloads (int)
notebooks (int)
instances (ModelInstanceMetric)
'''
def __init__(self):
pass
@property
def date(self) -> str:
pass
@date.setter
def date(self) -> str:
pass
@property
def views(self) -> int:
pass
@views.setter
def views(self) -> int:
pass
@property
def downloads(self) -> int:
pass
@downloads.setter
def downloads(self) -> int:
pass
@property
def notebooks(self) -> int:
pass
@notebooks.setter
def notebooks(self) -> int:
pass
@property
def instances(self) -> Optional[List[Optional['ModelInstanceMetric']]]:
pass
@instances.setter
def instances(self) -> Optional[List[Optional['ModelInstanceMetric']]]:
pass
| 22 | 1 | 5 | 0 | 5 | 0 | 2 | 0.12 | 1 | 5 | 1 | 0 | 11 | 5 | 11 | 28 | 84 | 11 | 65 | 27 | 43 | 8 | 55 | 17 | 43 | 4 | 2 | 1 | 22 |
140,976 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_types.py
|
src.kagglesdk.models.types.model_types.BaseModelInstanceInformation
|
class BaseModelInstanceInformation(KaggleObject):
  r"""Identifying information for the base instance a model derives from.

  Attributes:
    id (int)
    owner (Owner)
    model_slug (str)
    instance_slug (str)
    framework (ModelFramework)
  """
  def __init__(self):
    self._id = 0
    self._owner = None
    self._model_slug = ""
    self._instance_slug = ""
    self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
    self._freeze()
  # Setters follow the file-wide generated pattern: assigning None deletes
  # (resets) the field; wrongly-typed values raise TypeError.
  @property
  def id(self) -> int:
    return self._id
  @id.setter
  def id(self, id: int):
    if id is None:
      del self.id
      return
    if not isinstance(id, int):
      raise TypeError('id must be of type int')
    self._id = id
  @property
  def owner(self) -> Optional['Owner']:
    return self._owner
  @owner.setter
  def owner(self, owner: Optional['Owner']):
    if owner is None:
      del self.owner
      return
    if not isinstance(owner, Owner):
      raise TypeError('owner must be of type Owner')
    self._owner = owner
  @property
  def model_slug(self) -> str:
    return self._model_slug
  @model_slug.setter
  def model_slug(self, model_slug: str):
    if model_slug is None:
      del self.model_slug
      return
    if not isinstance(model_slug, str):
      raise TypeError('model_slug must be of type str')
    self._model_slug = model_slug
  @property
  def instance_slug(self) -> str:
    return self._instance_slug
  @instance_slug.setter
  def instance_slug(self, instance_slug: str):
    if instance_slug is None:
      del self.instance_slug
      return
    if not isinstance(instance_slug, str):
      raise TypeError('instance_slug must be of type str')
    self._instance_slug = instance_slug
  @property
  def framework(self) -> 'ModelFramework':
    return self._framework
  @framework.setter
  def framework(self, framework: 'ModelFramework'):
    if framework is None:
      del self.framework
      return
    if not isinstance(framework, ModelFramework):
      raise TypeError('framework must be of type ModelFramework')
    self._framework = framework
|
class BaseModelInstanceInformation(KaggleObject):
'''
Attributes:
id (int)
owner (Owner)
model_slug (str)
instance_slug (str)
framework (ModelFramework)
'''
def __init__(self):
pass
@property
def id(self) -> int:
pass
@id.setter
def id(self) -> int:
pass
@property
def owner(self) -> Optional['Owner']:
pass
@owner.setter
def owner(self) -> Optional['Owner']:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
| 22 | 1 | 5 | 0 | 5 | 0 | 2 | 0.13 | 1 | 5 | 2 | 0 | 11 | 5 | 11 | 28 | 82 | 11 | 63 | 27 | 41 | 8 | 53 | 17 | 41 | 3 | 2 | 1 | 21 |
140,977 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_types.py
|
src.kagglesdk.models.types.model_types.ModelLink
|
class ModelLink(KaggleObject):
r"""
Attributes:
type (ModelVersionLinkType)
url (str)
"""
def __init__(self):
self._type = ModelVersionLinkType.MODEL_VERSION_LINK_TYPE_UNSPECIFIED
self._url = ""
self._freeze()
@property
def type(self) -> 'ModelVersionLinkType':
return self._type
@type.setter
def type(self, type: 'ModelVersionLinkType'):
if type is None:
del self.type
return
if not isinstance(type, ModelVersionLinkType):
raise TypeError('type must be of type ModelVersionLinkType')
self._type = type
@property
def url(self) -> str:
return self._url
@url.setter
def url(self, url: str):
if url is None:
del self.url
return
if not isinstance(url, str):
raise TypeError('url must be of type str')
self._url = url
|
class ModelLink(KaggleObject):
'''
Attributes:
type (ModelVersionLinkType)
url (str)
'''
def __init__(self):
pass
@property
def type(self) -> 'ModelVersionLinkType':
pass
@type.setter
def type(self) -> 'ModelVersionLinkType':
pass
@property
def url(self) -> str:
pass
@url.setter
def url(self) -> str:
pass
| 10 | 1 | 4 | 0 | 4 | 0 | 2 | 0.19 | 1 | 3 | 1 | 0 | 5 | 2 | 5 | 22 | 37 | 5 | 27 | 12 | 17 | 5 | 23 | 8 | 17 | 3 | 2 | 1 | 9 |
140,978 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/users/types/users_enums.py
|
src.kagglesdk.users.types.users_enums.CollaboratorType
|
class CollaboratorType(enum.Enum):
COLLABORATOR_TYPE_UNSPECIFIED = 0
READER = 1
WRITER = 2
OWNER = 3
ADMIN = 4
|
class CollaboratorType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 4 | 0 | 0 |
140,979 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/users/types/users_enums.py
|
src.kagglesdk.users.types.users_enums.UserAchievementTier
|
class UserAchievementTier(enum.Enum):
NOVICE = 0
CONTRIBUTOR = 1
EXPERT = 2
MASTER = 3
GRANDMASTER = 4
STAFF = 5
"""Kaggle admins"""
ORGANIZATION = 11
"""Organizations"""
RECALC = 21
"""Flag user for tier recalculation"""
|
class UserAchievementTier(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 12 | 0 | 9 | 9 | 8 | 3 | 9 | 9 | 8 | 0 | 4 | 0 | 0 |
140,980 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/test/test_client.py
|
test_client.TestClient
|
class TestClient(unittest.TestCase):
def setUp(self):
print("setup class:%s" % self)
def tearDown(self):
print("teardown class:TestStuff")
# Environment
def test_kaggle_environment(self):
os.environ['KAGGLE_API_ENVIRONMENT'] = 'PROD'
env = kaggle_env.get_env()
self.assertEqual(env, KaggleEnv.PROD)
endpoint = kaggle_env.get_endpoint(env)
self.assertEqual(endpoint, 'https://www.kaggle.com')
# Client
def test_kaggle_client(self):
client = KaggleClient(
env=KaggleEnv.PROD,
verbose=False,
username='dinosaur',
password='xxxxxxxxxxxx')
self.assertEqual(client.username, 'dinosaur')
self.assertEqual(client.password, 'xxxxxxxxxxxx')
self.assertEqual(client.http_client()._endpoint, 'https://www.kaggle.com')
self.assertEqual(client.http_client()._verbose, False)
|
class TestClient(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_kaggle_environment(self):
pass
def test_kaggle_client(self):
pass
| 5 | 0 | 6 | 1 | 5 | 0 | 1 | 0.1 | 1 | 2 | 2 | 0 | 4 | 0 | 4 | 76 | 32 | 9 | 21 | 8 | 16 | 2 | 17 | 8 | 12 | 1 | 2 | 0 | 4 |
140,981 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/test/test_client.py
|
test_client.TestClient
|
class TestClient(unittest.TestCase):
def setUp(self):
print("setup class:%s" % self)
def tearDown(self):
print("teardown class:TestStuff")
# Environment
def test_kaggle_environment(self):
os.environ['KAGGLE_API_ENVIRONMENT'] = 'PROD'
env = kaggle_env.get_env()
self.assertEqual(env, KaggleEnv.PROD)
endpoint = kaggle_env.get_endpoint(env)
self.assertEqual(endpoint, 'https://www.kaggle.com')
# Client
def test_kaggle_client(self):
client = KaggleClient(
env=KaggleEnv.PROD,
verbose=False,
username='dinosaur',
password='xxxxxxxxxxxx')
self.assertEqual(client.username, 'dinosaur')
self.assertEqual(client.password, 'xxxxxxxxxxxx')
self.assertEqual(client.http_client()._endpoint, 'https://www.kaggle.com')
self.assertEqual(client.http_client()._verbose, False)
|
class TestClient(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_kaggle_environment(self):
pass
def test_kaggle_client(self):
pass
| 5 | 0 | 6 | 1 | 5 | 0 | 1 | 0.1 | 1 | 2 | 2 | 0 | 4 | 0 | 4 | 76 | 32 | 9 | 21 | 8 | 16 | 2 | 17 | 8 | 12 | 1 | 2 | 0 | 4 |
140,982 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/integration_tests/test_models.py
|
test_models.TestModels
|
class TestModels(unittest.TestCase):
def setUp(self):
self.api = KaggleApi()
self.api.authenticate()
def test_list_models(self) -> None:
models = self.api.model_list()
self.assertGreater(len(models), 0)
def test_get_model(self) -> None:
model = self.api.model_get(MODEL_HANDLE)
self.assertEqual(MODEL_ID, model.id)
|
class TestModels(unittest.TestCase):
def setUp(self):
pass
def test_list_models(self) -> None:
pass
def test_get_model(self) -> None:
pass
| 4 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 3 | 1 | 3 | 75 | 12 | 2 | 10 | 7 | 6 | 0 | 10 | 7 | 6 | 1 | 2 | 0 | 3 |
140,983 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/tests/unit_tests.py
|
unit_tests.TestKaggleApi
|
class TestKaggleApi(unittest.TestCase):
version_number, meta_file = initialize_dataset_metadata_file(
dataset_directory)
# Initialized from Response objects.
competition_file = None
kernel_slug = ''
kernel_metadata_path = ''
dataset = ''
dataset_file = None
model_instance = ''
model_meta_data = None
model_metadata_file = ''
instance_metadata_file = ''
# Inbox
def test_files_upload(self):
a = 2 # Change this value to run this test.
if a - 1 == 1:
return # Only run this test when needed because it uploads an inbox file.
filename = 'tmp_file.test'
with open(filename, 'w') as f:
f.write('test')
try:
api.files_upload_cli([filename], 'kaggle-api-test', False, False)
finally:
if os.path.exists('tmp_file.test'):
os.remove('tmp_file.test')
# Kernels
def test_kernels_a_list(self):
try:
kernels = api.kernels_list(sort_by='dateCreated', user='stevemessick', language='python')
self.assertGreater(len(kernels), 0)
api.kernels_list_cli(user='stevemessick', csv_display=True)
except ApiException as e:
self.fail(f"kernels_list failed: {e}")
def test_kernels_b_initialize(self):
try:
self.kernel_metadata_path = api.kernels_initialize(kernel_directory)
self.assertTrue(os.path.exists(self.kernel_metadata_path))
except ApiException as e:
self.fail(f"kernels_initialize failed: {e}")
def test_kernels_c_push(self):
if self.kernel_metadata_path == '':
self.test_kernels_b_initialize()
try:
md = update_kernel_metadata_file(self.kernel_metadata_path, kernel_name)
push_result = api.kernels_push(kernel_directory)
self.assertIsNotNone(push_result.ref)
self.assertTrue(isinstance(push_result.version_number, int))
self.kernel_slug = md['id']
time.sleep(30)
except ApiException as e:
self.fail(f"kernels_push failed: {e}")
def test_kernels_d_status(self):
if self.kernel_slug == '':
self.test_kernels_c_push()
try:
status_result = api.kernels_status(self.kernel_slug)
start_time = time.time()
# If this loop is stuck because the kernel stays queued, go to the Kaggle website
# on localhost and cancel the active event. That will exit the loop, but you may
# need to clean up other active kernels to get it to run again.
count = 0
while status_result.status == 'running' or status_result.status == 'queued' or count >= max_status_tries:
time.sleep(5)
status_result = api.kernels_status(self.kernel_slug)
print(status_result.status)
if count >= max_status_tries:
self.fail(f"Could not get kernel status in allowed trys. Status: {status_result.status}")
end_time = time.time()
print(f'kernels_status ready in {end_time-start_time}s')
except ApiException as e:
self.fail(f"kernels_status failed: {e}")
def test_kernels_e_list_files(self):
if self.kernel_slug == '':
self.test_kernels_c_push()
try:
fs = api.kernels_list_files(self.kernel_slug)
# TODO Make sure the test uses a kernel that has at least one file.
self.assertGreaterEqual(len(fs.files), 0)
except ApiException as e:
self.fail(f"kernels_list_files failed: {e}")
def test_kernels_f_output(self):
fs = []
if self.kernel_slug == '':
self.test_kernels_c_push()
try:
fs, token = api.kernels_output(self.kernel_slug, 'kernel/tmp')
self.assertIsInstance(fs, list)
if token:
print(token)
except ApiException as e:
self.fail(f"kernels_output failed: {e}")
finally:
for file in fs:
if os.path.exists(file):
os.remove(file)
if os.path.exists('kernel/tmp'):
os.rmdir('kernel/tmp')
def test_kernels_g_pull(self):
if self.kernel_metadata_path == '':
self.test_kernels_c_push()
fs = ''
try:
fs = api.kernels_pull(f'{test_user}/testing', 'kernel/tmp', metadata=True)
self.assertTrue(os.path.exists(fs))
with open(f'{fs}/{self.kernel_metadata_path.split("/")[1]}') as f:
metadata = json.load(f)
[
self.assertTrue(metadata.get(f))
for f in ['id','id_no','title','code_file','language','kernel_type']
]
[
self.assertTrue(metadata.get(f) is not None)
for f in ['is_private','enable_gpu','enable_tpu','enable_internet','keywords','dataset_sources','kernel_sources','competition_sources','model_sources']
]
except ApiException as e:
self.fail(f"kernels_pull failed: {e}")
finally:
for file in [
f'{fs}/{self.kernel_metadata_path.split("/")[1]}',
f'{fs}/{kernel_name}.ipynb'
]:
if os.path.exists(file):
os.remove(file)
if os.path.exists(fs):
os.rmdir(fs)
# Competitions
def test_competition_a_list(self):
try:
competitions = api.competitions_list(group='general')
self.assertGreater(len(competitions), 0)
self.assertLessEqual(len(competitions), 20)
[
self.assertTrue(hasattr(competitions[0], api.camel_to_snake(f)))
for f in api.competition_fields
]
competitions = api.competitions_list(page=2, category='gettingStarted', sort_by='prize')
self.assertEqual(len(competitions), 0)
except ApiException as e:
self.fail(f"competitions_list failed: {e}")
def test_competition_b_submit(self):
try:
self.skip_submissions = False
result = api.competition_submit_cli(up_file, description, competition)
if not result or not result.startswith('Successfully submitted'):
self.fail(f'competition_submit failed: {result}')
except HTTPError:
# Handle submission limit reached gracefully (potentially skip the test)
print('Competition submission limit reached for the day')
self.skip_submissions = True
pass
except ApiException as e:
self.fail(f"competition_submit failed: {e}")
def test_competition_c_submissions(self):
self.test_competition_b_submit()
try:
submissions = api.competition_submissions(competition)
self.assertIsInstance(submissions, list)
if not self.skip_submissions:
self.assertGreater(len(submissions), 0)
[
self.assertTrue(hasattr(submissions[0], api.camel_to_snake(f)))
for f in api.submission_fields
]
except ApiException as e:
self.fail(f"competition_submissions failed: {e}")
def test_competition_d_list_files(self):
try:
competition_files = api.competition_list_files(competition).files
self.assertIsInstance(competition_files, list)
self.assertGreater(len(competition_files), 0)
self.competition_file = competition_files[0]
[
self.assertTrue(hasattr(competition_files[0], api.camel_to_snake(f)))
for f in api.competition_file_fields
]
except ApiException as e:
self.fail(f"competition_list_files failed: {e}")
def test_competition_e_download_file(self):
if self.competition_file is None:
self.test_competition_d_list_files()
try:
api.competition_download_file(
competition, self.competition_file.ref, force=True)
self.assertTrue(os.path.exists(self.competition_file.ref))
api.competition_download_file(
competition, self.competition_file.ref, force=False)
except ApiException as e:
self.fail(f"competition_download_file failed: {e}")
finally:
if os.path.exists(self.competition_file.ref):
os.remove(self.competition_file.ref)
def test_competition_f_download_files(self):
try:
api.competition_download_files(competition)
self.assertTrue(os.path.exists(f'{competition}.zip'))
self.assertTrue(os.path.getsize(f'{competition}.zip') > 0)
except ApiException as e:
self.fail(f"competition_download_files failed: {e}")
finally:
if os.path.exists(f'{competition}.zip'):
os.remove(f'{competition}.zip')
def test_competition_g_leaderboard_view(self):
try:
result = api.competition_leaderboard_view(competition)
self.assertIsInstance(result, list)
self.assertGreater(len(result), 0)
[
self.assertTrue(hasattr(result[0], api.camel_to_snake(f)))
for f in api.competition_leaderboard_fields
]
except ApiException as e:
self.fail(f"competition_leaderboard_view failed: {e}")
def test_competition_h_leaderboard_download(self):
try:
api.competition_leaderboard_download(competition, 'tmp')
self.assertTrue(os.path.exists(f'tmp/{competition}.zip'))
except ApiException as e:
self.fail(f"competition_leaderboard_download failed: {e}")
finally:
if os.path.exists(f'tmp/{competition}.zip'):
os.remove(f'tmp/{competition}.zip')
if os.path.exists('tmp'):
os.rmdir('tmp')
# Datasets
def test_dataset_a_list(self):
try:
datasets = api.dataset_list(sort_by='votes')
self.assertGreater(len(datasets), 0)
self.dataset = str(datasets[0].ref)
[
self.assertTrue(hasattr(datasets[0], api.camel_to_snake(f)))
for f in api.dataset_fields
]
datasets = api.dataset_list(license_name='other', file_type='bigQuery')
self.assertGreater(len(datasets), 10)
except ApiException as e:
self.fail(f"dataset_list failed: {e}")
def test_dataset_b_metadata(self):
if self.dataset == '':
self.test_dataset_a_list()
m = ''
try:
m = api.dataset_metadata(self.dataset, dataset_directory)
self.assertTrue(os.path.exists(m))
except ApiException as e:
self.fail(f"dataset_metadata failed: {e}")
def test_dataset_c_metadata_update(self):
if self.dataset == '':
self.test_dataset_a_list()
if not os.path.exists(
os.path.join(dataset_directory, api.DATASET_METADATA_FILE)):
self.test_dataset_b_metadata()
try:
api.dataset_metadata_update(self.dataset, dataset_directory)
# TODO Make the API method return something, and not exit when it fails.
except ApiException as e:
self.fail(f"dataset_metadata_update failed: {e}")
def test_dataset_d_list_files(self):
if self.dataset == '':
self.test_dataset_a_list()
try:
response = api.dataset_list_files(self.dataset)
self.assertIsInstance(response.dataset_files, list)
self.assertGreater(len(response.dataset_files), 0)
self.dataset_file = response.dataset_files[0]
[
self.assertTrue(hasattr(self.dataset_file, api.camel_to_snake(f)))
for f in api.dataset_file_fields
]
api.dataset_list_files_cli(self.dataset)
except ApiException as e:
self.fail(f"dataset_list_files failed: {e}")
def test_dataset_e_status(self):
if self.dataset == '':
self.test_dataset_a_list()
try:
status = api.dataset_status(self.dataset)
self.assertIn(status, ['ready', 'pending', 'error'])
except ApiException as e:
self.fail(f"dataset_status failed: {e}")
def test_dataset_f_download_file(self):
if self.dataset_file is None:
self.test_dataset_d_list_files()
try:
api.dataset_download_file(self.dataset, self.dataset_file.name, 'tmp')
self.assertTrue(os.path.exists(f'tmp/{self.dataset_file.name}'))
except ApiException as e:
self.fail(f"dataset_download_file failed: {e}")
finally:
if os.path.exists(f'tmp/{self.dataset_file.name}'):
os.remove(f'tmp/{self.dataset_file.name}')
if os.path.exists('tmp'):
os.rmdir('tmp')
def test_dataset_g_download_files(self):
if self.dataset == '':
self.test_dataset_a_list()
ds = ['a', 'b']
try:
api.dataset_download_files(self.dataset)
ds = self.dataset.split('/')
self.assertTrue(os.path.exists(f'{ds[1]}.zip'))
except ApiException as e:
self.fail(f"dataset_download_files failed: {e}")
finally:
if os.path.exists(f'{ds[1]}.zip'):
os.remove(f'{ds[1]}.zip')
def test_dataset_h_initialize(self):
try:
api.dataset_initialize('dataset')
self.assertTrue(
os.path.exists(
os.path.join(dataset_directory, api.DATASET_METADATA_FILE)))
except ApiException as e:
self.fail(f"dataset_initialize failed: {e}")
def test_dataset_i_create_new(self):
if not os.path.exists(
os.path.join(dataset_directory, api.DATASET_METADATA_FILE)):
self.test_dataset_h_initialize()
try:
update_dataset_metadata_file(self.meta_file, dataset_name,
self.version_number)
new_dataset = api.dataset_create_new(dataset_directory)
self.assertIsNotNone(new_dataset)
if new_dataset.error is not None:
if 'already in use' in new_dataset.error:
print(new_dataset.error) # This is likely to happen, and that's OK.
self.skip_create_version = True
else:
self.fail(f"dataset_create_new failed: {new_dataset.error}")
except ApiException as e:
self.fail(f"dataset_create_new failed: {e}")
def test_dataset_j_create_version(self):
if not os.path.exists(
os.path.join(dataset_directory, api.DATASET_METADATA_FILE)):
self.test_dataset_i_create_new()
try:
new_version = api.dataset_create_version(dataset_directory, "Notes")
self.assertIsNotNone(new_version)
self.assertTrue(new_version.error == '')
self.assertFalse(new_version.ref == '')
except ApiException as e:
self.fail(f"dataset_create_version failed: {e}")
# Models
def test_model_a_list(self):
try:
ms = api.model_list()
self.assertIsInstance(ms, list)
self.assertGreater(len(ms), 0)
[self.assertTrue(hasattr(ms[0], api.camel_to_snake(f)))
for f in api.model_fields]
except ApiException as e:
self.fail(f"models_list failed: {e}")
def test_model_b_initialize(self):
try:
self.model_metadata_file = api.model_initialize(model_directory)
self.assertTrue(os.path.exists(self.model_metadata_file))
self.model_meta_data = update_model_metadata(self.model_metadata_file,
test_user, model_title,
model_title)
self.model_instance = f'{test_user}/{self.model_meta_data["slug"]}/{framework_name}/{instance_name}'
except ApiException as e:
self.fail(f"model_initialize failed: {e}")
def test_model_c_create_new(self):
if self.model_metadata_file == '':
self.test_model_b_initialize()
try:
model = api.model_create_new(model_directory)
if model.error:
if 'already used' in model.error:
delete_response = api.model_delete(f'{test_user}/{model_title}', True)
if delete_response.error:
self.fail(delete_response.error)
else:
model = api.model_create_new(model_directory)
if model.error:
self.fail(model.error)
else:
self.fail(model.error)
self.assertIsNotNone(model.ref)
self.assertGreater(len(model.ref), 0)
[self.assertTrue(hasattr(model, api.camel_to_snake(f)))
for f in ['id', 'url']]
except ApiException as e:
self.fail(f"model_create_new failed: {e}")
def test_model_d_get(self):
try:
model_data = api.model_get(f'{test_user}/{model_title}')
self.assertIsNotNone(model_data.ref)
self.assertGreater(len(model_data.ref), 0)
self.assertEqual(model_data.title, model_title)
[self.assertTrue(hasattr(model_data, api.camel_to_snake(f)))
for f in api.model_all_fields]
except ApiException as e:
self.fail(f"model_get failed: {e}")
def test_model_e_update(self):
if self.model_metadata_file == '':
self.test_model_c_create_new()
try:
update_response = api.model_update(model_directory)
self.assertEqual(len(update_response.error), 0)
self.assertIsNotNone(update_response.ref)
self.assertGreater(len(update_response.ref), 0)
except ApiException as e:
self.fail(f"model_update failed: {e}")
# Model instances
def test_model_instance_a_initialize(self):
try:
self.instance_metadata_file = api.model_instance_initialize(
model_inst_directory)
self.assertTrue(os.path.exists(self.instance_metadata_file))
except ApiException as e:
self.fail(f"model_instance_initialize failed: {e}")
def test_model_instance_b_create(self, check_result: bool = True):
if self.model_meta_data is None:
self.test_model_b_initialize()
if self.instance_metadata_file == '':
self.test_model_instance_a_initialize()
try:
update_model_instance_metadata(self.instance_metadata_file, test_user,
self.model_meta_data['slug'],
instance_name, framework_name)
inst_create_resp = api.model_instance_create(model_inst_directory)
if check_result:
if inst_create_resp.error:
if 'already exists' in inst_create_resp.error:
delete_response = api.model_instance_delete(f'{test_user}/{model_title}', True)
if delete_response.error:
self.fail(delete_response.error)
else:
inst_create_resp = api.model_instance_create(model_inst_directory)
if inst_create_resp.error:
self.fail(inst_create_resp.error)
else:
self.fail(inst_create_resp.error)
self.assertIsNotNone(inst_create_resp.ref)
self.assertGreater(len(inst_create_resp.ref), 0)
except ApiException as e:
self.fail(f"model_instance_create failed: {e}")
def test_model_instance_b_wait_after_create(self):
# When running all tests sequentially, give the new model some time to stabilize.
time.sleep(10) # TODO: Find a better way to detect model stability.
def test_model_instance_c_get(self):
if self.model_instance == '':
self.test_model_b_initialize()
try:
inst_get_resp = api.model_instance_get(self.model_instance)
self.assertIsNotNone(inst_get_resp.url)
self.assertGreater(len(inst_get_resp.url), 0)
os.makedirs('model/tmp', exist_ok=True)
api.model_instance_get_cli(self.model_instance, 'model/tmp')
except ApiException as e:
self.fail(f"model_instance_get failed: {e}")
finally:
os.remove('model/tmp/model-instance-metadata.json')
os.rmdir('model/tmp')
def test_model_instance_d_files(self):
if self.model_instance == '':
self.test_model_b_initialize()
try:
inst_files_resp = api.model_instance_files(self.model_instance)
self.assertIsInstance(inst_files_resp.files, list)
self.assertGreater(len(inst_files_resp.files), 0)
[self.assertTrue(hasattr(inst_files_resp.files[0], api.camel_to_snake(f)))
for f in api.model_file_fields]
except ApiException as e:
self.fail(f"model_instance_files failed: {e}")
def test_model_instance_e_update(self):
if self.model_instance == '':
self.test_model_b_initialize()
self.test_model_instance_a_initialize()
try:
self.test_model_c_create_new()
except AssertionError:
pass
try:
update_model_instance_metadata(self.instance_metadata_file, test_user,
self.model_meta_data['slug'],
instance_name, framework_name)
except AssertionError:
pass
self.test_model_instance_b_create(check_result=False)
self.test_model_instance_b_wait_after_create()
try:
inst_update_resp = api.model_instance_update(model_inst_directory)
self.assertIsNotNone(inst_update_resp)
self.assertIsNotNone(inst_update_resp.ref)
self.assertGreater(len(inst_update_resp.ref), 0)
[self.assertTrue(hasattr(inst_update_resp, api.camel_to_snake(f)))
for f in ['error', 'id', 'ref', 'url']]
except ApiException as e:
self.fail(f"model_instance_update failed: {e}")
# Model instance versions
def test_model_instance_version_a_create(self):
if self.model_instance == '':
self.test_model_b_initialize()
try:
version_metadata_resp = api.model_instance_version_create(
self.model_instance, model_inst_vers_directory)
self.assertIsNotNone(version_metadata_resp.ref)
[self.assertTrue(hasattr(version_metadata_resp, api.camel_to_snake(f)))
for f in ['id', 'url', 'error']]
except ApiException as e:
self.fail(f"model_instance_version_create failed: {e}")
def test_model_instance_version_b_files(self):
if self.model_instance == '':
self.test_model_b_initialize()
try:
r = api.model_instance_version_files(f'{self.model_instance}/1')
self.assertIsInstance(r.files, list)
self.assertGreater(len(r.files), 0)
api.model_instance_version_files_cli(f'{self.model_instance}/1')
except ApiException as e:
self.fail(f"model_instance_version_files failed: {e}")
def test_model_instance_version_c_download(self):
if self.model_instance == '':
self.test_model_b_initialize()
version_file = ''
try:
version_file = api.model_instance_version_download(
f'{self.model_instance}/1', 'tmp', force=True)
self.assertTrue(os.path.exists(version_file))
except KeyError:
pass # TODO Create a version that has content.
except ApiException as e:
self.fail(f"model_instance_version_download failed: {e}")
finally:
if os.path.exists(version_file):
os.remove(version_file)
if os.path.exists('tmp'):
os.rmdir('tmp')
# Model deletion
def test_model_instance_version_d_delete(self):
if self.model_instance == '':
self.test_model_b_initialize()
try:
version_delete_resp = api.model_instance_version_delete(
f'{self.model_instance}/1', True)
self.assertEqual(len(version_delete_resp.error), 0, msg=version_delete_resp.error)
except ApiException as e:
self.fail(f"model_instance_version_delete failed: {e}")
def test_model_instance_x_delete(self):
if self.model_instance == '':
self.test_model_b_initialize()
try:
inst_update_resp = api.model_instance_delete(self.model_instance, True)
self.assertIsNotNone(inst_update_resp)
if len(inst_update_resp.error):
print(inst_update_resp.error)
self.assertEqual(len(inst_update_resp.error), 0)
except ApiException as e:
self.fail(f"model_instance_delete failed: {e}")
def test_model_z_delete(self):
try:
delete_response = api.model_delete(f'{test_user}/{model_title}', True)
if delete_response.error:
self.fail(delete_response.error)
else:
pass
except ApiException as e:
self.fail(f"model_delete failed: {e}")
|
class TestKaggleApi(unittest.TestCase):
def test_files_upload(self):
pass
def test_kernels_a_list(self):
pass
def test_kernels_b_initialize(self):
pass
def test_kernels_c_push(self):
pass
def test_kernels_d_status(self):
pass
def test_kernels_e_list_files(self):
pass
def test_kernels_f_output(self):
pass
def test_kernels_g_pull(self):
pass
def test_competition_a_list(self):
pass
def test_competition_b_submit(self):
pass
def test_competition_c_submissions(self):
pass
def test_competition_d_list_files(self):
pass
def test_competition_e_download_file(self):
pass
def test_competition_f_download_files(self):
pass
def test_competition_g_leaderboard_view(self):
pass
def test_competition_h_leaderboard_download(self):
pass
def test_dataset_a_list(self):
pass
def test_dataset_b_metadata(self):
pass
def test_dataset_c_metadata_update(self):
pass
def test_dataset_d_list_files(self):
pass
def test_dataset_e_status(self):
pass
def test_dataset_f_download_file(self):
pass
def test_dataset_g_download_files(self):
pass
def test_dataset_h_initialize(self):
pass
def test_dataset_i_create_new(self):
pass
def test_dataset_j_create_version(self):
pass
def test_model_a_list(self):
pass
def test_model_b_initialize(self):
pass
def test_model_c_create_new(self):
pass
def test_model_d_get(self):
pass
def test_model_e_update(self):
pass
def test_model_instance_a_initialize(self):
pass
def test_model_instance_b_create(self, check_result: bool = True):
pass
def test_model_instance_b_wait_after_create(self):
pass
def test_model_instance_c_get(self):
pass
def test_model_instance_d_files(self):
pass
def test_model_instance_e_update(self):
pass
def test_model_instance_version_a_create(self):
pass
def test_model_instance_version_b_files(self):
pass
def test_model_instance_version_c_download(self):
pass
def test_model_instance_version_d_delete(self):
pass
def test_model_instance_x_delete(self):
pass
def test_model_z_delete(self):
pass
| 44 | 0 | 13 | 0 | 12 | 0 | 3 | 0.04 | 1 | 7 | 0 | 0 | 43 | 2 | 43 | 115 | 614 | 53 | 545 | 143 | 501 | 21 | 477 | 100 | 433 | 9 | 2 | 6 | 150 |
140,984 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiUploadModelFileResponse
|
class ApiUploadModelFileResponse(KaggleObject):
r"""
Attributes:
token (str)
Opaque string token used to reference the new BlobFile.
create_url (str)
URL to use to start the upload
"""
def __init__(self):
self._token = ""
self._create_url = ""
self._freeze()
@property
def token(self) -> str:
"""Opaque string token used to reference the new BlobFile."""
return self._token
@token.setter
def token(self, token: str):
if token is None:
del self.token
return
if not isinstance(token, str):
raise TypeError('token must be of type str')
self._token = token
@property
def create_url(self) -> str:
"""URL to use to start the upload"""
return self._create_url
@create_url.setter
def create_url(self, create_url: str):
if create_url is None:
del self.create_url
return
if not isinstance(create_url, str):
raise TypeError('create_url must be of type str')
self._create_url = create_url
@property
def createUrl(self):
return self.create_url
|
class ApiUploadModelFileResponse(KaggleObject):
'''
Attributes:
token (str)
Opaque string token used to reference the new BlobFile.
create_url (str)
URL to use to start the upload
'''
def __init__(self):
pass
@property
def token(self) -> str:
'''Opaque string token used to reference the new BlobFile.'''
pass
@token.setter
def token(self) -> str:
pass
@property
def create_url(self) -> str:
'''URL to use to start the upload'''
pass
@create_url.setter
def create_url(self) -> str:
pass
@property
def createUrl(self):
pass
| 12 | 3 | 4 | 0 | 4 | 0 | 2 | 0.3 | 1 | 2 | 0 | 0 | 6 | 2 | 6 | 23 | 45 | 6 | 30 | 14 | 18 | 9 | 25 | 9 | 18 | 3 | 2 | 1 | 10 |
140,985 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/datasets/types/dataset_api_service.py
|
src.kagglesdk.datasets.types.dataset_api_service.ApiUploadDirectoryInfo
|
class ApiUploadDirectoryInfo(KaggleObject):
r"""
Attributes:
name (str)
directories (ApiUploadDirectoryInfo)
files (ApiDatasetNewFile)
"""
def __init__(self):
self._name = ""
self._directories = []
self._files = []
self._freeze()
@property
def name(self) -> str:
return self._name
@name.setter
def name(self, name: str):
if name is None:
del self.name
return
if not isinstance(name, str):
raise TypeError('name must be of type str')
self._name = name
@property
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
return self._directories
@directories.setter
def directories(self, directories: Optional[List[Optional['ApiUploadDirectoryInfo']]]):
if directories is None:
del self.directories
return
if not isinstance(directories, list):
raise TypeError('directories must be of type list')
if not all([isinstance(t, ApiUploadDirectoryInfo) for t in directories]):
raise TypeError('directories must contain only items of type ApiUploadDirectoryInfo')
self._directories = directories
@property
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
return self._files
@files.setter
def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]):
if files is None:
del self.files
return
if not isinstance(files, list):
raise TypeError('files must be of type list')
if not all([isinstance(t, ApiDatasetNewFile) for t in files]):
raise TypeError('files must contain only items of type ApiDatasetNewFile')
self._files = files
|
class ApiUploadDirectoryInfo(KaggleObject):
'''
Attributes:
name (str)
directories (ApiUploadDirectoryInfo)
files (ApiDatasetNewFile)
'''
def __init__(self):
pass
@property
def name(self) -> str:
pass
@name.setter
def name(self) -> str:
pass
@property
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
pass
@directories.setter
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
pass
@property
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
pass
@files.setter
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
pass
| 14 | 1 | 5 | 0 | 5 | 0 | 2 | 0.14 | 1 | 4 | 1 | 0 | 7 | 3 | 7 | 24 | 56 | 7 | 43 | 17 | 29 | 6 | 37 | 11 | 29 | 4 | 2 | 1 | 15 |
140,986 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiUploadModelFileRequest
|
class ApiUploadModelFileRequest(KaggleObject):
r"""
Attributes:
file_name (str)
content_length (int)
last_modified_epoch_seconds (int)
"""
def __init__(self):
self._file_name = ""
self._content_length = 0
self._last_modified_epoch_seconds = 0
self._freeze()
@property
def file_name(self) -> str:
return self._file_name
@file_name.setter
def file_name(self, file_name: str):
if file_name is None:
del self.file_name
return
if not isinstance(file_name, str):
raise TypeError('file_name must be of type str')
self._file_name = file_name
@property
def content_length(self) -> int:
return self._content_length
@content_length.setter
def content_length(self, content_length: int):
if content_length is None:
del self.content_length
return
if not isinstance(content_length, int):
raise TypeError('content_length must be of type int')
self._content_length = content_length
@property
def last_modified_epoch_seconds(self) -> int:
return self._last_modified_epoch_seconds
@last_modified_epoch_seconds.setter
def last_modified_epoch_seconds(self, last_modified_epoch_seconds: int):
if last_modified_epoch_seconds is None:
del self.last_modified_epoch_seconds
return
if not isinstance(last_modified_epoch_seconds, int):
raise TypeError('last_modified_epoch_seconds must be of type int')
self._last_modified_epoch_seconds = last_modified_epoch_seconds
def endpoint(self):
path = '/api/v1/models/upload/file/{content_length}/{last_modified_epoch_seconds}'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
|
class ApiUploadModelFileRequest(KaggleObject):
'''
Attributes:
file_name (str)
content_length (int)
last_modified_epoch_seconds (int)
'''
def __init__(self):
pass
@property
def file_name(self) -> str:
pass
@file_name.setter
def file_name(self) -> str:
pass
@property
def content_length(self) -> int:
pass
@content_length.setter
def content_length(self) -> int:
pass
@property
def last_modified_epoch_seconds(self) -> int:
pass
@last_modified_epoch_seconds.setter
def last_modified_epoch_seconds(self) -> int:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
| 17 | 1 | 4 | 0 | 4 | 0 | 2 | 0.13 | 1 | 3 | 0 | 0 | 8 | 3 | 9 | 26 | 61 | 10 | 45 | 21 | 28 | 6 | 38 | 14 | 28 | 3 | 2 | 1 | 15 |
140,987 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiUpdateModelRequest
|
class ApiUpdateModelRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
title (str)
subtitle (str)
is_private (bool)
description (str)
publish_time (datetime)
provenance_sources (str)
update_mask (FieldMask)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._title = ""
self._subtitle = None
self._is_private = False
self._description = None
self._publish_time = None
self._provenance_sources = None
self._update_mask = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def title(self) -> str:
return self._title
@title.setter
def title(self, title: str):
if title is None:
del self.title
return
if not isinstance(title, str):
raise TypeError('title must be of type str')
self._title = title
@property
def subtitle(self) -> str:
return self._subtitle or ""
@subtitle.setter
def subtitle(self, subtitle: str):
if subtitle is None:
del self.subtitle
return
if not isinstance(subtitle, str):
raise TypeError('subtitle must be of type str')
self._subtitle = subtitle
@property
def is_private(self) -> bool:
return self._is_private
@is_private.setter
def is_private(self, is_private: bool):
if is_private is None:
del self.is_private
return
if not isinstance(is_private, bool):
raise TypeError('is_private must be of type bool')
self._is_private = is_private
@property
def description(self) -> str:
return self._description or ""
@description.setter
def description(self, description: str):
if description is None:
del self.description
return
if not isinstance(description, str):
raise TypeError('description must be of type str')
self._description = description
@property
def publish_time(self) -> datetime:
return self._publish_time
@publish_time.setter
def publish_time(self, publish_time: datetime):
if publish_time is None:
del self.publish_time
return
if not isinstance(publish_time, datetime):
raise TypeError('publish_time must be of type datetime')
self._publish_time = publish_time
@property
def provenance_sources(self) -> str:
return self._provenance_sources or ""
@provenance_sources.setter
def provenance_sources(self, provenance_sources: str):
if provenance_sources is None:
del self.provenance_sources
return
if not isinstance(provenance_sources, str):
raise TypeError('provenance_sources must be of type str')
self._provenance_sources = provenance_sources
@property
def update_mask(self) -> FieldMask:
return self._update_mask
@update_mask.setter
def update_mask(self, update_mask: FieldMask):
if update_mask is None:
del self.update_mask
return
if not isinstance(update_mask, FieldMask):
raise TypeError('update_mask must be of type FieldMask')
self._update_mask = update_mask
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/update'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return '*'
|
class ApiUpdateModelRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
title (str)
subtitle (str)
is_private (bool)
description (str)
publish_time (datetime)
provenance_sources (str)
update_mask (FieldMask)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def title(self) -> str:
pass
@title.setter
def title(self) -> str:
pass
@property
def subtitle(self) -> str:
pass
@subtitle.setter
def subtitle(self) -> str:
pass
@property
def is_private(self) -> bool:
pass
@is_private.setter
def is_private(self) -> bool:
pass
@property
def description(self) -> str:
pass
@description.setter
def description(self) -> str:
pass
@property
def publish_time(self) -> datetime:
pass
@publish_time.setter
def publish_time(self) -> datetime:
pass
@property
def provenance_sources(self) -> str:
pass
@provenance_sources.setter
def provenance_sources(self) -> str:
pass
@property
def update_mask(self) -> FieldMask:
pass
@update_mask.setter
def update_mask(self) -> FieldMask:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 43 | 1 | 5 | 0 | 5 | 0 | 2 | 0.1 | 1 | 4 | 0 | 0 | 20 | 9 | 22 | 39 | 155 | 23 | 120 | 53 | 77 | 12 | 100 | 33 | 77 | 3 | 2 | 1 | 40 |
140,988 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/services/model_service.py
|
src.kagglesdk.models.services.model_service.ModelClient
|
class ModelClient(object):
def __init__(self, client: KaggleHttpClient):
self._client = client
def get_model_metrics(self, request: GetModelMetricsRequest = None) -> GetModelMetricsResponse:
r"""
Args:
request (GetModelMetricsRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = GetModelMetricsRequest()
return self._client.call("models.ModelService", "GetModelMetrics", request, GetModelMetricsResponse)
|
class ModelClient(object):
def __init__(self, client: KaggleHttpClient):
pass
def get_model_metrics(self, request: GetModelMetricsRequest = None) -> GetModelMetricsResponse:
'''
Args:
request (GetModelMetricsRequest):
The request object; initialized to empty instance if not specified.
'''
pass
| 3 | 1 | 7 | 1 | 3 | 3 | 2 | 0.71 | 1 | 3 | 3 | 0 | 2 | 1 | 2 | 2 | 16 | 4 | 7 | 4 | 4 | 5 | 7 | 4 | 4 | 2 | 1 | 1 | 3 |
140,989 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiCreateModelInstanceRequest
|
class ApiCreateModelInstanceRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
body (ApiCreateModelInstanceRequestBody)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._body = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def body(self) -> Optional['ApiCreateModelInstanceRequestBody']:
return self._body
@body.setter
def body(self, body: Optional['ApiCreateModelInstanceRequestBody']):
if body is None:
del self.body
return
if not isinstance(body, ApiCreateModelInstanceRequestBody):
raise TypeError('body must be of type ApiCreateModelInstanceRequestBody')
self._body = body
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/create/instance'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return 'body'
|
class ApiCreateModelInstanceRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
body (ApiCreateModelInstanceRequestBody)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def body(self) -> Optional['ApiCreateModelInstanceRequestBody']:
pass
@body.setter
def body(self) -> Optional['ApiCreateModelInstanceRequestBody']:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 19 | 1 | 4 | 0 | 4 | 0 | 2 | 0.13 | 1 | 3 | 1 | 0 | 8 | 3 | 10 | 27 | 65 | 11 | 48 | 23 | 29 | 6 | 40 | 15 | 29 | 3 | 2 | 1 | 16 |
140,990 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiCreateModelInstanceRequestBody
|
class ApiCreateModelInstanceRequestBody(KaggleObject):
r"""
Attributes:
instance_slug (str)
framework (ModelFramework)
overview (str)
usage (str)
fine_tunable (bool)
training_data (str)
files (ApiDatasetNewFile)
directories (ApiUploadDirectoryInfo)
license_name (str)
model_instance_type (ModelInstanceType)
base_model_instance (str)
external_base_model_url (str)
sigstore (bool)
"""
def __init__(self):
self._instance_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._overview = ""
self._usage = ""
self._fine_tunable = None
self._training_data = []
self._files = []
self._directories = []
self._license_name = None
self._model_instance_type = None
self._base_model_instance = None
self._external_base_model_url = None
self._sigstore = None
self._freeze()
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def overview(self) -> str:
return self._overview
@overview.setter
def overview(self, overview: str):
if overview is None:
del self.overview
return
if not isinstance(overview, str):
raise TypeError('overview must be of type str')
self._overview = overview
@property
def usage(self) -> str:
return self._usage
@usage.setter
def usage(self, usage: str):
if usage is None:
del self.usage
return
if not isinstance(usage, str):
raise TypeError('usage must be of type str')
self._usage = usage
@property
def fine_tunable(self) -> bool:
return self._fine_tunable or False
@fine_tunable.setter
def fine_tunable(self, fine_tunable: bool):
if fine_tunable is None:
del self.fine_tunable
return
if not isinstance(fine_tunable, bool):
raise TypeError('fine_tunable must be of type bool')
self._fine_tunable = fine_tunable
@property
def training_data(self) -> Optional[List[str]]:
return self._training_data
@training_data.setter
def training_data(self, training_data: Optional[List[str]]):
if training_data is None:
del self.training_data
return
if not isinstance(training_data, list):
raise TypeError('training_data must be of type list')
if not all([isinstance(t, str) for t in training_data]):
raise TypeError('training_data must contain only items of type str')
self._training_data = training_data
@property
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
return self._files
@files.setter
def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]):
if files is None:
del self.files
return
if not isinstance(files, list):
raise TypeError('files must be of type list')
if not all([isinstance(t, ApiDatasetNewFile) for t in files]):
raise TypeError('files must contain only items of type ApiDatasetNewFile')
self._files = files
@property
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
return self._directories
@directories.setter
def directories(self, directories: Optional[List[Optional['ApiUploadDirectoryInfo']]]):
if directories is None:
del self.directories
return
if not isinstance(directories, list):
raise TypeError('directories must be of type list')
if not all([isinstance(t, ApiUploadDirectoryInfo) for t in directories]):
raise TypeError('directories must contain only items of type ApiUploadDirectoryInfo')
self._directories = directories
@property
def license_name(self) -> str:
return self._license_name or ""
@license_name.setter
def license_name(self, license_name: str):
if license_name is None:
del self.license_name
return
if not isinstance(license_name, str):
raise TypeError('license_name must be of type str')
self._license_name = license_name
@property
def model_instance_type(self) -> 'ModelInstanceType':
return self._model_instance_type or ModelInstanceType.MODEL_INSTANCE_TYPE_UNSPECIFIED
@model_instance_type.setter
def model_instance_type(self, model_instance_type: 'ModelInstanceType'):
if model_instance_type is None:
del self.model_instance_type
return
if not isinstance(model_instance_type, ModelInstanceType):
raise TypeError('model_instance_type must be of type ModelInstanceType')
self._model_instance_type = model_instance_type
@property
def base_model_instance(self) -> str:
return self._base_model_instance or ""
@base_model_instance.setter
def base_model_instance(self, base_model_instance: str):
if base_model_instance is None:
del self.base_model_instance
return
if not isinstance(base_model_instance, str):
raise TypeError('base_model_instance must be of type str')
self._base_model_instance = base_model_instance
@property
def external_base_model_url(self) -> str:
return self._external_base_model_url or ""
@external_base_model_url.setter
def external_base_model_url(self, external_base_model_url: str):
if external_base_model_url is None:
del self.external_base_model_url
return
if not isinstance(external_base_model_url, str):
raise TypeError('external_base_model_url must be of type str')
self._external_base_model_url = external_base_model_url
@property
def sigstore(self) -> bool:
return self._sigstore or False
@sigstore.setter
def sigstore(self, sigstore: bool):
if sigstore is None:
del self.sigstore
return
if not isinstance(sigstore, bool):
raise TypeError('sigstore must be of type bool')
self._sigstore = sigstore
|
class ApiCreateModelInstanceRequestBody(KaggleObject):
'''
Attributes:
instance_slug (str)
framework (ModelFramework)
overview (str)
usage (str)
fine_tunable (bool)
training_data (str)
files (ApiDatasetNewFile)
directories (ApiUploadDirectoryInfo)
license_name (str)
model_instance_type (ModelInstanceType)
base_model_instance (str)
external_base_model_url (str)
sigstore (bool)
'''
def __init__(self):
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def overview(self) -> str:
pass
@overview.setter
def overview(self) -> str:
pass
@property
def usage(self) -> str:
pass
@usage.setter
def usage(self) -> str:
pass
@property
def fine_tunable(self) -> bool:
pass
@fine_tunable.setter
def fine_tunable(self) -> bool:
pass
@property
def training_data(self) -> Optional[List[str]]:
pass
@training_data.setter
def training_data(self) -> Optional[List[str]]:
pass
@property
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
pass
@files.setter
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
pass
@property
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
pass
@directories.setter
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
pass
@property
def license_name(self) -> str:
pass
@license_name.setter
def license_name(self) -> str:
pass
@property
def model_instance_type(self) -> 'ModelInstanceType':
pass
@model_instance_type.setter
def model_instance_type(self) -> 'ModelInstanceType':
pass
@property
def base_model_instance(self) -> str:
pass
@base_model_instance.setter
def base_model_instance(self) -> str:
pass
@property
def external_base_model_url(self) -> str:
pass
@external_base_model_url.setter
def external_base_model_url(self) -> str:
pass
@property
def sigstore(self) -> bool:
pass
@sigstore.setter
def sigstore(self) -> bool:
pass
| 54 | 1 | 5 | 0 | 5 | 0 | 2 | 0.1 | 1 | 8 | 4 | 0 | 27 | 13 | 27 | 44 | 208 | 27 | 165 | 67 | 111 | 16 | 139 | 41 | 111 | 4 | 2 | 1 | 56 |
140,991 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiCreateModelInstanceVersionRequest
|
class ApiCreateModelInstanceVersionRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
body (ApiCreateModelInstanceVersionRequestBody)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._instance_slug = ""
self._body = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
@property
def body(self) -> Optional['ApiCreateModelInstanceVersionRequestBody']:
return self._body
@body.setter
def body(self, body: Optional['ApiCreateModelInstanceVersionRequestBody']):
if body is None:
del self.body
return
if not isinstance(body, ApiCreateModelInstanceVersionRequestBody):
raise TypeError('body must be of type ApiCreateModelInstanceVersionRequestBody')
self._body = body
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/create/version'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return 'body'
|
class ApiCreateModelInstanceVersionRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
body (ApiCreateModelInstanceVersionRequestBody)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
@property
def body(self) -> Optional['ApiCreateModelInstanceVersionRequestBody']:
pass
@body.setter
def body(self) -> Optional['ApiCreateModelInstanceVersionRequestBody']:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 27 | 1 | 4 | 0 | 4 | 0 | 2 | 0.11 | 1 | 4 | 2 | 0 | 12 | 5 | 14 | 31 | 95 | 15 | 72 | 33 | 45 | 8 | 60 | 21 | 45 | 3 | 2 | 1 | 24 |
140,992 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiCreateModelInstanceVersionRequestBody
|
class ApiCreateModelInstanceVersionRequestBody(KaggleObject):
r"""
Attributes:
version_notes (str)
files (ApiDatasetNewFile)
directories (ApiUploadDirectoryInfo)
sigstore (bool)
"""
def __init__(self):
self._version_notes = None
self._files = []
self._directories = []
self._sigstore = None
self._freeze()
@property
def version_notes(self) -> str:
return self._version_notes or ""
@version_notes.setter
def version_notes(self, version_notes: str):
if version_notes is None:
del self.version_notes
return
if not isinstance(version_notes, str):
raise TypeError('version_notes must be of type str')
self._version_notes = version_notes
@property
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
return self._files
@files.setter
def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]):
if files is None:
del self.files
return
if not isinstance(files, list):
raise TypeError('files must be of type list')
if not all([isinstance(t, ApiDatasetNewFile) for t in files]):
raise TypeError('files must contain only items of type ApiDatasetNewFile')
self._files = files
@property
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
return self._directories
@directories.setter
def directories(self, directories: Optional[List[Optional['ApiUploadDirectoryInfo']]]):
if directories is None:
del self.directories
return
if not isinstance(directories, list):
raise TypeError('directories must be of type list')
if not all([isinstance(t, ApiUploadDirectoryInfo) for t in directories]):
raise TypeError('directories must contain only items of type ApiUploadDirectoryInfo')
self._directories = directories
@property
def sigstore(self) -> bool:
return self._sigstore or False
@sigstore.setter
def sigstore(self, sigstore: bool):
if sigstore is None:
del self.sigstore
return
if not isinstance(sigstore, bool):
raise TypeError('sigstore must be of type bool')
self._sigstore = sigstore
|
class ApiCreateModelInstanceVersionRequestBody(KaggleObject):
'''
Attributes:
version_notes (str)
files (ApiDatasetNewFile)
directories (ApiUploadDirectoryInfo)
sigstore (bool)
'''
def __init__(self):
pass
@property
def version_notes(self) -> str:
pass
@version_notes.setter
def version_notes(self) -> str:
pass
@property
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
pass
@files.setter
def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]:
pass
@property
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
pass
@directories.setter
def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]:
pass
@property
def sigstore(self) -> bool:
pass
@sigstore.setter
def sigstore(self) -> bool:
pass
| 18 | 1 | 5 | 0 | 5 | 0 | 2 | 0.13 | 1 | 6 | 2 | 0 | 9 | 4 | 9 | 26 | 71 | 9 | 55 | 22 | 37 | 7 | 47 | 14 | 37 | 4 | 2 | 1 | 19 |
140,993 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiCreateModelRequest
|
class ApiCreateModelRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
slug (str)
title (str)
subtitle (str)
is_private (bool)
description (str)
publish_time (datetime)
provenance_sources (str)
"""
def __init__(self):
self._owner_slug = ""
self._slug = ""
self._title = ""
self._subtitle = None
self._is_private = None
self._description = None
self._publish_time = None
self._provenance_sources = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def slug(self) -> str:
return self._slug
@slug.setter
def slug(self, slug: str):
if slug is None:
del self.slug
return
if not isinstance(slug, str):
raise TypeError('slug must be of type str')
self._slug = slug
@property
def title(self) -> str:
return self._title
@title.setter
def title(self, title: str):
if title is None:
del self.title
return
if not isinstance(title, str):
raise TypeError('title must be of type str')
self._title = title
@property
def subtitle(self) -> str:
return self._subtitle or ""
@subtitle.setter
def subtitle(self, subtitle: str):
if subtitle is None:
del self.subtitle
return
if not isinstance(subtitle, str):
raise TypeError('subtitle must be of type str')
self._subtitle = subtitle
@property
def is_private(self) -> bool:
return self._is_private or False
@is_private.setter
def is_private(self, is_private: bool):
if is_private is None:
del self.is_private
return
if not isinstance(is_private, bool):
raise TypeError('is_private must be of type bool')
self._is_private = is_private
@property
def description(self) -> str:
return self._description or ""
@description.setter
def description(self, description: str):
if description is None:
del self.description
return
if not isinstance(description, str):
raise TypeError('description must be of type str')
self._description = description
@property
def publish_time(self) -> datetime:
return self._publish_time
@publish_time.setter
def publish_time(self, publish_time: datetime):
if publish_time is None:
del self.publish_time
return
if not isinstance(publish_time, datetime):
raise TypeError('publish_time must be of type datetime')
self._publish_time = publish_time
@property
def provenance_sources(self) -> str:
return self._provenance_sources or ""
@provenance_sources.setter
def provenance_sources(self, provenance_sources: str):
if provenance_sources is None:
del self.provenance_sources
return
if not isinstance(provenance_sources, str):
raise TypeError('provenance_sources must be of type str')
self._provenance_sources = provenance_sources
def endpoint(self):
path = '/api/v1/models/create/new'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return '*'
|
class ApiCreateModelRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
slug (str)
title (str)
subtitle (str)
is_private (bool)
description (str)
publish_time (datetime)
provenance_sources (str)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def slug(self) -> str:
pass
@slug.setter
def slug(self) -> str:
pass
@property
def title(self) -> str:
pass
@title.setter
def title(self) -> str:
pass
@property
def subtitle(self) -> str:
pass
@subtitle.setter
def subtitle(self) -> str:
pass
@property
def is_private(self) -> bool:
pass
@is_private.setter
def is_private(self) -> bool:
pass
@property
def description(self) -> str:
pass
@description.setter
def description(self) -> str:
pass
@property
def publish_time(self) -> datetime:
pass
@publish_time.setter
def publish_time(self) -> datetime:
pass
@property
def provenance_sources(self) -> str:
pass
@provenance_sources.setter
def provenance_sources(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 39 | 1 | 4 | 0 | 4 | 0 | 2 | 0.1 | 1 | 4 | 0 | 0 | 18 | 8 | 20 | 37 | 140 | 21 | 108 | 48 | 69 | 11 | 90 | 30 | 69 | 3 | 2 | 1 | 36 |
140,994 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiCreateModelResponse
|
class ApiCreateModelResponse(KaggleObject):
r"""
Attributes:
id (int)
ref (str)
error (str)
error_code (int)
url (str)
"""
def __init__(self):
self._id = None
self._ref = None
self._error = None
self._error_code = None
self._url = None
self._freeze()
@property
def id(self) -> int:
return self._id or 0
@id.setter
def id(self, id: int):
if id is None:
del self.id
return
if not isinstance(id, int):
raise TypeError('id must be of type int')
self._id = id
@property
def ref(self) -> str:
return self._ref or ""
@ref.setter
def ref(self, ref: str):
if ref is None:
del self.ref
return
if not isinstance(ref, str):
raise TypeError('ref must be of type str')
self._ref = ref
@property
def error(self) -> str:
return self._error or ""
@error.setter
def error(self, error: str):
if error is None:
del self.error
return
if not isinstance(error, str):
raise TypeError('error must be of type str')
self._error = error
@property
def error_code(self) -> int:
return self._error_code or 0
@error_code.setter
def error_code(self, error_code: int):
if error_code is None:
del self.error_code
return
if not isinstance(error_code, int):
raise TypeError('error_code must be of type int')
self._error_code = error_code
@property
def url(self) -> str:
return self._url or ""
@url.setter
def url(self, url: str):
if url is None:
del self.url
return
if not isinstance(url, str):
raise TypeError('url must be of type str')
self._url = url
@property
def errorCode(self):
return self.error_code
|
class ApiCreateModelResponse(KaggleObject):
'''
Attributes:
id (int)
ref (str)
error (str)
error_code (int)
url (str)
'''
def __init__(self):
pass
@property
def id(self) -> int:
pass
@id.setter
def id(self) -> int:
pass
@property
def ref(self) -> str:
pass
@ref.setter
def ref(self) -> str:
pass
@property
def error(self) -> str:
pass
@error.setter
def error(self) -> str:
pass
@property
def error_code(self) -> int:
pass
@error_code.setter
def error_code(self) -> int:
pass
@property
def url(self) -> str:
pass
@url.setter
def url(self) -> str:
pass
@property
def errorCode(self):
pass
| 24 | 1 | 5 | 0 | 5 | 0 | 2 | 0.12 | 1 | 3 | 0 | 0 | 12 | 5 | 12 | 29 | 86 | 12 | 66 | 29 | 42 | 8 | 55 | 18 | 42 | 3 | 2 | 1 | 22 |
140,995 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiDeleteModelInstanceRequest
|
class ApiDeleteModelInstanceRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._instance_slug = ""
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/delete'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
|
class ApiDeleteModelInstanceRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
| 21 | 1 | 4 | 0 | 4 | 0 | 2 | 0.12 | 1 | 3 | 1 | 0 | 10 | 4 | 11 | 28 | 76 | 12 | 57 | 26 | 36 | 7 | 48 | 17 | 36 | 3 | 2 | 1 | 19 |
140,996 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiDeleteModelInstanceVersionRequest
|
class ApiDeleteModelInstanceVersionRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
version_number (int)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._instance_slug = ""
self._version_number = 0
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
@property
def version_number(self) -> int:
return self._version_number
@version_number.setter
def version_number(self, version_number: int):
if version_number is None:
del self.version_number
return
if not isinstance(version_number, int):
raise TypeError('version_number must be of type int')
self._version_number = version_number
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/{version_number}/delete'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
|
class ApiDeleteModelInstanceVersionRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
version_number (int)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
@property
def version_number(self) -> int:
pass
@version_number.setter
def version_number(self) -> int:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
| 25 | 1 | 4 | 0 | 4 | 0 | 2 | 0.12 | 1 | 4 | 1 | 0 | 12 | 5 | 13 | 30 | 91 | 14 | 69 | 31 | 44 | 8 | 58 | 20 | 44 | 3 | 2 | 1 | 23 |
140,997 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiDeleteModelResponse
|
class ApiDeleteModelResponse(KaggleObject):
r"""
Attributes:
error (str)
"""
def __init__(self):
self._error = None
self._freeze()
@property
def error(self) -> str:
return self._error or ""
@error.setter
def error(self, error: str):
if error is None:
del self.error
return
if not isinstance(error, str):
raise TypeError('error must be of type str')
self._error = error
|
class ApiDeleteModelResponse(KaggleObject):
'''
Attributes:
error (str)
'''
def __init__(self):
pass
@property
def error(self) -> str:
pass
@error.setter
def error(self) -> str:
pass
| 6 | 1 | 4 | 0 | 4 | 0 | 2 | 0.27 | 1 | 2 | 0 | 0 | 3 | 1 | 3 | 20 | 22 | 3 | 15 | 7 | 9 | 4 | 13 | 5 | 9 | 3 | 2 | 1 | 5 |
140,998 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiDownloadModelInstanceVersionRequest
|
class ApiDownloadModelInstanceVersionRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
version_number (int)
path (str)
Relative path to a specific file inside the databundle.
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._instance_slug = ""
self._version_number = 0
self._path = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
@property
def version_number(self) -> int:
return self._version_number
@version_number.setter
def version_number(self, version_number: int):
if version_number is None:
del self.version_number
return
if not isinstance(version_number, int):
raise TypeError('version_number must be of type int')
self._version_number = version_number
@property
def path(self) -> str:
"""Relative path to a specific file inside the databundle."""
return self._path or ""
@path.setter
def path(self, path: str):
if path is None:
del self.path
return
if not isinstance(path, str):
raise TypeError('path must be of type str')
self._path = path
def endpoint(self):
if self.path:
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/{version_number}/download/{path}'
else:
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/{version_number}/download'
return path.format_map(self.to_field_map(self))
@staticmethod
def endpoint_path():
return '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/{version_number}/download'
|
class ApiDownloadModelInstanceVersionRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
version_number (int)
path (str)
Relative path to a specific file inside the databundle.
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
@property
def version_number(self) -> int:
pass
@version_number.setter
def version_number(self) -> int:
pass
@property
def path(self) -> str:
'''Relative path to a specific file inside the databundle.'''
pass
@path.setter
def path(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def endpoint_path():
pass
| 29 | 2 | 5 | 0 | 5 | 0 | 2 | 0.13 | 1 | 4 | 1 | 0 | 14 | 6 | 15 | 32 | 110 | 15 | 84 | 36 | 55 | 11 | 70 | 23 | 54 | 3 | 2 | 1 | 28 |
140,999 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiGatingUserConsent
|
class ApiGatingUserConsent(KaggleObject):
r"""
ApiGatingUserConsent presents GatingUserConsent data for publisher access,
omitting or modifying certain fields as needed such as user_id.
Attributes:
id (int)
agreement_id (int)
user_name (str)
request_data (str)
request_time (datetime)
review_time (datetime)
review_status (GatingAgreementRequestsReviewStatus)
expiry_status (GatingAgreementRequestsExpiryStatus)
expiry_time (datetime)
publisher_notes (str)
"""
def __init__(self):
self._id = 0
self._agreement_id = 0
self._user_name = ""
self._request_data = None
self._request_time = None
self._review_time = None
self._review_status = GatingAgreementRequestsReviewStatus.GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED
self._expiry_status = GatingAgreementRequestsExpiryStatus.GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED
self._expiry_time = None
self._publisher_notes = None
self._freeze()
@property
def id(self) -> int:
return self._id
@id.setter
def id(self, id: int):
if id is None:
del self.id
return
if not isinstance(id, int):
raise TypeError('id must be of type int')
self._id = id
@property
def agreement_id(self) -> int:
return self._agreement_id
@agreement_id.setter
def agreement_id(self, agreement_id: int):
if agreement_id is None:
del self.agreement_id
return
if not isinstance(agreement_id, int):
raise TypeError('agreement_id must be of type int')
self._agreement_id = agreement_id
@property
def user_name(self) -> str:
return self._user_name
@user_name.setter
def user_name(self, user_name: str):
if user_name is None:
del self.user_name
return
if not isinstance(user_name, str):
raise TypeError('user_name must be of type str')
self._user_name = user_name
@property
def request_data(self) -> str:
return self._request_data or ""
@request_data.setter
def request_data(self, request_data: str):
if request_data is None:
del self.request_data
return
if not isinstance(request_data, str):
raise TypeError('request_data must be of type str')
self._request_data = request_data
@property
def request_time(self) -> datetime:
return self._request_time
@request_time.setter
def request_time(self, request_time: datetime):
if request_time is None:
del self.request_time
return
if not isinstance(request_time, datetime):
raise TypeError('request_time must be of type datetime')
self._request_time = request_time
@property
def review_time(self) -> datetime:
return self._review_time or None
@review_time.setter
def review_time(self, review_time: datetime):
if review_time is None:
del self.review_time
return
if not isinstance(review_time, datetime):
raise TypeError('review_time must be of type datetime')
self._review_time = review_time
@property
def review_status(self) -> 'GatingAgreementRequestsReviewStatus':
return self._review_status
@review_status.setter
def review_status(self, review_status: 'GatingAgreementRequestsReviewStatus'):
if review_status is None:
del self.review_status
return
if not isinstance(review_status, GatingAgreementRequestsReviewStatus):
raise TypeError('review_status must be of type GatingAgreementRequestsReviewStatus')
self._review_status = review_status
@property
def expiry_status(self) -> 'GatingAgreementRequestsExpiryStatus':
return self._expiry_status
@expiry_status.setter
def expiry_status(self, expiry_status: 'GatingAgreementRequestsExpiryStatus'):
if expiry_status is None:
del self.expiry_status
return
if not isinstance(expiry_status, GatingAgreementRequestsExpiryStatus):
raise TypeError('expiry_status must be of type GatingAgreementRequestsExpiryStatus')
self._expiry_status = expiry_status
@property
def expiry_time(self) -> datetime:
return self._expiry_time or None
@expiry_time.setter
def expiry_time(self, expiry_time: datetime):
if expiry_time is None:
del self.expiry_time
return
if not isinstance(expiry_time, datetime):
raise TypeError('expiry_time must be of type datetime')
self._expiry_time = expiry_time
@property
def publisher_notes(self) -> str:
return self._publisher_notes or ""
@publisher_notes.setter
def publisher_notes(self, publisher_notes: str):
if publisher_notes is None:
del self.publisher_notes
return
if not isinstance(publisher_notes, str):
raise TypeError('publisher_notes must be of type str')
self._publisher_notes = publisher_notes
|
class ApiGatingUserConsent(KaggleObject):
'''
ApiGatingUserConsent presents GatingUserConsent data for publisher access,
omitting or modifying certain fields as needed such as user_id.
Attributes:
id (int)
agreement_id (int)
user_name (str)
request_data (str)
request_time (datetime)
review_time (datetime)
review_status (GatingAgreementRequestsReviewStatus)
expiry_status (GatingAgreementRequestsExpiryStatus)
expiry_time (datetime)
publisher_notes (str)
'''
def __init__(self):
pass
@property
def id(self) -> int:
pass
@id.setter
def id(self) -> int:
pass
@property
def agreement_id(self) -> int:
pass
@agreement_id.setter
def agreement_id(self) -> int:
pass
@property
def user_name(self) -> str:
pass
@user_name.setter
def user_name(self) -> str:
pass
@property
def request_data(self) -> str:
pass
@request_data.setter
def request_data(self) -> str:
pass
@property
def request_time(self) -> datetime:
pass
@request_time.setter
def request_time(self) -> datetime:
pass
@property
def review_time(self) -> datetime:
pass
@review_time.setter
def review_time(self) -> datetime:
pass
@property
def review_status(self) -> 'GatingAgreementRequestsReviewStatus':
pass
@review_status.setter
def review_status(self) -> 'GatingAgreementRequestsReviewStatus':
pass
@property
def expiry_status(self) -> 'GatingAgreementRequestsExpiryStatus':
pass
@expiry_status.setter
def expiry_status(self) -> 'GatingAgreementRequestsExpiryStatus':
pass
@property
def expiry_time(self) -> datetime:
pass
@expiry_time.setter
def expiry_time(self) -> datetime:
pass
@property
def publisher_notes(self) -> str:
pass
@publisher_notes.setter
def publisher_notes(self) -> str:
pass
| 42 | 1 | 5 | 0 | 5 | 0 | 2 | 0.12 | 1 | 6 | 2 | 0 | 21 | 10 | 21 | 38 | 160 | 22 | 123 | 52 | 81 | 15 | 103 | 32 | 81 | 3 | 2 | 1 | 41 |
141,000 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiGetModelInstanceRequest
|
class ApiGetModelInstanceRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._instance_slug = ""
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/get'
return path.format_map(self.to_field_map(self))
@staticmethod
def endpoint_path():
return '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/get'
|
class ApiGetModelInstanceRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def endpoint_path():
pass
| 21 | 1 | 4 | 0 | 4 | 0 | 2 | 0.12 | 1 | 3 | 1 | 0 | 10 | 4 | 11 | 28 | 75 | 11 | 57 | 26 | 36 | 7 | 48 | 17 | 36 | 3 | 2 | 1 | 19 |
141,001 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiGetModelRequest
|
class ApiGetModelRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
def endpoint(self):
path = '/api/v1/models/{owner_slug}/{model_slug}/get'
return path.format_map(self.to_field_map(self))
@staticmethod
def endpoint_path():
return '/api/v1/models/{owner_slug}/{model_slug}/get'
|
class ApiGetModelRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def endpoint_path():
pass
| 13 | 1 | 4 | 0 | 4 | 0 | 2 | 0.15 | 1 | 2 | 0 | 0 | 6 | 2 | 7 | 24 | 45 | 7 | 33 | 16 | 20 | 5 | 28 | 11 | 20 | 3 | 2 | 1 | 11 |
141,002 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiListModelGatingUserConsentsResponse
|
class ApiListModelGatingUserConsentsResponse(KaggleObject):
r"""
Attributes:
gating_user_consents (ApiGatingUserConsent)
total_size (int)
next_page_token (str)
"""
def __init__(self):
self._gating_user_consents = []
self._total_size = 0
self._next_page_token = ""
self._freeze()
@property
def gating_user_consents(self) -> Optional[List[Optional['ApiGatingUserConsent']]]:
return self._gating_user_consents
@gating_user_consents.setter
def gating_user_consents(self, gating_user_consents: Optional[List[Optional['ApiGatingUserConsent']]]):
if gating_user_consents is None:
del self.gating_user_consents
return
if not isinstance(gating_user_consents, list):
raise TypeError('gating_user_consents must be of type list')
if not all([isinstance(t, ApiGatingUserConsent) for t in gating_user_consents]):
raise TypeError('gating_user_consents must contain only items of type ApiGatingUserConsent')
self._gating_user_consents = gating_user_consents
@property
def total_size(self) -> int:
return self._total_size
@total_size.setter
def total_size(self, total_size: int):
if total_size is None:
del self.total_size
return
if not isinstance(total_size, int):
raise TypeError('total_size must be of type int')
self._total_size = total_size
@property
def next_page_token(self) -> str:
return self._next_page_token
@next_page_token.setter
def next_page_token(self, next_page_token: str):
if next_page_token is None:
del self.next_page_token
return
if not isinstance(next_page_token, str):
raise TypeError('next_page_token must be of type str')
self._next_page_token = next_page_token
@property
def gatingUserConsents(self):
return self.gating_user_consents
@property
def totalSize(self):
return self.total_size
@property
def nextPageToken(self):
return self.next_page_token
|
class ApiListModelGatingUserConsentsResponse(KaggleObject):
'''
Attributes:
gating_user_consents (ApiGatingUserConsent)
total_size (int)
next_page_token (str)
'''
def __init__(self):
pass
@property
def gating_user_consents(self) -> Optional[List[Optional['ApiGatingUserConsent']]]:
pass
@gating_user_consents.setter
def gating_user_consents(self) -> Optional[List[Optional['ApiGatingUserConsent']]]:
pass
@property
def total_size(self) -> int:
pass
@total_size.setter
def total_size(self) -> int:
pass
@property
def next_page_token(self) -> str:
pass
@next_page_token.setter
def next_page_token(self) -> str:
pass
@property
def gatingUserConsents(self):
pass
@property
def totalSize(self):
pass
@property
def nextPageToken(self):
pass
| 20 | 1 | 4 | 0 | 4 | 0 | 2 | 0.12 | 1 | 5 | 1 | 0 | 10 | 3 | 10 | 27 | 66 | 10 | 50 | 23 | 30 | 6 | 41 | 14 | 30 | 4 | 2 | 1 | 17 |
141,003 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiListModelInstanceVersionFilesRequest
|
class ApiListModelInstanceVersionFilesRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
instance_slug (str)
framework (ModelFramework)
version_number (int)
page_size (int)
page_token (str)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._instance_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._version_number = None
self._page_size = None
self._page_token = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def version_number(self) -> int:
return self._version_number or 0
@version_number.setter
def version_number(self, version_number: int):
if version_number is None:
del self.version_number
return
if not isinstance(version_number, int):
raise TypeError('version_number must be of type int')
self._version_number = version_number
@property
def page_size(self) -> int:
return self._page_size or 0
@page_size.setter
def page_size(self, page_size: int):
if page_size is None:
del self.page_size
return
if not isinstance(page_size, int):
raise TypeError('page_size must be of type int')
self._page_size = page_size
@property
def page_token(self) -> str:
return self._page_token or ""
@page_token.setter
def page_token(self, page_token: str):
if page_token is None:
del self.page_token
return
if not isinstance(page_token, str):
raise TypeError('page_token must be of type str')
self._page_token = page_token
def endpoint(self):
if self.version_number:
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/{version_number}/files'
else:
path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/files'
return path.format_map(self.to_field_map(self))
@staticmethod
def endpoint_path():
return '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/files'
|
class ApiListModelInstanceVersionFilesRequest(KaggleObject):
'''
Attributes:
owner_slug (str)
model_slug (str)
instance_slug (str)
framework (ModelFramework)
version_number (int)
page_size (int)
page_token (str)
'''
def __init__(self):
pass
@property
def owner_slug(self) -> str:
pass
@owner_slug.setter
def owner_slug(self) -> str:
pass
@property
def model_slug(self) -> str:
pass
@model_slug.setter
def model_slug(self) -> str:
pass
@property
def instance_slug(self) -> str:
pass
@instance_slug.setter
def instance_slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def version_number(self) -> int:
pass
@version_number.setter
def version_number(self) -> int:
pass
@property
def page_size(self) -> int:
pass
@page_size.setter
def page_size(self) -> int:
pass
@property
def page_token(self) -> str:
pass
@page_token.setter
def page_token(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def endpoint_path():
pass
| 33 | 1 | 5 | 0 | 5 | 0 | 2 | 0.1 | 1 | 4 | 1 | 0 | 16 | 7 | 17 | 34 | 123 | 17 | 96 | 41 | 63 | 10 | 80 | 26 | 62 | 3 | 2 | 1 | 32 |
141,004 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiListModelInstanceVersionFilesResponse
|
class ApiListModelInstanceVersionFilesResponse(KaggleObject):
r"""
Attributes:
files (ApiModelFile)
next_page_token (str)
"""
def __init__(self):
self._files = []
self._next_page_token = ""
self._freeze()
@property
def files(self) -> Optional[List[Optional['ApiModelFile']]]:
return self._files
@files.setter
def files(self, files: Optional[List[Optional['ApiModelFile']]]):
if files is None:
del self.files
return
if not isinstance(files, list):
raise TypeError('files must be of type list')
if not all([isinstance(t, ApiModelFile) for t in files]):
raise TypeError('files must contain only items of type ApiModelFile')
self._files = files
@property
def next_page_token(self) -> str:
return self._next_page_token
@next_page_token.setter
def next_page_token(self, next_page_token: str):
if next_page_token is None:
del self.next_page_token
return
if not isinstance(next_page_token, str):
raise TypeError('next_page_token must be of type str')
self._next_page_token = next_page_token
@property
def nextPageToken(self):
return self.next_page_token
|
class ApiListModelInstanceVersionFilesResponse(KaggleObject):
'''
Attributes:
files (ApiModelFile)
next_page_token (str)
'''
def __init__(self):
pass
@property
def files(self) -> Optional[List[Optional['ApiModelFile']]]:
pass
@files.setter
def files(self) -> Optional[List[Optional['ApiModelFile']]]:
pass
@property
def next_page_token(self) -> str:
pass
@next_page_token.setter
def next_page_token(self) -> str:
pass
@property
def nextPageToken(self):
pass
| 12 | 1 | 4 | 0 | 4 | 0 | 2 | 0.16 | 1 | 4 | 1 | 0 | 6 | 2 | 6 | 23 | 43 | 6 | 32 | 14 | 20 | 5 | 27 | 9 | 20 | 4 | 2 | 1 | 11 |
141,005 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiListModelsRequest
|
class ApiListModelsRequest(KaggleObject):
r"""
Attributes:
search (str)
Display models matching the specified search terms.
sort_by (ListModelsOrderBy)
Sort the results (default is 'hotness'). 'relevance' only works if there is
a search query.
owner (str)
Display models by a particular user or organization.
page_size (int)
Page size.
page_token (str)
Page token used for pagination.
only_vertex_models (bool)
Only list models that have Vertex URLs
"""
def __init__(self):
self._search = None
self._sort_by = None
self._owner = None
self._page_size = None
self._page_token = None
self._only_vertex_models = None
self._freeze()
@property
def search(self) -> str:
"""Display models matching the specified search terms."""
return self._search or ""
@search.setter
def search(self, search: str):
if search is None:
del self.search
return
if not isinstance(search, str):
raise TypeError('search must be of type str')
self._search = search
@property
def sort_by(self) -> 'ListModelsOrderBy':
r"""
Sort the results (default is 'hotness'). 'relevance' only works if there is
a search query.
"""
return self._sort_by or ListModelsOrderBy.LIST_MODELS_ORDER_BY_UNSPECIFIED
@sort_by.setter
def sort_by(self, sort_by: 'ListModelsOrderBy'):
if sort_by is None:
del self.sort_by
return
if not isinstance(sort_by, ListModelsOrderBy):
raise TypeError('sort_by must be of type ListModelsOrderBy')
self._sort_by = sort_by
@property
def owner(self) -> str:
"""Display models by a particular user or organization."""
return self._owner or ""
@owner.setter
def owner(self, owner: str):
if owner is None:
del self.owner
return
if not isinstance(owner, str):
raise TypeError('owner must be of type str')
self._owner = owner
@property
def page_size(self) -> int:
"""Page size."""
return self._page_size or 0
@page_size.setter
def page_size(self, page_size: int):
if page_size is None:
del self.page_size
return
if not isinstance(page_size, int):
raise TypeError('page_size must be of type int')
self._page_size = page_size
@property
def page_token(self) -> str:
"""Page token used for pagination."""
return self._page_token or ""
@page_token.setter
def page_token(self, page_token: str):
if page_token is None:
del self.page_token
return
if not isinstance(page_token, str):
raise TypeError('page_token must be of type str')
self._page_token = page_token
@property
def only_vertex_models(self) -> bool:
"""Only list models that have Vertex URLs"""
return self._only_vertex_models or False
@only_vertex_models.setter
def only_vertex_models(self, only_vertex_models: bool):
if only_vertex_models is None:
del self.only_vertex_models
return
if not isinstance(only_vertex_models, bool):
raise TypeError('only_vertex_models must be of type bool')
self._only_vertex_models = only_vertex_models
def endpoint(self):
path = '/api/v1/models/list'
return path.format_map(self.to_field_map(self))
|
class ApiListModelsRequest(KaggleObject):
'''
Attributes:
search (str)
Display models matching the specified search terms.
sort_by (ListModelsOrderBy)
Sort the results (default is 'hotness'). 'relevance' only works if there is
a search query.
owner (str)
Display models by a particular user or organization.
page_size (int)
Page size.
page_token (str)
Page token used for pagination.
only_vertex_models (bool)
Only list models that have Vertex URLs
'''
def __init__(self):
pass
@property
def search(self) -> str:
'''Display models matching the specified search terms.'''
pass
@search.setter
def search(self) -> str:
pass
@property
def sort_by(self) -> 'ListModelsOrderBy':
'''
Sort the results (default is 'hotness'). 'relevance' only works if there is
a search query.
'''
pass
@sort_by.setter
def sort_by(self) -> 'ListModelsOrderBy':
pass
@property
def owner(self) -> str:
'''Display models by a particular user or organization.'''
pass
@owner.setter
def owner(self) -> str:
pass
@property
def page_size(self) -> int:
'''Page size.'''
pass
@page_size.setter
def page_size(self) -> int:
pass
@property
def page_token(self) -> str:
'''Page token used for pagination.'''
pass
@page_token.setter
def page_token(self) -> str:
pass
@property
def only_vertex_models(self) -> bool:
'''Only list models that have Vertex URLs'''
pass
@only_vertex_models.setter
def only_vertex_models(self) -> bool:
pass
def endpoint(self):
pass
| 27 | 7 | 5 | 0 | 5 | 1 | 2 | 0.32 | 1 | 5 | 1 | 0 | 14 | 6 | 14 | 31 | 117 | 14 | 78 | 34 | 51 | 25 | 66 | 22 | 51 | 3 | 2 | 1 | 26 |
141,006 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiListModelsResponse
|
class ApiListModelsResponse(KaggleObject):
r"""
Attributes:
models (ApiModel)
next_page_token (str)
total_results (int)
"""
def __init__(self):
self._models = []
self._next_page_token = ""
self._total_results = 0
self._freeze()
@property
def models(self) -> Optional[List[Optional['ApiModel']]]:
return self._models
@models.setter
def models(self, models: Optional[List[Optional['ApiModel']]]):
if models is None:
del self.models
return
if not isinstance(models, list):
raise TypeError('models must be of type list')
if not all([isinstance(t, ApiModel) for t in models]):
raise TypeError('models must contain only items of type ApiModel')
self._models = models
@property
def next_page_token(self) -> str:
return self._next_page_token
@next_page_token.setter
def next_page_token(self, next_page_token: str):
if next_page_token is None:
del self.next_page_token
return
if not isinstance(next_page_token, str):
raise TypeError('next_page_token must be of type str')
self._next_page_token = next_page_token
@property
def total_results(self) -> int:
return self._total_results
@total_results.setter
def total_results(self, total_results: int):
if total_results is None:
del self.total_results
return
if not isinstance(total_results, int):
raise TypeError('total_results must be of type int')
self._total_results = total_results
@property
def nextPageToken(self):
return self.next_page_token
@property
def totalResults(self):
return self.total_results
|
class ApiListModelsResponse(KaggleObject):
'''
Attributes:
models (ApiModel)
next_page_token (str)
total_results (int)
'''
def __init__(self):
pass
@property
def models(self) -> Optional[List[Optional['ApiModel']]]:
pass
@models.setter
def models(self) -> Optional[List[Optional['ApiModel']]]:
pass
@property
def next_page_token(self) -> str:
pass
@next_page_token.setter
def next_page_token(self) -> str:
pass
@property
def total_results(self) -> int:
pass
@total_results.setter
def total_results(self) -> int:
pass
@property
def nextPageToken(self):
pass
@property
def totalResults(self):
pass
| 18 | 1 | 4 | 0 | 4 | 0 | 2 | 0.13 | 1 | 5 | 1 | 0 | 9 | 3 | 9 | 26 | 62 | 9 | 47 | 21 | 29 | 6 | 39 | 13 | 29 | 4 | 2 | 1 | 16 |
141,007 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiModel
|
class ApiModel(KaggleObject):
r"""
Attributes:
id (int)
ref (str)
ref is `owner_slug/model_slug`
title (str)
subtitle (str)
author (str)
slug (str)
is_private (bool)
description (str)
instances (ApiModelInstance)
tags (ApiCategory)
publish_time (datetime)
provenance_sources (str)
url (str)
model_version_links (ModelLink)
"""
def __init__(self):
self._id = 0
self._ref = ""
self._title = ""
self._subtitle = ""
self._author = ""
self._slug = ""
self._is_private = False
self._description = ""
self._instances = []
self._tags = []
self._publish_time = None
self._provenance_sources = ""
self._url = ""
self._model_version_links = []
self._freeze()
@property
def id(self) -> int:
return self._id
@id.setter
def id(self, id: int):
if id is None:
del self.id
return
if not isinstance(id, int):
raise TypeError('id must be of type int')
self._id = id
@property
def ref(self) -> str:
"""ref is `owner_slug/model_slug`"""
return self._ref
@ref.setter
def ref(self, ref: str):
if ref is None:
del self.ref
return
if not isinstance(ref, str):
raise TypeError('ref must be of type str')
self._ref = ref
@property
def title(self) -> str:
return self._title
@title.setter
def title(self, title: str):
if title is None:
del self.title
return
if not isinstance(title, str):
raise TypeError('title must be of type str')
self._title = title
@property
def subtitle(self) -> str:
return self._subtitle
@subtitle.setter
def subtitle(self, subtitle: str):
if subtitle is None:
del self.subtitle
return
if not isinstance(subtitle, str):
raise TypeError('subtitle must be of type str')
self._subtitle = subtitle
@property
def author(self) -> str:
return self._author
@author.setter
def author(self, author: str):
if author is None:
del self.author
return
if not isinstance(author, str):
raise TypeError('author must be of type str')
self._author = author
@property
def slug(self) -> str:
return self._slug
@slug.setter
def slug(self, slug: str):
if slug is None:
del self.slug
return
if not isinstance(slug, str):
raise TypeError('slug must be of type str')
self._slug = slug
@property
def is_private(self) -> bool:
return self._is_private
@is_private.setter
def is_private(self, is_private: bool):
if is_private is None:
del self.is_private
return
if not isinstance(is_private, bool):
raise TypeError('is_private must be of type bool')
self._is_private = is_private
@property
def description(self) -> str:
return self._description
@description.setter
def description(self, description: str):
if description is None:
del self.description
return
if not isinstance(description, str):
raise TypeError('description must be of type str')
self._description = description
@property
def instances(self) -> Optional[List[Optional['ApiModelInstance']]]:
return self._instances
@instances.setter
def instances(self, instances: Optional[List[Optional['ApiModelInstance']]]):
if instances is None:
del self.instances
return
if not isinstance(instances, list):
raise TypeError('instances must be of type list')
if not all([isinstance(t, ApiModelInstance) for t in instances]):
raise TypeError('instances must contain only items of type ApiModelInstance')
self._instances = instances
@property
def tags(self) -> Optional[List[Optional['ApiCategory']]]:
return self._tags
@tags.setter
def tags(self, tags: Optional[List[Optional['ApiCategory']]]):
if tags is None:
del self.tags
return
if not isinstance(tags, list):
raise TypeError('tags must be of type list')
if not all([isinstance(t, ApiCategory) for t in tags]):
raise TypeError('tags must contain only items of type ApiCategory')
self._tags = tags
@property
def publish_time(self) -> datetime:
return self._publish_time
@publish_time.setter
def publish_time(self, publish_time: datetime):
if publish_time is None:
del self.publish_time
return
if not isinstance(publish_time, datetime):
raise TypeError('publish_time must be of type datetime')
self._publish_time = publish_time
@property
def provenance_sources(self) -> str:
return self._provenance_sources
@provenance_sources.setter
def provenance_sources(self, provenance_sources: str):
if provenance_sources is None:
del self.provenance_sources
return
if not isinstance(provenance_sources, str):
raise TypeError('provenance_sources must be of type str')
self._provenance_sources = provenance_sources
@property
def url(self) -> str:
return self._url
@url.setter
def url(self, url: str):
if url is None:
del self.url
return
if not isinstance(url, str):
raise TypeError('url must be of type str')
self._url = url
@property
def model_version_links(self) -> Optional[List[Optional['ModelLink']]]:
return self._model_version_links
@model_version_links.setter
def model_version_links(self, model_version_links: Optional[List[Optional['ModelLink']]]):
if model_version_links is None:
del self.model_version_links
return
if not isinstance(model_version_links, list):
raise TypeError('model_version_links must be of type list')
if not all([isinstance(t, ModelLink) for t in model_version_links]):
raise TypeError('model_version_links must contain only items of type ModelLink')
self._model_version_links = model_version_links
|
class ApiModel(KaggleObject):
'''
Attributes:
id (int)
ref (str)
ref is `owner_slug/model_slug`
title (str)
subtitle (str)
author (str)
slug (str)
is_private (bool)
description (str)
instances (ApiModelInstance)
tags (ApiCategory)
publish_time (datetime)
provenance_sources (str)
url (str)
model_version_links (ModelLink)
'''
def __init__(self):
pass
@property
def id(self) -> int:
pass
@id.setter
def id(self) -> int:
pass
@property
def ref(self) -> str:
'''ref is `owner_slug/model_slug`'''
pass
@ref.setter
def ref(self) -> str:
pass
@property
def title(self) -> str:
pass
@title.setter
def title(self) -> str:
pass
@property
def subtitle(self) -> str:
pass
@subtitle.setter
def subtitle(self) -> str:
pass
@property
def author(self) -> str:
pass
@author.setter
def author(self) -> str:
pass
@property
def slug(self) -> str:
pass
@slug.setter
def slug(self) -> str:
pass
@property
def is_private(self) -> bool:
pass
@is_private.setter
def is_private(self) -> bool:
pass
@property
def description(self) -> str:
pass
@description.setter
def description(self) -> str:
pass
@property
def instances(self) -> Optional[List[Optional['ApiModelInstance']]]:
pass
@instances.setter
def instances(self) -> Optional[List[Optional['ApiModelInstance']]]:
pass
@property
def tags(self) -> Optional[List[Optional['ApiCategory']]]:
pass
@tags.setter
def tags(self) -> Optional[List[Optional['ApiCategory']]]:
pass
@property
def publish_time(self) -> datetime:
pass
@publish_time.setter
def publish_time(self) -> datetime:
pass
@property
def provenance_sources(self) -> str:
pass
@provenance_sources.setter
def provenance_sources(self) -> str:
pass
@property
def url(self) -> str:
pass
@url.setter
def url(self) -> str:
pass
@property
def model_version_links(self) -> Optional[List[Optional['ModelLink']]]:
pass
@model_version_links.setter
def model_version_links(self) -> Optional[List[Optional['ModelLink']]]:
pass
| 58 | 2 | 5 | 0 | 5 | 0 | 2 | 0.11 | 1 | 9 | 3 | 0 | 29 | 14 | 29 | 46 | 225 | 29 | 177 | 72 | 119 | 19 | 149 | 44 | 119 | 4 | 2 | 1 | 60 |
141,008 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiModelFile
|
class ApiModelFile(KaggleObject):
r"""
Attributes:
name (str)
size (int)
creation_date (datetime)
"""
def __init__(self):
self._name = ""
self._size = 0
self._creation_date = None
self._freeze()
@property
def name(self) -> str:
return self._name
@name.setter
def name(self, name: str):
if name is None:
del self.name
return
if not isinstance(name, str):
raise TypeError('name must be of type str')
self._name = name
@property
def size(self) -> int:
return self._size
@size.setter
def size(self, size: int):
if size is None:
del self.size
return
if not isinstance(size, int):
raise TypeError('size must be of type int')
self._size = size
@property
def creation_date(self) -> datetime:
return self._creation_date or None
@creation_date.setter
def creation_date(self, creation_date: datetime):
if creation_date is None:
del self.creation_date
return
if not isinstance(creation_date, datetime):
raise TypeError('creation_date must be of type datetime')
self._creation_date = creation_date
|
class ApiModelFile(KaggleObject):
'''
Attributes:
name (str)
size (int)
creation_date (datetime)
'''
def __init__(self):
pass
@property
def name(self) -> str:
pass
@name.setter
def name(self) -> str:
pass
@property
def size(self) -> int:
pass
@size.setter
def size(self) -> int:
pass
@property
def creation_date(self) -> datetime:
pass
@creation_date.setter
def creation_date(self) -> datetime:
pass
| 14 | 1 | 5 | 0 | 5 | 0 | 2 | 0.15 | 1 | 4 | 0 | 0 | 7 | 3 | 7 | 24 | 52 | 7 | 39 | 17 | 25 | 6 | 33 | 11 | 25 | 3 | 2 | 1 | 13 |
141,009 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiModelInstance
|
class ApiModelInstance(KaggleObject):
r"""
Attributes:
id (int)
slug (str)
framework (ModelFramework)
fine_tunable (bool)
overview (str)
usage (str)
download_url (str)
version_id (int)
version_number (int)
training_data (str)
url (str)
license_name (str)
model_instance_type (ModelInstanceType)
base_model_instance_information (BaseModelInstanceInformation)
external_base_model_url (str)
total_uncompressed_bytes (int)
"""
def __init__(self):
self._id = 0
self._slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._fine_tunable = False
self._overview = ""
self._usage = ""
self._download_url = ""
self._version_id = 0
self._version_number = 0
self._training_data = []
self._url = ""
self._license_name = ""
self._model_instance_type = ModelInstanceType.MODEL_INSTANCE_TYPE_UNSPECIFIED
self._base_model_instance_information = None
self._external_base_model_url = ""
self._total_uncompressed_bytes = 0
self._freeze()
@property
def id(self) -> int:
return self._id
@id.setter
def id(self, id: int):
if id is None:
del self.id
return
if not isinstance(id, int):
raise TypeError('id must be of type int')
self._id = id
@property
def slug(self) -> str:
return self._slug
@slug.setter
def slug(self, slug: str):
if slug is None:
del self.slug
return
if not isinstance(slug, str):
raise TypeError('slug must be of type str')
self._slug = slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def fine_tunable(self) -> bool:
return self._fine_tunable
@fine_tunable.setter
def fine_tunable(self, fine_tunable: bool):
if fine_tunable is None:
del self.fine_tunable
return
if not isinstance(fine_tunable, bool):
raise TypeError('fine_tunable must be of type bool')
self._fine_tunable = fine_tunable
@property
def overview(self) -> str:
return self._overview
@overview.setter
def overview(self, overview: str):
if overview is None:
del self.overview
return
if not isinstance(overview, str):
raise TypeError('overview must be of type str')
self._overview = overview
@property
def usage(self) -> str:
return self._usage
@usage.setter
def usage(self, usage: str):
if usage is None:
del self.usage
return
if not isinstance(usage, str):
raise TypeError('usage must be of type str')
self._usage = usage
@property
def download_url(self) -> str:
return self._download_url
@download_url.setter
def download_url(self, download_url: str):
if download_url is None:
del self.download_url
return
if not isinstance(download_url, str):
raise TypeError('download_url must be of type str')
self._download_url = download_url
@property
def version_id(self) -> int:
return self._version_id
@version_id.setter
def version_id(self, version_id: int):
if version_id is None:
del self.version_id
return
if not isinstance(version_id, int):
raise TypeError('version_id must be of type int')
self._version_id = version_id
@property
def version_number(self) -> int:
return self._version_number
@version_number.setter
def version_number(self, version_number: int):
if version_number is None:
del self.version_number
return
if not isinstance(version_number, int):
raise TypeError('version_number must be of type int')
self._version_number = version_number
@property
def training_data(self) -> Optional[List[str]]:
return self._training_data
@training_data.setter
def training_data(self, training_data: Optional[List[str]]):
if training_data is None:
del self.training_data
return
if not isinstance(training_data, list):
raise TypeError('training_data must be of type list')
if not all([isinstance(t, str) for t in training_data]):
raise TypeError('training_data must contain only items of type str')
self._training_data = training_data
@property
def url(self) -> str:
return self._url
@url.setter
def url(self, url: str):
if url is None:
del self.url
return
if not isinstance(url, str):
raise TypeError('url must be of type str')
self._url = url
@property
def license_name(self) -> str:
return self._license_name
@license_name.setter
def license_name(self, license_name: str):
if license_name is None:
del self.license_name
return
if not isinstance(license_name, str):
raise TypeError('license_name must be of type str')
self._license_name = license_name
@property
def model_instance_type(self) -> 'ModelInstanceType':
return self._model_instance_type
@model_instance_type.setter
def model_instance_type(self, model_instance_type: 'ModelInstanceType'):
if model_instance_type is None:
del self.model_instance_type
return
if not isinstance(model_instance_type, ModelInstanceType):
raise TypeError('model_instance_type must be of type ModelInstanceType')
self._model_instance_type = model_instance_type
@property
def base_model_instance_information(self) -> Optional['BaseModelInstanceInformation']:
return self._base_model_instance_information or None
@base_model_instance_information.setter
def base_model_instance_information(self, base_model_instance_information: Optional['BaseModelInstanceInformation']):
if base_model_instance_information is None:
del self.base_model_instance_information
return
if not isinstance(base_model_instance_information, BaseModelInstanceInformation):
raise TypeError('base_model_instance_information must be of type BaseModelInstanceInformation')
self._base_model_instance_information = base_model_instance_information
@property
def external_base_model_url(self) -> str:
return self._external_base_model_url
@external_base_model_url.setter
def external_base_model_url(self, external_base_model_url: str):
if external_base_model_url is None:
del self.external_base_model_url
return
if not isinstance(external_base_model_url, str):
raise TypeError('external_base_model_url must be of type str')
self._external_base_model_url = external_base_model_url
@property
def total_uncompressed_bytes(self) -> int:
return self._total_uncompressed_bytes
@total_uncompressed_bytes.setter
def total_uncompressed_bytes(self, total_uncompressed_bytes: int):
if total_uncompressed_bytes is None:
del self.total_uncompressed_bytes
return
if not isinstance(total_uncompressed_bytes, int):
raise TypeError('total_uncompressed_bytes must be of type int')
self._total_uncompressed_bytes = total_uncompressed_bytes
|
class ApiModelInstance(KaggleObject):
'''
Attributes:
id (int)
slug (str)
framework (ModelFramework)
fine_tunable (bool)
overview (str)
usage (str)
download_url (str)
version_id (int)
version_number (int)
training_data (str)
url (str)
license_name (str)
model_instance_type (ModelInstanceType)
base_model_instance_information (BaseModelInstanceInformation)
external_base_model_url (str)
total_uncompressed_bytes (int)
'''
def __init__(self):
pass
@property
def id(self) -> int:
pass
@id.setter
def id(self) -> int:
pass
@property
def slug(self) -> str:
pass
@slug.setter
def slug(self) -> str:
pass
@property
def framework(self) -> 'ModelFramework':
pass
@framework.setter
def framework(self) -> 'ModelFramework':
pass
@property
def fine_tunable(self) -> bool:
pass
@fine_tunable.setter
def fine_tunable(self) -> bool:
pass
@property
def overview(self) -> str:
pass
@overview.setter
def overview(self) -> str:
pass
@property
def usage(self) -> str:
pass
@usage.setter
def usage(self) -> str:
pass
@property
def download_url(self) -> str:
pass
@download_url.setter
def download_url(self) -> str:
pass
@property
def version_id(self) -> int:
pass
@version_id.setter
def version_id(self) -> int:
pass
@property
def version_number(self) -> int:
pass
@version_number.setter
def version_number(self) -> int:
pass
@property
def training_data(self) -> Optional[List[str]]:
pass
@training_data.setter
def training_data(self) -> Optional[List[str]]:
pass
@property
def url(self) -> str:
pass
@url.setter
def url(self) -> str:
pass
@property
def license_name(self) -> str:
pass
@license_name.setter
def license_name(self) -> str:
pass
@property
def model_instance_type(self) -> 'ModelInstanceType':
pass
@model_instance_type.setter
def model_instance_type(self) -> 'ModelInstanceType':
pass
@property
def base_model_instance_information(self) -> Optional['BaseModelInstanceInformation']:
pass
@base_model_instance_information.setter
def base_model_instance_information(self) -> Optional['BaseModelInstanceInformation']:
pass
@property
def external_base_model_url(self) -> str:
pass
@external_base_model_url.setter
def external_base_model_url(self) -> str:
pass
@property
def total_uncompressed_bytes(self) -> int:
pass
@total_uncompressed_bytes.setter
def total_uncompressed_bytes(self) -> int:
pass
| 66 | 1 | 5 | 0 | 5 | 0 | 2 | 0.1 | 1 | 8 | 3 | 0 | 33 | 16 | 33 | 50 | 249 | 33 | 197 | 82 | 131 | 19 | 165 | 50 | 131 | 4 | 2 | 1 | 66 |
141,010 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiReviewGatingUserConsentRequest
|
class ApiReviewGatingUserConsentRequest(KaggleObject):
r"""
Attributes:
agreement_id (int)
user_name (str)
review_status (GatingAgreementRequestsReviewStatus)
publisher_notes (str)
"""
def __init__(self):
self._agreement_id = 0
self._user_name = ""
self._review_status = GatingAgreementRequestsReviewStatus.GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED
self._publisher_notes = None
self._freeze()
@property
def agreement_id(self) -> int:
return self._agreement_id
@agreement_id.setter
def agreement_id(self, agreement_id: int):
if agreement_id is None:
del self.agreement_id
return
if not isinstance(agreement_id, int):
raise TypeError('agreement_id must be of type int')
self._agreement_id = agreement_id
@property
def user_name(self) -> str:
return self._user_name
@user_name.setter
def user_name(self, user_name: str):
if user_name is None:
del self.user_name
return
if not isinstance(user_name, str):
raise TypeError('user_name must be of type str')
self._user_name = user_name
@property
def review_status(self) -> 'GatingAgreementRequestsReviewStatus':
return self._review_status
@review_status.setter
def review_status(self, review_status: 'GatingAgreementRequestsReviewStatus'):
if review_status is None:
del self.review_status
return
if not isinstance(review_status, GatingAgreementRequestsReviewStatus):
raise TypeError('review_status must be of type GatingAgreementRequestsReviewStatus')
self._review_status = review_status
@property
def publisher_notes(self) -> str:
return self._publisher_notes or ""
@publisher_notes.setter
def publisher_notes(self, publisher_notes: str):
if publisher_notes is None:
del self.publisher_notes
return
if not isinstance(publisher_notes, str):
raise TypeError('publisher_notes must be of type str')
self._publisher_notes = publisher_notes
def endpoint(self):
path = '/api/v1/gating/{agreement_id}/user-consent/review'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return '*'
|
class ApiReviewGatingUserConsentRequest(KaggleObject):
'''
Attributes:
agreement_id (int)
user_name (str)
review_status (GatingAgreementRequestsReviewStatus)
publisher_notes (str)
'''
def __init__(self):
pass
@property
def agreement_id(self) -> int:
pass
@agreement_id.setter
def agreement_id(self) -> int:
pass
@property
def user_name(self) -> str:
pass
@user_name.setter
def user_name(self) -> str:
pass
@property
def review_status(self) -> 'GatingAgreementRequestsReviewStatus':
pass
@review_status.setter
def review_status(self) -> 'GatingAgreementRequestsReviewStatus':
pass
@property
def publisher_notes(self) -> str:
pass
@publisher_notes.setter
def publisher_notes(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 23 | 1 | 4 | 0 | 4 | 0 | 2 | 0.12 | 1 | 4 | 1 | 0 | 10 | 4 | 12 | 29 | 80 | 13 | 60 | 28 | 37 | 7 | 50 | 18 | 37 | 3 | 2 | 1 | 20 |
141,011 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiUpdateModelInstanceRequest
|
class ApiUpdateModelInstanceRequest(KaggleObject):
r"""
Attributes:
owner_slug (str)
model_slug (str)
framework (ModelFramework)
instance_slug (str)
overview (str)
usage (str)
fine_tunable (bool)
training_data (str)
update_mask (FieldMask)
license_name (str)
model_instance_type (ModelInstanceType)
base_model_instance (str)
external_base_model_url (str)
"""
def __init__(self):
self._owner_slug = ""
self._model_slug = ""
self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED
self._instance_slug = ""
self._overview = ""
self._usage = ""
self._fine_tunable = None
self._training_data = []
self._update_mask = None
self._license_name = None
self._model_instance_type = None
self._base_model_instance = None
self._external_base_model_url = None
self._freeze()
@property
def owner_slug(self) -> str:
return self._owner_slug
@owner_slug.setter
def owner_slug(self, owner_slug: str):
if owner_slug is None:
del self.owner_slug
return
if not isinstance(owner_slug, str):
raise TypeError('owner_slug must be of type str')
self._owner_slug = owner_slug
@property
def model_slug(self) -> str:
return self._model_slug
@model_slug.setter
def model_slug(self, model_slug: str):
if model_slug is None:
del self.model_slug
return
if not isinstance(model_slug, str):
raise TypeError('model_slug must be of type str')
self._model_slug = model_slug
@property
def framework(self) -> 'ModelFramework':
return self._framework
@framework.setter
def framework(self, framework: 'ModelFramework'):
if framework is None:
del self.framework
return
if not isinstance(framework, ModelFramework):
raise TypeError('framework must be of type ModelFramework')
self._framework = framework
@property
def instance_slug(self) -> str:
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug: str):
if instance_slug is None:
del self.instance_slug
return
if not isinstance(instance_slug, str):
raise TypeError('instance_slug must be of type str')
self._instance_slug = instance_slug
@property
def overview(self) -> str:
return self._overview
@overview.setter
def overview(self, overview: str):
if overview is None:
del self.overview
return
if not isinstance(overview, str):
raise TypeError('overview must be of type str')
self._overview = overview
@property
def usage(self) -> str:
return self._usage
@usage.setter
def usage(self, usage: str):
if usage is None:
del self.usage
return
if not isinstance(usage, str):
raise TypeError('usage must be of type str')
self._usage = usage
@property
def fine_tunable(self) -> bool:
return self._fine_tunable or False
@fine_tunable.setter
def fine_tunable(self, fine_tunable: bool):
if fine_tunable is None:
del self.fine_tunable
return
if not isinstance(fine_tunable, bool):
raise TypeError('fine_tunable must be of type bool')
self._fine_tunable = fine_tunable
@property
def training_data(self) -> Optional[List[str]]:
return self._training_data
@training_data.setter
def training_data(self, training_data: Optional[List[str]]):
if training_data is None:
del self.training_data
return
if not isinstance(training_data, list):
raise TypeError('training_data must be of type list')
if not all([isinstance(t, str) for t in training_data]):
raise TypeError('training_data must contain only items of type str')
self._training_data = training_data
@property
def update_mask(self) -> FieldMask:
return self._update_mask
@update_mask.setter
def update_mask(self, update_mask: FieldMask):
if update_mask is None:
del self.update_mask
return
if not isinstance(update_mask, FieldMask):
raise TypeError('update_mask must be of type FieldMask')
self._update_mask = update_mask
@property
def license_name(self) -> str:
return self._license_name or ""
@license_name.setter
def license_name(self, license_name: str):
if license_name is None:
del self.license_name
return
if not isinstance(license_name, str):
raise TypeError('license_name must be of type str')
self._license_name = license_name
@property
def model_instance_type(self) -> 'ModelInstanceType':
return self._model_instance_type or ModelInstanceType.MODEL_INSTANCE_TYPE_UNSPECIFIED
@model_instance_type.setter
def model_instance_type(self, model_instance_type: 'ModelInstanceType'):
if model_instance_type is None:
del self.model_instance_type
return
if not isinstance(model_instance_type, ModelInstanceType):
raise TypeError('model_instance_type must be of type ModelInstanceType')
self._model_instance_type = model_instance_type
@property
def base_model_instance(self) -> str:
return self._base_model_instance or ""
@base_model_instance.setter
def base_model_instance(self, base_model_instance: str):
if base_model_instance is None:
del self.base_model_instance
return
if not isinstance(base_model_instance, str):
raise TypeError('base_model_instance must be of type str')
self._base_model_instance = base_model_instance
@property
def external_base_model_url(self) -> str:
  # Falls back to "" when unset.
  return self._external_base_model_url or ""
@external_base_model_url.setter
def external_base_model_url(self, external_base_model_url: str):
  # None clears the field; only str values are stored.
  if external_base_model_url is None:
    del self.external_base_model_url
    return
  if not isinstance(external_base_model_url, str):
    raise TypeError('external_base_model_url must be of type str')
  self._external_base_model_url = external_base_model_url
def endpoint(self):
  """Return the REST path for this request, with the placeholders filled in
  from this object's field map (owner_slug, model_slug, framework, instance_slug)."""
  path = '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/update'
  return path.format_map(self.to_field_map(self))
@staticmethod
def method():
  """HTTP verb used by this request."""
  return 'POST'
@staticmethod
def body_fields():
  """Selector for which fields go in the request body ('*' wildcard here)."""
  return '*'
|
class ApiUpdateModelInstanceRequest(KaggleObject):
  '''
  Skeleton declaration of the update-model-instance request; all bodies are
  stubbed with `pass`.

  Attributes:
    owner_slug (str)
    model_slug (str)
    framework (ModelFramework)
    instance_slug (str)
    overview (str)
    usage (str)
    fine_tunable (bool)
    training_data (str)
    update_mask (FieldMask)
    license_name (str)
    model_instance_type (ModelInstanceType)
    base_model_instance (str)
    external_base_model_url (str)
  '''

  def __init__(self):
    pass

  @property
  def owner_slug(self) -> str:
    pass

  # Fixed: a @x.setter declared as (self) alone cannot be invoked by
  # `obj.x = value`; every setter now accepts the assigned value.
  @owner_slug.setter
  def owner_slug(self, owner_slug: str):
    pass

  @property
  def model_slug(self) -> str:
    pass

  @model_slug.setter
  def model_slug(self, model_slug: str):
    pass

  @property
  def framework(self) -> 'ModelFramework':
    pass

  @framework.setter
  def framework(self, framework: 'ModelFramework'):
    pass

  @property
  def instance_slug(self) -> str:
    pass

  @instance_slug.setter
  def instance_slug(self, instance_slug: str):
    pass

  @property
  def overview(self) -> str:
    pass

  @overview.setter
  def overview(self, overview: str):
    pass

  @property
  def usage(self) -> str:
    pass

  @usage.setter
  def usage(self, usage: str):
    pass

  @property
  def fine_tunable(self) -> bool:
    pass

  @fine_tunable.setter
  def fine_tunable(self, fine_tunable: bool):
    pass

  @property
  def training_data(self) -> Optional[List[str]]:
    pass

  @training_data.setter
  def training_data(self, training_data: Optional[List[str]]):
    pass

  @property
  def update_mask(self) -> FieldMask:
    pass

  @update_mask.setter
  def update_mask(self, update_mask: FieldMask):
    pass

  @property
  def license_name(self) -> str:
    pass

  @license_name.setter
  def license_name(self, license_name: str):
    pass

  @property
  def model_instance_type(self) -> 'ModelInstanceType':
    pass

  @model_instance_type.setter
  def model_instance_type(self, model_instance_type: 'ModelInstanceType'):
    pass

  @property
  def base_model_instance(self) -> str:
    pass

  @base_model_instance.setter
  def base_model_instance(self, base_model_instance: str):
    pass

  @property
  def external_base_model_url(self) -> str:
    pass

  @external_base_model_url.setter
  def external_base_model_url(self, external_base_model_url: str):
    pass

  def endpoint(self):
    pass

  @staticmethod
  def method():
    pass

  @staticmethod
  def body_fields():
    pass
| 59 | 1 | 5 | 0 | 5 | 0 | 2 | 0.09 | 1 | 6 | 2 | 0 | 28 | 13 | 30 | 47 | 217 | 31 | 170 | 73 | 111 | 16 | 142 | 45 | 111 | 4 | 2 | 1 | 57 |
141,012 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiUpdateModelResponse
|
class ApiUpdateModelResponse(KaggleObject):
  r"""
  Attributes:
    id (int)
    ref (str)
    error (str)
    url (str)
  """

  def __init__(self):
    # Every field starts unset; the getters substitute a default on read.
    self._id = None
    self._ref = None
    self._error = None
    self._url = None
    self._freeze()

  @property
  def id(self) -> int:
    return self._id if self._id else 0

  @id.setter
  def id(self, id: int):
    # None clears the field, a valid int is stored, anything else is rejected.
    if id is None:
      del self.id
    elif isinstance(id, int):
      self._id = id
    else:
      raise TypeError('id must be of type int')

  @property
  def ref(self) -> str:
    return self._ref if self._ref else ""

  @ref.setter
  def ref(self, ref: str):
    if ref is None:
      del self.ref
    elif isinstance(ref, str):
      self._ref = ref
    else:
      raise TypeError('ref must be of type str')

  @property
  def error(self) -> str:
    return self._error if self._error else ""

  @error.setter
  def error(self, error: str):
    if error is None:
      del self.error
    elif isinstance(error, str):
      self._error = error
    else:
      raise TypeError('error must be of type str')

  @property
  def url(self) -> str:
    return self._url if self._url else ""

  @url.setter
  def url(self, url: str):
    if url is None:
      del self.url
    elif isinstance(url, str):
      self._url = url
    else:
      raise TypeError('url must be of type str')
|
class ApiUpdateModelResponse(KaggleObject):
  '''
  Skeleton declaration of the update-model response; all bodies are stubbed
  with `pass`.

  Attributes:
    id (int)
    ref (str)
    error (str)
    url (str)
  '''

  def __init__(self):
    pass

  @property
  def id(self) -> int:
    pass

  # Fixed: setters previously took no value parameter, which breaks
  # attribute assignment; each now accepts the assigned value.
  @id.setter
  def id(self, id: int):
    pass

  @property
  def ref(self) -> str:
    pass

  @ref.setter
  def ref(self, ref: str):
    pass

  @property
  def error(self) -> str:
    pass

  @error.setter
  def error(self, error: str):
    pass

  @property
  def url(self) -> str:
    pass

  @url.setter
  def url(self, url: str):
    pass
| 18 | 1 | 5 | 0 | 5 | 0 | 2 | 0.14 | 1 | 3 | 0 | 0 | 9 | 4 | 9 | 26 | 67 | 9 | 51 | 22 | 33 | 7 | 43 | 14 | 33 | 3 | 2 | 1 | 17 |
141,013 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/datasets/types/dataset_api_service.py
|
src.kagglesdk.datasets.types.dataset_api_service.ApiUploadDatasetFileResponse
|
class ApiUploadDatasetFileResponse(KaggleObject):
  r"""
  Attributes:
    token (str)
      Opaque string token used to reference the new BlobFile.
    create_url (str)
      URL to use to start the upload
  """

  def __init__(self):
    # Both fields default to the empty string.
    self._token = ""
    self._create_url = ""
    self._freeze()

  @property
  def token(self) -> str:
    """Opaque string token used to reference the new BlobFile."""
    return self._token

  @token.setter
  def token(self, token: str):
    # None clears the field, a str is stored, anything else is rejected.
    if token is None:
      del self.token
    elif isinstance(token, str):
      self._token = token
    else:
      raise TypeError('token must be of type str')

  @property
  def create_url(self) -> str:
    """URL to use to start the upload"""
    return self._create_url

  @create_url.setter
  def create_url(self, create_url: str):
    if create_url is None:
      del self.create_url
    elif isinstance(create_url, str):
      self._create_url = create_url
    else:
      raise TypeError('create_url must be of type str')

  @property
  def createUrl(self):
    # camelCase read-only alias for create_url.
    return self.create_url
|
class ApiUploadDatasetFileResponse(KaggleObject):
  '''
  Skeleton declaration of the upload-dataset-file response; all bodies are
  stubbed with `pass`.

  Attributes:
    token (str)
      Opaque string token used to reference the new BlobFile.
    create_url (str)
      URL to use to start the upload
  '''

  def __init__(self):
    pass

  @property
  def token(self) -> str:
    '''Opaque string token used to reference the new BlobFile.'''
    pass

  # Fixed: setters previously took no value parameter, which breaks
  # attribute assignment; each now accepts the assigned value.
  @token.setter
  def token(self, token: str):
    pass

  @property
  def create_url(self) -> str:
    '''URL to use to start the upload'''
    pass

  @create_url.setter
  def create_url(self, create_url: str):
    pass

  @property
  def createUrl(self):
    pass
| 12 | 3 | 4 | 0 | 4 | 0 | 2 | 0.3 | 1 | 2 | 0 | 0 | 6 | 2 | 6 | 23 | 45 | 6 | 30 | 14 | 18 | 9 | 25 | 9 | 18 | 3 | 2 | 1 | 10 |
141,014 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/models/types/model_api_service.py
|
src.kagglesdk.models.types.model_api_service.ApiDeleteModelRequest
|
class ApiDeleteModelRequest(KaggleObject):
  r"""
  Attributes:
    owner_slug (str)
    model_slug (str)
  """

  def __init__(self):
    # Both slugs default to the empty string.
    self._owner_slug = ""
    self._model_slug = ""
    self._freeze()

  @property
  def owner_slug(self) -> str:
    return self._owner_slug

  @owner_slug.setter
  def owner_slug(self, owner_slug: str):
    # None clears the field, a str is stored, anything else is rejected.
    if owner_slug is None:
      del self.owner_slug
    elif isinstance(owner_slug, str):
      self._owner_slug = owner_slug
    else:
      raise TypeError('owner_slug must be of type str')

  @property
  def model_slug(self) -> str:
    return self._model_slug

  @model_slug.setter
  def model_slug(self, model_slug: str):
    if model_slug is None:
      del self.model_slug
    elif isinstance(model_slug, str):
      self._model_slug = model_slug
    else:
      raise TypeError('model_slug must be of type str')

  def endpoint(self):
    """REST path for the delete-model call, filled from this request's field map."""
    return '/api/v1/models/{owner_slug}/{model_slug}/delete'.format_map(self.to_field_map(self))

  @staticmethod
  def method():
    """HTTP verb used by this request."""
    return 'POST'
|
class ApiDeleteModelRequest(KaggleObject):
  '''
  Skeleton declaration of the delete-model request; all bodies are stubbed
  with `pass`.

  Attributes:
    owner_slug (str)
    model_slug (str)
  '''

  def __init__(self):
    pass

  @property
  def owner_slug(self) -> str:
    pass

  # Fixed: setters previously took no value parameter, which breaks
  # attribute assignment; each now accepts the assigned value.
  @owner_slug.setter
  def owner_slug(self, owner_slug: str):
    pass

  @property
  def model_slug(self) -> str:
    pass

  @model_slug.setter
  def model_slug(self, model_slug: str):
    pass

  def endpoint(self):
    pass

  @staticmethod
  def method():
    pass
| 13 | 1 | 4 | 0 | 4 | 0 | 2 | 0.15 | 1 | 2 | 0 | 0 | 6 | 2 | 7 | 24 | 46 | 8 | 33 | 16 | 20 | 5 | 28 | 11 | 20 | 3 | 2 | 1 | 11 |
141,015 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/datasets/types/dataset_api_service.py
|
src.kagglesdk.datasets.types.dataset_api_service.ApiUpdateDatasetMetadataResponse
|
class ApiUpdateDatasetMetadataResponse(KaggleObject):
  r"""
  Attributes:
    errors (str)
      Required for backwards-compatibility.
  """

  def __init__(self):
    # Errors default to an empty list.
    self._errors = []
    self._freeze()

  @property
  def errors(self) -> Optional[List[str]]:
    """Required for backwards-compatibility."""
    return self._errors

  @errors.setter
  def errors(self, errors: Optional[List[str]]):
    # None clears the field via the deleter.
    if errors is None:
      del self.errors
      return
    if not isinstance(errors, list):
      raise TypeError('errors must be of type list')
    # Reject the whole assignment if any entry is not a str.
    for entry in errors:
      if not isinstance(entry, str):
        raise TypeError('errors must contain only items of type str')
    self._errors = errors
|
class ApiUpdateDatasetMetadataResponse(KaggleObject):
  '''
  Skeleton declaration of the update-dataset-metadata response; all bodies
  are stubbed with `pass`.

  Attributes:
    errors (str)
      Required for backwards-compatibility.
  '''

  def __init__(self):
    pass

  @property
  def errors(self) -> Optional[List[str]]:
    '''Required for backwards-compatibility.'''
    pass

  # Fixed: the setter previously took no value parameter, which breaks
  # attribute assignment; it now accepts the assigned value.
  @errors.setter
  def errors(self, errors: Optional[List[str]]):
    pass
| 6 | 2 | 5 | 0 | 5 | 0 | 2 | 0.35 | 1 | 3 | 0 | 0 | 3 | 1 | 3 | 20 | 26 | 3 | 17 | 7 | 11 | 6 | 15 | 5 | 11 | 4 | 2 | 1 | 6 |
141,016 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/kernel_push_request.py
|
src.kaggle.models.kernel_push_request.KernelPushRequest
|
class KernelPushRequest(object):
    """Request payload for pushing (creating or updating) a kernel.

    Attributes:
      project_types (dict): The key is attribute name
        and the value is attribute type.
      attribute_map (dict): The key is attribute name
        and the value is json key in definition.
    """

    project_types = {
        'id': 'int',
        'slug': 'str',
        'new_title': 'str',
        'text': 'str',
        'language': 'str',
        'kernel_type': 'str',
        'is_private': 'bool',
        'enable_gpu': 'bool',
        'enable_tpu': 'bool',
        'enable_internet': 'bool',
        'dataset_data_sources': 'list[str]',
        'competition_data_sources': 'list[str]',
        'kernel_data_sources': 'list[str]',
        'model_data_sources': 'list[str]',
        'category_ids': 'list[str]',
        'docker_image_pinning_type': 'str'
    }

    attribute_map = {
        'id': 'id',
        'slug': 'slug',
        'new_title': 'newTitle',
        'text': 'text',
        'language': 'language',
        'kernel_type': 'kernelType',
        'is_private': 'isPrivate',
        'enable_gpu': 'enableGpu',
        'enable_tpu': 'enableTpu',
        'enable_internet': 'enableInternet',
        'dataset_data_sources': 'datasetDataSources',
        'competition_data_sources': 'competitionDataSources',
        'kernel_data_sources': 'kernelDataSources',
        'model_data_sources': 'modelDataSources',
        'category_ids': 'categoryIds',
        'docker_image_pinning_type': 'dockerImagePinningType'
    }

    def __init__(self, id=None, slug=None, new_title=None, text=None, language=None, kernel_type=None, is_private=None, enable_gpu=None, enable_tpu=None, enable_internet=None, dataset_data_sources=None, competition_data_sources=None, kernel_data_sources=None, model_data_sources=None, category_ids=None, docker_image_pinning_type=None):  # noqa: E501
        """Initialize the request.

        `text`, `language` and `kernel_type` are required (their setters
        raise ValueError on None); every other argument is optional and is
        only assigned when not None, so its validation is skipped for
        omitted values.
        """
        self._id = None
        self._slug = None
        self._new_title = None
        self._text = None
        self._language = None
        self._kernel_type = None
        self._is_private = None
        self._enable_gpu = None
        self._enable_tpu = None
        self._enable_internet = None
        self._dataset_data_sources = None
        self._competition_data_sources = None
        self._kernel_data_sources = None
        self._model_data_sources = None
        self._category_ids = None
        self._docker_image_pinning_type = None
        self.discriminator = None
        if id is not None:
            self.id = id
        if slug is not None:
            self.slug = slug
        if new_title is not None:
            self.new_title = new_title
        # Required fields: assigned unconditionally so the setters validate None.
        self.text = text
        self.language = language
        self.kernel_type = kernel_type
        if is_private is not None:
            self.is_private = is_private
        if enable_gpu is not None:
            self.enable_gpu = enable_gpu
        if enable_tpu is not None:
            self.enable_tpu = enable_tpu
        if enable_internet is not None:
            self.enable_internet = enable_internet
        if dataset_data_sources is not None:
            self.dataset_data_sources = dataset_data_sources
        if competition_data_sources is not None:
            self.competition_data_sources = competition_data_sources
        if kernel_data_sources is not None:
            self.kernel_data_sources = kernel_data_sources
        if model_data_sources is not None:
            self.model_data_sources = model_data_sources
        if category_ids is not None:
            self.category_ids = category_ids
        if docker_image_pinning_type is not None:
            self.docker_image_pinning_type = docker_image_pinning_type

    @property
    def id(self):
        """The kernel's ID number. One of `id` and `slug` are required;
        if both are specified, `id` is preferred.

        :rtype: int
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the kernel's ID number.

        :param id: int
        """
        self._id = id

    @property
    def slug(self):
        """The full slug of the kernel to push to, in the format
        `USERNAME/KERNEL-SLUG` (title lowercased, spaces replaced by dashes).
        One of `id` and `slug` are required; if both are specified, `id` is
        preferred.

        :rtype: str
        """
        return self._slug

    @slug.setter
    def slug(self, slug):
        """Sets the kernel slug.

        :param slug: str
        """
        self._slug = slug

    @property
    def new_title(self):
        """The title to be set on the kernel.

        :rtype: str
        """
        return self._new_title

    @new_title.setter
    def new_title(self, new_title):
        """Sets the title to be set on the kernel.

        :param new_title: str
        """
        self._new_title = new_title

    @property
    def text(self):
        """The kernel's source code.

        :rtype: str
        """
        return self._text

    @text.setter
    def text(self, text):
        """Sets the kernel's source code. Required — None is rejected.

        :param text: str
        :raises ValueError: if `text` is None
        """
        if text is None:
            raise ValueError("Invalid value for `text`, must not be `None`")  # noqa: E501
        self._text = text

    @property
    def language(self):
        """The language that the kernel is written in.

        :rtype: str
        """
        return self._language

    @language.setter
    def language(self, language):
        """Sets the kernel language. Required and restricted to a known set.

        :param language: str, one of "python", "r", "rmarkdown"
        :raises ValueError: if `language` is None or not an allowed value
        """
        if language is None:
            raise ValueError("Invalid value for `language`, must not be `None`")  # noqa: E501
        allowed_values = ["python", "r", "rmarkdown"]  # noqa: E501
        if language not in allowed_values:
            raise ValueError(
                "Invalid value for `language` ({0}), must be one of {1}"  # noqa: E501
                .format(language, allowed_values)
            )
        self._language = language

    @property
    def kernel_type(self):
        """The type of kernel. Cannot be changed once the kernel has been
        created.

        :rtype: str
        """
        return self._kernel_type

    @kernel_type.setter
    def kernel_type(self, kernel_type):
        """Sets the kernel type. Required and restricted to a known set.

        :param kernel_type: str, one of "script", "notebook"
        :raises ValueError: if `kernel_type` is None or not an allowed value
        """
        if kernel_type is None:
            raise ValueError("Invalid value for `kernel_type`, must not be `None`")  # noqa: E501
        allowed_values = ["script", "notebook"]  # noqa: E501
        if kernel_type not in allowed_values:
            raise ValueError(
                "Invalid value for `kernel_type` ({0}), must be one of {1}"  # noqa: E501
                .format(kernel_type, allowed_values)
            )
        self._kernel_type = kernel_type

    @property
    def is_private(self):
        """Whether or not the kernel should be private.

        :rtype: bool
        """
        return self._is_private

    @is_private.setter
    def is_private(self, is_private):
        """Sets whether the kernel should be private.

        :param is_private: bool
        """
        self._is_private = is_private

    @property
    def enable_gpu(self):
        """Whether or not the kernel should run on a GPU.

        :rtype: bool
        """
        return self._enable_gpu

    @enable_gpu.setter
    def enable_gpu(self, enable_gpu):
        """Sets whether the kernel should run on a GPU.

        :param enable_gpu: bool
        """
        self._enable_gpu = enable_gpu

    @property
    def enable_tpu(self):
        """Whether or not the kernel should run on a TPU.

        :rtype: bool
        """
        return self._enable_tpu

    @enable_tpu.setter
    def enable_tpu(self, enable_tpu):
        """Sets whether the kernel should run on a TPU.

        :param enable_tpu: bool
        """
        self._enable_tpu = enable_tpu

    @property
    def enable_internet(self):
        """Whether or not the kernel should be able to access the internet.

        :rtype: bool
        """
        return self._enable_internet

    @enable_internet.setter
    def enable_internet(self, enable_internet):
        """Sets whether the kernel may access the internet.

        :param enable_internet: bool
        """
        self._enable_internet = enable_internet

    @property
    def dataset_data_sources(self):
        """Dataset data sources the kernel should use; each is specified as
        `USERNAME/DATASET-SLUG`.

        :rtype: list[str]
        """
        return self._dataset_data_sources

    @dataset_data_sources.setter
    def dataset_data_sources(self, dataset_data_sources):
        """Sets the dataset data sources.

        :param dataset_data_sources: list[str]
        """
        self._dataset_data_sources = dataset_data_sources

    @property
    def competition_data_sources(self):
        """Competition data sources the kernel should use.

        :rtype: list[str]
        """
        return self._competition_data_sources

    @competition_data_sources.setter
    def competition_data_sources(self, competition_data_sources):
        """Sets the competition data sources.

        :param competition_data_sources: list[str]
        """
        self._competition_data_sources = competition_data_sources

    @property
    def kernel_data_sources(self):
        """Kernel data sources the kernel should use; each is specified as
        `USERNAME/KERNEL-SLUG`.

        :rtype: list[str]
        """
        return self._kernel_data_sources

    @kernel_data_sources.setter
    def kernel_data_sources(self, kernel_data_sources):
        """Sets the kernel data sources.

        :param kernel_data_sources: list[str]
        """
        self._kernel_data_sources = kernel_data_sources

    @property
    def model_data_sources(self):
        """Model data sources the kernel should use; each is specified as
        `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER`.

        :rtype: list[str]
        """
        return self._model_data_sources

    @model_data_sources.setter
    def model_data_sources(self, model_data_sources):
        """Sets the model data sources.

        :param model_data_sources: list[str]
        """
        self._model_data_sources = model_data_sources

    @property
    def category_ids(self):
        """A list of tag IDs to associate with the kernel.

        :rtype: list[str]
        """
        return self._category_ids

    @category_ids.setter
    def category_ids(self, category_ids):
        """Sets the tag IDs.

        :param category_ids: list[str]
        """
        self._category_ids = category_ids

    @property
    def docker_image_pinning_type(self):
        """Which docker image to use for executing new versions going forward.

        :rtype: str
        """
        return self._docker_image_pinning_type

    @docker_image_pinning_type.setter
    def docker_image_pinning_type(self, docker_image_pinning_type):
        """Sets the docker image pinning type.

        NOTE: unlike the other optional fields, None is rejected here too,
        because the membership check runs unconditionally (kept for
        backward compatibility).

        :param docker_image_pinning_type: str, one of "original", "latest"
        :raises ValueError: if the value is not an allowed value
        """
        allowed_values = ["original", "latest"]  # noqa: E501
        if docker_image_pinning_type not in allowed_values:
            raise ValueError(
                "Invalid value for `docker_image_pinning_type` ({0}), must be one of {1}"  # noqa: E501
                .format(docker_image_pinning_type, allowed_values)
            )
        self._docker_image_pinning_type = docker_image_pinning_type

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested objects with a `to_dict` method are serialized recursively,
        both directly and inside lists/dicts.
        """
        result = {}
        # dict.items() replaces six.iteritems: identical behavior on
        # Python 3, and this method no longer needs the `six` shim.
        for attr in self.project_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    x.to_dict() if hasattr(x, "to_dict") else x
                    for x in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, KernelPushRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
|
class KernelPushRequest(object):
'''
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self, id=None, slug=None, new_title=None, text=None, language=None, kernel_type=None, is_private=None, enable_gpu=None, enable_tpu=None, enable_internet=None, dataset_data_sources=None, competition_data_sources=None, kernel_data_sources=None, model_data_sources=None, category_ids=None, docker_image_pinning_type=None):
pass
@property
def id(self):
'''Gets the id of this KernelPushRequest. # noqa: E501
The kernel's ID number. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501
:return: The id of this KernelPushRequest. # noqa: E501
:rtype: int
'''
pass
@id.setter
def id(self):
'''Sets the id of this KernelPushRequest.
The kernel's ID number. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501
:param id: The id of this KernelPushRequest. # noqa: E501
:type: int
'''
pass
@property
def slug(self):
'''Gets the slug of this KernelPushRequest. # noqa: E501
The full slug of the kernel to push to, in the format `USERNAME/KERNEL-SLUG`. The kernel slug must be the title lowercased with dashes (`-`) replacing spaces. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501
:return: The slug of this KernelPushRequest. # noqa: E501
:rtype: str
'''
pass
@slug.setter
def slug(self):
'''Sets the slug of this KernelPushRequest.
The full slug of the kernel to push to, in the format `USERNAME/KERNEL-SLUG`. The kernel slug must be the title lowercased with dashes (`-`) replacing spaces. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501
:param slug: The slug of this KernelPushRequest. # noqa: E501
:type: str
'''
pass
@property
def new_title(self):
'''Gets the new_title of this KernelPushRequest. # noqa: E501
The title to be set on the kernel # noqa: E501
:return: The new_title of this KernelPushRequest. # noqa: E501
:rtype: str
'''
pass
@new_title.setter
def new_title(self):
'''Sets the new_title of this KernelPushRequest.
The title to be set on the kernel # noqa: E501
:param new_title: The new_title of this KernelPushRequest. # noqa: E501
:type: str
'''
pass
@property
def text(self):
'''Gets the text of this KernelPushRequest. # noqa: E501
The kernel's source code # noqa: E501
:return: The text of this KernelPushRequest. # noqa: E501
:rtype: str
'''
pass
@text.setter
def text(self):
'''Sets the text of this KernelPushRequest.
The kernel's source code # noqa: E501
:param text: The text of this KernelPushRequest. # noqa: E501
:type: str
'''
pass
@property
def language(self):
'''Gets the language of this KernelPushRequest. # noqa: E501
The language that the kernel is written in # noqa: E501
:return: The language of this KernelPushRequest. # noqa: E501
:rtype: str
'''
pass
@language.setter
def language(self):
'''Sets the language of this KernelPushRequest.
The language that the kernel is written in # noqa: E501
:param language: The language of this KernelPushRequest. # noqa: E501
:type: str
'''
pass
@property
def kernel_type(self):
'''Gets the kernel_type of this KernelPushRequest. # noqa: E501
The type of kernel. Cannot be changed once the kernel has been created # noqa: E501
:return: The kernel_type of this KernelPushRequest. # noqa: E501
:rtype: str
'''
pass
@kernel_type.setter
def kernel_type(self):
'''Sets the kernel_type of this KernelPushRequest.
The type of kernel. Cannot be changed once the kernel has been created # noqa: E501
:param kernel_type: The kernel_type of this KernelPushRequest. # noqa: E501
:type: str
'''
pass
@property
def is_private(self):
'''Gets the is_private of this KernelPushRequest. # noqa: E501
Whether or not the kernel should be private # noqa: E501
:return: The is_private of this KernelPushRequest. # noqa: E501
:rtype: bool
'''
pass
@is_private.setter
def is_private(self):
'''Sets the is_private of this KernelPushRequest.
Whether or not the kernel should be private # noqa: E501
:param is_private: The is_private of this KernelPushRequest. # noqa: E501
:type: bool
'''
pass
@property
def enable_gpu(self):
'''Gets the enable_gpu of this KernelPushRequest. # noqa: E501
Whether or not the kernel should run on a GPU # noqa: E501
:return: The enable_gpu of this KernelPushRequest. # noqa: E501
:rtype: bool
'''
pass
@enable_gpu.setter
def enable_gpu(self):
'''Sets the enable_gpu of this KernelPushRequest.
Whether or not the kernel should run on a GPU # noqa: E501
:param enable_gpu: The enable_gpu of this KernelPushRequest. # noqa: E501
:type: bool
'''
pass
@property
def enable_tpu(self):
'''Gets the enable_tpu of this KernelPushRequest. # noqa: E501
Whether or not the kernel should run on a TPU # noqa: E501
:return: The enable_tpu of this KernelPushRequest. # noqa: E501
:rtype: bool
'''
pass
@enable_tpu.setter
def enable_tpu(self):
'''Sets the enable_tpu of this KernelPushRequest.
Whether or not the kernel should run on a TPU # noqa: E501
:param enable_tpu: The enable_tpu of this KernelPushRequest. # noqa: E501
:type: bool
'''
pass
@property
def enable_internet(self):
'''Gets the enable_internet of this KernelPushRequest. # noqa: E501
Whether or not the kernel should be able to access the internet # noqa: E501
:return: The enable_internet of this KernelPushRequest. # noqa: E501
:rtype: bool
'''
pass
@enable_internet.setter
def enable_internet(self):
'''Sets the enable_internet of this KernelPushRequest.
Whether or not the kernel should be able to access the internet # noqa: E501
:param enable_internet: The enable_internet of this KernelPushRequest. # noqa: E501
:type: bool
'''
pass
@property
def dataset_data_sources(self):
'''Gets the dataset_data_sources of this KernelPushRequest. # noqa: E501
A list of dataset data sources that the kernel should use. Each dataset is specified as `USERNAME/DATASET-SLUG` # noqa: E501
:return: The dataset_data_sources of this KernelPushRequest. # noqa: E501
:rtype: list[str]
'''
pass
@dataset_data_sources.setter
def dataset_data_sources(self):
'''Sets the dataset_data_sources of this KernelPushRequest.
A list of dataset data sources that the kernel should use. Each dataset is specified as `USERNAME/DATASET-SLUG` # noqa: E501
:param dataset_data_sources: The dataset_data_sources of this KernelPushRequest. # noqa: E501
:type: list[str]
'''
pass
@property
def competition_data_sources(self):
'''Gets the competition_data_sources of this KernelPushRequest. # noqa: E501
A list of competition data sources that the kernel should use # noqa: E501
:return: The competition_data_sources of this KernelPushRequest. # noqa: E501
:rtype: list[str]
'''
pass
@competition_data_sources.setter
def competition_data_sources(self):
'''Sets the competition_data_sources of this KernelPushRequest.
A list of competition data sources that the kernel should use # noqa: E501
:param competition_data_sources: The competition_data_sources of this KernelPushRequest. # noqa: E501
:type: list[str]
'''
pass
@property
def kernel_data_sources(self):
'''Gets the kernel_data_sources of this KernelPushRequest. # noqa: E501
A list of kernel data sources that the kernel should use. Each dataset is specified as `USERNAME/KERNEL-SLUG` # noqa: E501
:return: The kernel_data_sources of this KernelPushRequest. # noqa: E501
:rtype: list[str]
'''
pass
@kernel_data_sources.setter
def kernel_data_sources(self):
'''Sets the kernel_data_sources of this KernelPushRequest.
A list of kernel data sources that the kernel should use. Each dataset is specified as `USERNAME/KERNEL-SLUG` # noqa: E501
:param kernel_data_sources: The kernel_data_sources of this KernelPushRequest. # noqa: E501
:type: list[str]
'''
pass
@property
def model_data_sources(self):
'''Gets the model_data_sources of this KernelPushRequest. # noqa: E501
A list of model data sources that the kernel should use. Each model is specified as `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER` # noqa: E501
:return: The model_data_sources of this KernelPushRequest. # noqa: E501
:rtype: list[str]
'''
pass
@model_data_sources.setter
def model_data_sources(self):
'''Sets the model_data_sources of this KernelPushRequest.
A list of model data sources that the kernel should use. Each model is specified as `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER` # noqa: E501
:param model_data_sources: The model_data_sources of this KernelPushRequest. # noqa: E501
:type: list[str]
'''
pass
@property
def category_ids(self):
'''Gets the category_ids of this KernelPushRequest. # noqa: E501
A list of tag IDs to associated with the kernel # noqa: E501
:return: The category_ids of this KernelPushRequest. # noqa: E501
:rtype: list[str]
'''
pass
@category_ids.setter
def category_ids(self):
'''Sets the category_ids of this KernelPushRequest.
A list of tag IDs to associated with the kernel # noqa: E501
:param category_ids: The category_ids of this KernelPushRequest. # noqa: E501
:type: list[str]
'''
pass
@property
def docker_image_pinning_type(self):
'''Gets the docker_image_pinning_type of this KernelPushRequest. # noqa: E501
Which docker image to use for executing new versions going forward. # noqa: E501
:return: The docker_image_pinning_type of this KernelPushRequest. # noqa: E501
:rtype: str
'''
pass
@docker_image_pinning_type.setter
def docker_image_pinning_type(self):
'''Sets the docker_image_pinning_type of this KernelPushRequest.
Which docker image to use for executing new versions going forward. # noqa: E501
:param docker_image_pinning_type: The docker_image_pinning_type of this KernelPushRequest. # noqa: E501
:type: str
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 71 | 38 | 11 | 2 | 4 | 5 | 2 | 0.78 | 1 | 4 | 0 | 0 | 38 | 17 | 38 | 38 | 530 | 124 | 234 | 96 | 163 | 182 | 149 | 64 | 110 | 14 | 1 | 2 | 62 |
141,017 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/model_instance_new_version_request.py
|
src.kaggle.models.model_instance_new_version_request.ModelInstanceNewVersionRequest
|
class ModelInstanceNewVersionRequest(object):
"""
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
project_types = {
'version_notes': 'str',
'files': 'list[UploadFile]'
}
attribute_map = {
'version_notes': 'versionNotes',
'files': 'files'
}
def __init__(self, version_notes=None, files=None): # noqa: E501
self._version_notes = None
self._files = None
self.discriminator = None
if version_notes is not None:
self.version_notes = version_notes
self.files = files
@property
def version_notes(self):
"""Gets the version_notes of this ModelInstanceNewVersionRequest. # noqa: E501
The version notes for the model instance version # noqa: E501
:return: The version_notes of this ModelInstanceNewVersionRequest. # noqa: E501
:rtype: str
"""
return self._version_notes
@version_notes.setter
def version_notes(self, version_notes):
"""Sets the version_notes of this ModelInstanceNewVersionRequest.
The version notes for the model instance version # noqa: E501
:param version_notes: The version_notes of this ModelInstanceNewVersionRequest. # noqa: E501
:type: str
"""
self._version_notes = version_notes
@property
def files(self):
"""Gets the files of this ModelInstanceNewVersionRequest. # noqa: E501
A list of files that should be associated with the model instance version # noqa: E501
:return: The files of this ModelInstanceNewVersionRequest. # noqa: E501
:rtype: list[UploadFile]
"""
return self._files
@files.setter
def files(self, files):
"""Sets the files of this ModelInstanceNewVersionRequest.
A list of files that should be associated with the model instance version # noqa: E501
:param files: The files of this ModelInstanceNewVersionRequest. # noqa: E501
:type: list[UploadFile]
"""
if files is None:
raise ValueError("Invalid value for `files`, must not be `None`") # noqa: E501
self._files = files
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.project_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ModelInstanceNewVersionRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
class ModelInstanceNewVersionRequest(object):
'''
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self, version_notes=None, files=None):
pass
@property
def version_notes(self):
'''Gets the version_notes of this ModelInstanceNewVersionRequest. # noqa: E501
The version notes for the model instance version # noqa: E501
:return: The version_notes of this ModelInstanceNewVersionRequest. # noqa: E501
:rtype: str
'''
pass
@version_notes.setter
def version_notes(self):
'''Sets the version_notes of this ModelInstanceNewVersionRequest.
The version notes for the model instance version # noqa: E501
:param version_notes: The version_notes of this ModelInstanceNewVersionRequest. # noqa: E501
:type: str
'''
pass
@property
def files(self):
'''Gets the files of this ModelInstanceNewVersionRequest. # noqa: E501
A list of files that should be associated with the model instance version # noqa: E501
:return: The files of this ModelInstanceNewVersionRequest. # noqa: E501
:rtype: list[UploadFile]
'''
pass
@files.setter
def files(self):
'''Sets the files of this ModelInstanceNewVersionRequest.
A list of files that should be associated with the model instance version # noqa: E501
:param files: The files of this ModelInstanceNewVersionRequest. # noqa: E501
:type: list[UploadFile]
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 15 | 10 | 9 | 2 | 5 | 3 | 2 | 0.57 | 1 | 4 | 0 | 0 | 10 | 3 | 10 | 10 | 118 | 26 | 60 | 23 | 45 | 34 | 40 | 19 | 29 | 5 | 1 | 2 | 17 |
141,018 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/model_instance_update_request.py
|
src.kaggle.models.model_instance_update_request.ModelInstanceUpdateRequest
|
class ModelInstanceUpdateRequest(object):
"""
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
project_types = {
'overview': 'str',
'usage': 'str',
'license_name': 'str',
'fine_tunable': 'bool',
'training_data': 'list[str]',
'model_instance_type': 'str',
'base_model_instance': 'str',
'external_base_model_url': 'int',
'update_mask': 'str'
}
attribute_map = {
'overview': 'overview',
'usage': 'usage',
'license_name': 'licenseName',
'fine_tunable': 'fineTunable',
'training_data': 'trainingData',
'model_instance_type': 'modelInstanceType',
'base_model_instance': 'baseModelInstance',
'external_base_model_url': 'externalBaseModelUrl',
'update_mask': 'updateMask'
}
def __init__(self, overview=None, usage=None, license_name='Apache 2.0', fine_tunable=True, training_data=None, model_instance_type=None, base_model_instance=None, external_base_model_url=None, update_mask=None): # noqa: E501
self._overview = None
self._usage = None
self._license_name = None
self._fine_tunable = None
self._training_data = None
self._model_instance_type = None
self._base_model_instance = None
self._external_base_model_url = None
self._update_mask = None
self.discriminator = None
if overview is not None:
self.overview = overview
if usage is not None:
self.usage = usage
if license_name is not None:
self.license_name = license_name
if fine_tunable is not None:
self.fine_tunable = fine_tunable
if training_data is not None:
self.training_data = training_data
if model_instance_type is not None:
self.model_instance_type = model_instance_type
if base_model_instance is not None:
self.base_model_instance = base_model_instance
if external_base_model_url is not None:
self.external_base_model_url = external_base_model_url
self.update_mask = update_mask
@property
def overview(self):
"""Gets the overview of this ModelInstanceUpdateRequest. # noqa: E501
The overview of the model instance (markdown) # noqa: E501
:return: The overview of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
"""
return self._overview
@overview.setter
def overview(self, overview):
"""Sets the overview of this ModelInstanceUpdateRequest.
The overview of the model instance (markdown) # noqa: E501
:param overview: The overview of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
"""
self._overview = overview
@property
def usage(self):
"""Gets the usage of this ModelInstanceUpdateRequest. # noqa: E501
The description of how to use the model instance (markdown) # noqa: E501
:return: The usage of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
"""
return self._usage
@usage.setter
def usage(self, usage):
"""Sets the usage of this ModelInstanceUpdateRequest.
The description of how to use the model instance (markdown) # noqa: E501
:param usage: The usage of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
"""
self._usage = usage
@property
def license_name(self):
"""Gets the license_name of this ModelInstanceUpdateRequest. # noqa: E501
The license that should be associated with the model instance # noqa: E501
:return: The license_name of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
"""
return self._license_name
@license_name.setter
def license_name(self, license_name):
"""Sets the license_name of this ModelInstanceUpdateRequest.
The license that should be associated with the model instance # noqa: E501
:param license_name: The license_name of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
"""
allowed_values = ["CC0 1.0", "CC BY-NC-SA 4.0", "Unknown", "CC BY-SA 4.0", "GPL 2", "CC BY-SA 3.0", "Other", "Other (specified in description)", "CC BY 4.0", "Attribution 4.0 International (CC BY 4.0)", "CC BY-NC 4.0", "Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)", "PDDL", "ODC Public Domain Dedication and Licence (PDDL)", "CC BY 3.0", "Attribution 3.0 Unported (CC BY 3.0)", "CC BY 3.0 IGO", "Attribution 3.0 IGO (CC BY 3.0 IGO)", "CC BY-NC-SA 3.0 IGO", "Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO)", "CDLA Permissive 1.0", "Community Data License Agreement - Permissive - Version 1.0", "CDLA Sharing 1.0", "Community Data License Agreement - Sharing - Version 1.0", "CC BY-ND 4.0", "Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)", "CC BY-NC-ND 4.0", "Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)", "ODC-BY 1.0", "ODC Attribution License (ODC-By)", "LGPL 3.0", "GNU Lesser General Public License 3.0", "AGPL 3.0", "GNU Affero General Public License 3.0", "FDL 1.3", "GNU Free Documentation License 1.3", "apache-2.0", "Apache 2.0", "mit", "MIT", "bsd-3-clause", "BSD-3-Clause", "Llama 2", "Llama 2 Community License", "Gemma", "gpl-3", "GPL 3", "RAIL-M", "AI Pubs Open RAIL-M License", "AIPubs Research-Use RAIL-M", "AI Pubs Research-Use RAIL-M License", "BigScience OpenRAIL-M", "BigScience Open RAIL-M License", "RAIL", "RAIL (specified in description)", "Llama 3", "Llama 3 Community License"] # noqa: E501
if license_name not in allowed_values:
raise ValueError(
"Invalid value for `license_name` ({0}), must be one of {1}" # noqa: E501
.format(license_name, allowed_values)
)
self._license_name = license_name
@property
def fine_tunable(self):
"""Gets the fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501
Whether the model instance is fine tunable # noqa: E501
:return: The fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: bool
"""
return self._fine_tunable
@fine_tunable.setter
def fine_tunable(self, fine_tunable):
"""Sets the fine_tunable of this ModelInstanceUpdateRequest.
Whether the model instance is fine tunable # noqa: E501
:param fine_tunable: The fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501
:type: bool
"""
self._fine_tunable = fine_tunable
@property
def training_data(self):
"""Gets the training_data of this ModelInstanceUpdateRequest. # noqa: E501
A list of training data (urls or names) # noqa: E501
:return: The training_data of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: list[str]
"""
return self._training_data
@training_data.setter
def training_data(self, training_data):
"""Sets the training_data of this ModelInstanceUpdateRequest.
A list of training data (urls or names) # noqa: E501
:param training_data: The training_data of this ModelInstanceUpdateRequest. # noqa: E501
:type: list[str]
"""
self._training_data = training_data
@property
def model_instance_type(self):
"""Gets the model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501
Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501
:return: The model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
"""
return self._model_instance_type
@model_instance_type.setter
def model_instance_type(self, model_instance_type):
"""Sets the model_instance_type of this ModelInstanceUpdateRequest.
Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501
:param model_instance_type: The model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
"""
allowed_values = ["Unspecified", "BaseModel", "KaggleVariant", "ExternalVariant"] # noqa: E501
if model_instance_type not in allowed_values:
raise ValueError(
"Invalid value for `model_instance_type` ({0}), must be one of {1}" # noqa: E501
.format(model_instance_type, allowed_values)
)
self._model_instance_type = model_instance_type
@property
def base_model_instance(self):
"""Gets the base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501
If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501
:return: The base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
"""
return self._base_model_instance
@base_model_instance.setter
def base_model_instance(self, base_model_instance):
"""Sets the base_model_instance of this ModelInstanceUpdateRequest.
If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501
:param base_model_instance: The base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
"""
self._base_model_instance = base_model_instance
@property
def external_base_model_url(self):
"""Gets the external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501
If this is an external variant, a URL to the base model # noqa: E501
:return: The external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: int
"""
return self._external_base_model_url
@external_base_model_url.setter
def external_base_model_url(self, external_base_model_url):
"""Sets the external_base_model_url of this ModelInstanceUpdateRequest.
If this is an external variant, a URL to the base model # noqa: E501
:param external_base_model_url: The external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501
:type: int
"""
self._external_base_model_url = external_base_model_url
@property
def update_mask(self):
"""Gets the update_mask of this ModelInstanceUpdateRequest. # noqa: E501
Describes which fields to update # noqa: E501
:return: The update_mask of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
"""
return self._update_mask
@update_mask.setter
def update_mask(self, update_mask):
"""Sets the update_mask of this ModelInstanceUpdateRequest.
Describes which fields to update # noqa: E501
:param update_mask: The update_mask of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
"""
if update_mask is None:
raise ValueError("Invalid value for `update_mask`, must not be `None`") # noqa: E501
self._update_mask = update_mask
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.project_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ModelInstanceUpdateRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
class ModelInstanceUpdateRequest(object):
'''
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self, overview=None, usage=None, license_name='Apache 2.0', fine_tunable=True, training_data=None, model_instance_type=None, base_model_instance=None, external_base_model_url=None, update_mask=None):
pass
@property
def overview(self):
'''Gets the overview of this ModelInstanceUpdateRequest. # noqa: E501
The overview of the model instance (markdown) # noqa: E501
:return: The overview of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@overview.setter
def overview(self):
'''Sets the overview of this ModelInstanceUpdateRequest.
The overview of the model instance (markdown) # noqa: E501
:param overview: The overview of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def usage(self):
'''Gets the usage of this ModelInstanceUpdateRequest. # noqa: E501
The description of how to use the model instance (markdown) # noqa: E501
:return: The usage of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@usage.setter
def usage(self):
'''Sets the usage of this ModelInstanceUpdateRequest.
The description of how to use the model instance (markdown) # noqa: E501
:param usage: The usage of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def license_name(self):
'''Gets the license_name of this ModelInstanceUpdateRequest. # noqa: E501
The license that should be associated with the model instance # noqa: E501
:return: The license_name of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@license_name.setter
def license_name(self):
'''Sets the license_name of this ModelInstanceUpdateRequest.
The license that should be associated with the model instance # noqa: E501
:param license_name: The license_name of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def fine_tunable(self):
'''Gets the fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501
Whether the model instance is fine tunable # noqa: E501
:return: The fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: bool
'''
pass
@fine_tunable.setter
def fine_tunable(self):
'''Sets the fine_tunable of this ModelInstanceUpdateRequest.
Whether the model instance is fine tunable # noqa: E501
:param fine_tunable: The fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501
:type: bool
'''
pass
@property
def training_data(self):
'''Gets the training_data of this ModelInstanceUpdateRequest. # noqa: E501
A list of training data (urls or names) # noqa: E501
:return: The training_data of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: list[str]
'''
pass
@training_data.setter
def training_data(self):
'''Sets the training_data of this ModelInstanceUpdateRequest.
A list of training data (urls or names) # noqa: E501
:param training_data: The training_data of this ModelInstanceUpdateRequest. # noqa: E501
:type: list[str]
'''
pass
@property
def model_instance_type(self):
'''Gets the model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501
Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501
:return: The model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@model_instance_type.setter
def model_instance_type(self):
'''Sets the model_instance_type of this ModelInstanceUpdateRequest.
Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501
:param model_instance_type: The model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def base_model_instance(self):
'''Gets the base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501
If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501
:return: The base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@base_model_instance.setter
def base_model_instance(self):
'''Sets the base_model_instance of this ModelInstanceUpdateRequest.
If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501
:param base_model_instance: The base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def external_base_model_url(self):
'''Gets the external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501
If this is an external variant, a URL to the base model # noqa: E501
:return: The external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: int
'''
pass
@external_base_model_url.setter
def external_base_model_url(self):
'''Sets the external_base_model_url of this ModelInstanceUpdateRequest.
If this is an external variant, a URL to the base model # noqa: E501
:param external_base_model_url: The external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501
:type: int
'''
pass
@property
def update_mask(self):
'''Gets the update_mask of this ModelInstanceUpdateRequest. # noqa: E501
Describes which fields to update # noqa: E501
:return: The update_mask of this ModelInstanceUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@update_mask.setter
def update_mask(self):
'''Sets the update_mask of this ModelInstanceUpdateRequest.
Describes which fields to update # noqa: E501
:param update_mask: The update_mask of this ModelInstanceUpdateRequest. # noqa: E501
:type: str
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 43 | 24 | 11 | 2 | 5 | 4 | 2 | 0.72 | 1 | 4 | 0 | 0 | 24 | 10 | 24 | 24 | 326 | 75 | 149 | 60 | 106 | 108 | 95 | 42 | 70 | 9 | 1 | 2 | 40 |
141,019 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/model_new_instance_request.py
|
src.kaggle.models.model_new_instance_request.ModelNewInstanceRequest
|
class ModelNewInstanceRequest(object):
"""
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
project_types = {
'instance_slug': 'str',
'framework': 'str',
'overview': 'str',
'usage': 'str',
'license_name': 'str',
'fine_tunable': 'bool',
'training_data': 'list[str]',
'model_instance_type': 'str',
'base_model_instance': 'str',
'external_base_model_url': 'int',
'files': 'list[UploadFile]'
}
attribute_map = {
'instance_slug': 'instanceSlug',
'framework': 'framework',
'overview': 'overview',
'usage': 'usage',
'license_name': 'licenseName',
'fine_tunable': 'fineTunable',
'training_data': 'trainingData',
'model_instance_type': 'modelInstanceType',
'base_model_instance': 'baseModelInstance',
'external_base_model_url': 'externalBaseModelUrl',
'files': 'files'
}
def __init__(self, instance_slug=None, framework=None, overview=None, usage=None, license_name='Apache 2.0', fine_tunable=True, training_data=None, model_instance_type=None, base_model_instance=None, external_base_model_url=None, files=None): # noqa: E501
self._instance_slug = None
self._framework = None
self._overview = None
self._usage = None
self._license_name = None
self._fine_tunable = None
self._training_data = None
self._model_instance_type = None
self._base_model_instance = None
self._external_base_model_url = None
self._files = None
self.discriminator = None
self.instance_slug = instance_slug
self.framework = framework
if overview is not None:
self.overview = overview
if usage is not None:
self.usage = usage
self.license_name = license_name
if fine_tunable is not None:
self.fine_tunable = fine_tunable
if training_data is not None:
self.training_data = training_data
if model_instance_type is not None:
self.model_instance_type = model_instance_type
if base_model_instance is not None:
self.base_model_instance = base_model_instance
if external_base_model_url is not None:
self.external_base_model_url = external_base_model_url
if files is not None:
self.files = files
@property
def instance_slug(self):
"""Gets the instance_slug of this ModelNewInstanceRequest. # noqa: E501
The slug that the model instance should be created with # noqa: E501
:return: The instance_slug of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._instance_slug
@instance_slug.setter
def instance_slug(self, instance_slug):
"""Sets the instance_slug of this ModelNewInstanceRequest.
The slug that the model instance should be created with # noqa: E501
:param instance_slug: The instance_slug of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
if instance_slug is None:
raise ValueError("Invalid value for `instance_slug`, must not be `None`") # noqa: E501
self._instance_slug = instance_slug
@property
def framework(self):
"""Gets the framework of this ModelNewInstanceRequest. # noqa: E501
The framework of the model instance # noqa: E501
:return: The framework of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._framework
@framework.setter
def framework(self, framework):
"""Sets the framework of this ModelNewInstanceRequest.
The framework of the model instance # noqa: E501
:param framework: The framework of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
if framework is None:
raise ValueError("Invalid value for `framework`, must not be `None`") # noqa: E501
allowed_values = ["tensorFlow1", "tensorFlow2", "tfLite", "tfJs", "pyTorch", "jax", "flax", "pax", "maxText", "gemmaCpp", "tensorRtLlm", "ggml", "gguf", "coral", "scikitLearn", "mxnet", "onnx", "keras", "transformers", "triton", "api", "triton", "tensorRtLlm","other"] # noqa: E501
if framework not in allowed_values:
raise ValueError(
"Invalid value for `framework` ({0}), must be one of {1}" # noqa: E501
.format(framework, allowed_values)
)
self._framework = framework
@property
def overview(self):
"""Gets the overview of this ModelNewInstanceRequest. # noqa: E501
The overview of the model instance (markdown) # noqa: E501
:return: The overview of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._overview
@overview.setter
def overview(self, overview):
"""Sets the overview of this ModelNewInstanceRequest.
The overview of the model instance (markdown) # noqa: E501
:param overview: The overview of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
self._overview = overview
@property
def usage(self):
"""Gets the usage of this ModelNewInstanceRequest. # noqa: E501
The description of how to use the model instance (markdown) # noqa: E501
:return: The usage of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._usage
@usage.setter
def usage(self, usage):
"""Sets the usage of this ModelNewInstanceRequest.
The description of how to use the model instance (markdown) # noqa: E501
:param usage: The usage of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
self._usage = usage
@property
def license_name(self):
"""Gets the license_name of this ModelNewInstanceRequest. # noqa: E501
The license that should be associated with the model instance # noqa: E501
:return: The license_name of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._license_name
@license_name.setter
def license_name(self, license_name):
"""Sets the license_name of this ModelNewInstanceRequest.
The license that should be associated with the model instance # noqa: E501
:param license_name: The license_name of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
if license_name is None:
raise ValueError("Invalid value for `license_name`, must not be `None`") # noqa: E501
allowed_values = ["CC0 1.0", "CC BY-NC-SA 4.0", "Unknown", "CC BY-SA 4.0", "GPL 2", "CC BY-SA 3.0", "Other", "Other (specified in description)", "CC BY 4.0", "Attribution 4.0 International (CC BY 4.0)", "CC BY-NC 4.0", "Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)", "PDDL", "ODC Public Domain Dedication and Licence (PDDL)", "CC BY 3.0", "Attribution 3.0 Unported (CC BY 3.0)", "CC BY 3.0 IGO", "Attribution 3.0 IGO (CC BY 3.0 IGO)", "CC BY-NC-SA 3.0 IGO", "Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO)", "CDLA Permissive 1.0", "Community Data License Agreement - Permissive - Version 1.0", "CDLA Sharing 1.0", "Community Data License Agreement - Sharing - Version 1.0", "CC BY-ND 4.0", "Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)", "CC BY-NC-ND 4.0", "Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)", "ODC-BY 1.0", "ODC Attribution License (ODC-By)", "LGPL 3.0", "GNU Lesser General Public License 3.0", "AGPL 3.0", "GNU Affero General Public License 3.0", "FDL 1.3", "GNU Free Documentation License 1.3", "apache-2.0", "Apache 2.0", "mit", "MIT", "bsd-3-clause", "BSD-3-Clause", "Llama 2", "Llama 2 Community License", "Gemma", "gpl-3", "GPL 3", "RAIL-M", "AI Pubs Open RAIL-M License", "AIPubs Research-Use RAIL-M", "AI Pubs Research-Use RAIL-M License", "BigScience OpenRAIL-M", "BigScience Open RAIL-M License", "RAIL", "RAIL (specified in description)", "Llama 3", "Llama 3 Community License"] # noqa: E501
if license_name not in allowed_values:
raise ValueError(
"Invalid value for `license_name` ({0}), must be one of {1}" # noqa: E501
.format(license_name, allowed_values)
)
self._license_name = license_name
@property
def fine_tunable(self):
"""Gets the fine_tunable of this ModelNewInstanceRequest. # noqa: E501
Whether the model instance is fine tunable # noqa: E501
:return: The fine_tunable of this ModelNewInstanceRequest. # noqa: E501
:rtype: bool
"""
return self._fine_tunable
@fine_tunable.setter
def fine_tunable(self, fine_tunable):
"""Sets the fine_tunable of this ModelNewInstanceRequest.
Whether the model instance is fine tunable # noqa: E501
:param fine_tunable: The fine_tunable of this ModelNewInstanceRequest. # noqa: E501
:type: bool
"""
self._fine_tunable = fine_tunable
@property
def training_data(self):
"""Gets the training_data of this ModelNewInstanceRequest. # noqa: E501
A list of training data (urls or names) # noqa: E501
:return: The training_data of this ModelNewInstanceRequest. # noqa: E501
:rtype: list[str]
"""
return self._training_data
@training_data.setter
def training_data(self, training_data):
"""Sets the training_data of this ModelNewInstanceRequest.
A list of training data (urls or names) # noqa: E501
:param training_data: The training_data of this ModelNewInstanceRequest. # noqa: E501
:type: list[str]
"""
self._training_data = training_data
@property
def model_instance_type(self):
"""Gets the model_instance_type of this ModelNewInstanceRequest. # noqa: E501
Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501
:return: The model_instance_type of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._model_instance_type
@model_instance_type.setter
def model_instance_type(self, model_instance_type):
"""Sets the model_instance_type of this ModelNewInstanceRequest.
Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501
:param model_instance_type: The model_instance_type of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
allowed_values = ["Unspecified", "BaseModel", "KaggleVariant", "ExternalVariant"] # noqa: E501
if model_instance_type not in allowed_values:
raise ValueError(
"Invalid value for `model_instance_type` ({0}), must be one of {1}" # noqa: E501
.format(model_instance_type, allowed_values)
)
self._model_instance_type = model_instance_type
@property
def base_model_instance(self):
"""Gets the base_model_instance of this ModelNewInstanceRequest. # noqa: E501
If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501
:return: The base_model_instance of this ModelNewInstanceRequest. # noqa: E501
:rtype: str
"""
return self._base_model_instance
@base_model_instance.setter
def base_model_instance(self, base_model_instance):
"""Sets the base_model_instance of this ModelNewInstanceRequest.
If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501
:param base_model_instance: The base_model_instance of this ModelNewInstanceRequest. # noqa: E501
:type: str
"""
self._base_model_instance = base_model_instance
@property
def external_base_model_url(self):
"""Gets the external_base_model_url of this ModelNewInstanceRequest. # noqa: E501
If this is an external variant, a URL to the base model # noqa: E501
:return: The external_base_model_url of this ModelNewInstanceRequest. # noqa: E501
:rtype: int
"""
return self._external_base_model_url
@external_base_model_url.setter
def external_base_model_url(self, external_base_model_url):
"""Sets the external_base_model_url of this ModelNewInstanceRequest.
If this is an external variant, a URL to the base model # noqa: E501
:param external_base_model_url: The external_base_model_url of this ModelNewInstanceRequest. # noqa: E501
:type: int
"""
self._external_base_model_url = external_base_model_url
@property
def files(self):
"""Gets the files of this ModelNewInstanceRequest. # noqa: E501
A list of files that should be associated with the model instance version # noqa: E501
:return: The files of this ModelNewInstanceRequest. # noqa: E501
:rtype: list[UploadFile]
"""
return self._files
@files.setter
def files(self, files):
"""Sets the files of this ModelNewInstanceRequest.
A list of files that should be associated with the model instance version # noqa: E501
:param files: The files of this ModelNewInstanceRequest. # noqa: E501
:type: list[UploadFile]
"""
self._files = files
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.project_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ModelNewInstanceRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
class ModelNewInstanceRequest(object):
    """Skeleton of the model-instance creation request (declarations only).

    Attributes:
        project_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    def __init__(self, instance_slug=None, framework=None, overview=None, usage=None, license_name='Apache 2.0', fine_tunable=True, training_data=None, model_instance_type=None, base_model_instance=None, external_base_model_url=None, files=None):
        pass

    # NOTE: every property setter below accepts the assigned value as its
    # second parameter.  The previous skeleton declared them with `self`
    # only, so any attribute assignment raised TypeError.

    @property
    def instance_slug(self):
        """str: the slug that the model instance should be created with."""
        pass

    @instance_slug.setter
    def instance_slug(self, instance_slug):
        pass

    @property
    def framework(self):
        """str: the framework of the model instance."""
        pass

    @framework.setter
    def framework(self, framework):
        pass

    @property
    def overview(self):
        """str: the overview of the model instance (markdown)."""
        pass

    @overview.setter
    def overview(self, overview):
        pass

    @property
    def usage(self):
        """str: the description of how to use the model instance (markdown)."""
        pass

    @usage.setter
    def usage(self, usage):
        pass

    @property
    def license_name(self):
        """str: the license that should be associated with the model instance."""
        pass

    @license_name.setter
    def license_name(self, license_name):
        pass

    @property
    def fine_tunable(self):
        """bool: whether the model instance is fine tunable."""
        pass

    @fine_tunable.setter
    def fine_tunable(self, fine_tunable):
        pass

    @property
    def training_data(self):
        """list[str]: a list of training data (urls or names)."""
        pass

    @training_data.setter
    def training_data(self, training_data):
        pass

    @property
    def model_instance_type(self):
        """str: base model, external variant, internal variant, or unspecified."""
        pass

    @model_instance_type.setter
    def model_instance_type(self, model_instance_type):
        pass

    @property
    def base_model_instance(self):
        """str: base instance path, when this is an internal variant."""
        pass

    @base_model_instance.setter
    def base_model_instance(self, base_model_instance):
        pass

    @property
    def external_base_model_url(self):
        """int: a URL to the base model, when this is an external variant."""
        pass

    @external_base_model_url.setter
    def external_base_model_url(self, external_base_model_url):
        pass

    @property
    def files(self):
        """list[UploadFile]: files associated with the model instance version."""
        pass

    @files.setter
    def files(self, files):
        pass

    def to_dict(self):
        """Returns the model properties as a dict"""
        pass

    def to_str(self):
        """Returns the string representation of the model"""
        pass

    def __repr__(self):
        """For `print` and `pprint`"""
        pass

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        pass

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        pass
| 51 | 28 | 11 | 2 | 5 | 4 | 2 | 0.74 | 1 | 4 | 0 | 0 | 28 | 12 | 28 | 28 | 390 | 89 | 179 | 71 | 128 | 132 | 114 | 49 | 85 | 9 | 1 | 2 | 47 |
141,020 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/model_new_request.py
|
src.kaggle.models.model_new_request.ModelNewRequest
|
class ModelNewRequest(object):
    """Request body for creating a new Kaggle model.

    `owner_slug`, `slug`, `title` and `is_private` are required (their
    setters raise ValueError on None); the remaining fields are optional.

    Attributes:
        project_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    project_types = {
        'owner_slug': 'str',
        'slug': 'str',
        'title': 'str',
        'subtitle': 'str',
        'is_private': 'bool',
        'description': 'str',
        'publish_time': 'date',
        'provenance_sources': 'str'
    }

    attribute_map = {
        'owner_slug': 'ownerSlug',
        'slug': 'slug',
        'title': 'title',
        'subtitle': 'subtitle',
        'is_private': 'isPrivate',
        'description': 'description',
        'publish_time': 'publishTime',
        'provenance_sources': 'provenanceSources'
    }

    def __init__(self, owner_slug=None, slug=None, title=None, subtitle=None, is_private=True, description='', publish_time=None, provenance_sources=''):  # noqa: E501
        """Build a ModelNewRequest.

        Optional fields are only assigned when a non-None value is
        supplied, so their backing slots stay None otherwise.
        """
        self._owner_slug = None
        self._slug = None
        self._title = None
        self._subtitle = None
        self._is_private = None
        self._description = None
        self._publish_time = None
        self._provenance_sources = None
        self.discriminator = None

        self.owner_slug = owner_slug
        self.slug = slug
        self.title = title
        if subtitle is not None:
            self.subtitle = subtitle
        self.is_private = is_private
        if description is not None:
            self.description = description
        if publish_time is not None:
            self.publish_time = publish_time
        if provenance_sources is not None:
            self.provenance_sources = provenance_sources

    @property
    def owner_slug(self):
        """str: the owner's slug (required)."""
        return self._owner_slug

    @owner_slug.setter
    def owner_slug(self, owner_slug):
        """Set the owner's slug; raises ValueError on None."""
        if owner_slug is None:
            raise ValueError("Invalid value for `owner_slug`, must not be `None`")  # noqa: E501
        self._owner_slug = owner_slug

    @property
    def slug(self):
        """str: the slug that the model should be created with (required)."""
        return self._slug

    @slug.setter
    def slug(self, slug):
        """Set the model slug; raises ValueError on None."""
        if slug is None:
            raise ValueError("Invalid value for `slug`, must not be `None`")  # noqa: E501
        self._slug = slug

    @property
    def title(self):
        """str: the title of the new model (required)."""
        return self._title

    @title.setter
    def title(self, title):
        """Set the model title; raises ValueError on None."""
        if title is None:
            raise ValueError("Invalid value for `title`, must not be `None`")  # noqa: E501
        self._title = title

    @property
    def subtitle(self):
        """str: the subtitle of the new model (optional)."""
        return self._subtitle

    @subtitle.setter
    def subtitle(self, subtitle):
        """Set the model subtitle."""
        self._subtitle = subtitle

    @property
    def is_private(self):
        """bool: whether or not the model should be private (required)."""
        return self._is_private

    @is_private.setter
    def is_private(self, is_private):
        """Set the privacy flag; raises ValueError on None."""
        if is_private is None:
            raise ValueError("Invalid value for `is_private`, must not be `None`")  # noqa: E501
        self._is_private = is_private

    @property
    def description(self):
        """str: the description to be set on the model."""
        return self._description

    @description.setter
    def description(self, description):
        """Set the model description."""
        self._description = description

    @property
    def publish_time(self):
        """date: when the model was initially published."""
        return self._publish_time

    @publish_time.setter
    def publish_time(self, publish_time):
        """Set the initial publish time."""
        self._publish_time = publish_time

    @property
    def provenance_sources(self):
        """str: the provenance sources to be set on the model."""
        return self._provenance_sources

    @provenance_sources.setter
    def provenance_sources(self, provenance_sources):
        """Set the provenance sources."""
        self._provenance_sources = provenance_sources

    def to_dict(self):
        """Return the model properties as a plain dict."""
        result = {}
        # Plain dict iteration behaves the same on Python 2 and 3, so the
        # third-party `six.iteritems` shim is unnecessary here.
        for attr in self.project_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, ModelNewRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
|
class ModelNewRequest(object):
    """Skeleton of the model creation request (declarations only).

    Attributes:
        project_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    def __init__(self, owner_slug=None, slug=None, title=None, subtitle=None, is_private=True, description='', publish_time=None, provenance_sources=''):
        pass

    # NOTE: every property setter below accepts the assigned value as its
    # second parameter.  The previous skeleton declared them with `self`
    # only, so any attribute assignment raised TypeError.

    @property
    def owner_slug(self):
        """str: the owner's slug."""
        pass

    @owner_slug.setter
    def owner_slug(self, owner_slug):
        pass

    @property
    def slug(self):
        """str: the slug that the model should be created with."""
        pass

    @slug.setter
    def slug(self, slug):
        pass

    @property
    def title(self):
        """str: the title of the new model."""
        pass

    @title.setter
    def title(self, title):
        pass

    @property
    def subtitle(self):
        """str: the subtitle of the new model."""
        pass

    @subtitle.setter
    def subtitle(self, subtitle):
        pass

    @property
    def is_private(self):
        """bool: whether or not the model should be private."""
        pass

    @is_private.setter
    def is_private(self, is_private):
        pass

    @property
    def description(self):
        """str: the description to be set on the model."""
        pass

    @description.setter
    def description(self, description):
        pass

    @property
    def publish_time(self):
        """date: when the model was initially published."""
        pass

    @publish_time.setter
    def publish_time(self, publish_time):
        pass

    @property
    def provenance_sources(self):
        """str: the provenance sources to be set on the model."""
        pass

    @provenance_sources.setter
    def provenance_sources(self, provenance_sources):
        pass

    def to_dict(self):
        """Returns the model properties as a dict"""
        pass

    def to_str(self):
        """Returns the string representation of the model"""
        pass

    def __repr__(self):
        """For `print` and `pprint`"""
        pass

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        pass

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        pass
| 39 | 22 | 10 | 2 | 4 | 4 | 2 | 0.75 | 1 | 4 | 0 | 0 | 22 | 9 | 22 | 22 | 289 | 68 | 129 | 53 | 90 | 97 | 85 | 37 | 62 | 5 | 1 | 2 | 35 |
141,021 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/model_update_request.py
|
src.kaggle.models.model_update_request.ModelUpdateRequest
|
class ModelUpdateRequest(object):
    """Request body for updating an existing Kaggle model.

    All fields are optional; `update_mask` describes which fields apply.
    Unlike the creation request, no setter rejects None here.

    Attributes:
        project_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    project_types = {
        'title': 'str',
        'subtitle': 'str',
        'is_private': 'bool',
        'description': 'str',
        'publish_time': 'date',
        'provenance_sources': 'str',
        'update_mask': 'str'
    }

    attribute_map = {
        'title': 'title',
        'subtitle': 'subtitle',
        'is_private': 'isPrivate',
        'description': 'description',
        'publish_time': 'publishTime',
        'provenance_sources': 'provenanceSources',
        'update_mask': 'updateMask'
    }

    def __init__(self, title=None, subtitle=None, is_private=True, description='', publish_time=None, provenance_sources='', update_mask=None):  # noqa: E501
        """Build a ModelUpdateRequest; None arguments are left unset."""
        self._title = None
        self._subtitle = None
        self._is_private = None
        self._description = None
        self._publish_time = None
        self._provenance_sources = None
        self._update_mask = None
        self.discriminator = None

        if title is not None:
            self.title = title
        if subtitle is not None:
            self.subtitle = subtitle
        if is_private is not None:
            self.is_private = is_private
        if description is not None:
            self.description = description
        if publish_time is not None:
            self.publish_time = publish_time
        if provenance_sources is not None:
            self.provenance_sources = provenance_sources
        if update_mask is not None:
            self.update_mask = update_mask

    @property
    def title(self):
        """str: the title of the model."""
        return self._title

    @title.setter
    def title(self, title):
        """Set the model title."""
        self._title = title

    @property
    def subtitle(self):
        """str: the subtitle of the model."""
        return self._subtitle

    @subtitle.setter
    def subtitle(self, subtitle):
        """Set the model subtitle."""
        self._subtitle = subtitle

    @property
    def is_private(self):
        """bool: whether or not the model should be private."""
        return self._is_private

    @is_private.setter
    def is_private(self, is_private):
        """Set the privacy flag (no None-check in the update request)."""
        self._is_private = is_private

    @property
    def description(self):
        """str: the description to be set on the model."""
        return self._description

    @description.setter
    def description(self, description):
        """Set the model description."""
        self._description = description

    @property
    def publish_time(self):
        """date: when the model was initially published."""
        return self._publish_time

    @publish_time.setter
    def publish_time(self, publish_time):
        """Set the initial publish time."""
        self._publish_time = publish_time

    @property
    def provenance_sources(self):
        """str: the provenance sources to be set on the model."""
        return self._provenance_sources

    @provenance_sources.setter
    def provenance_sources(self, provenance_sources):
        """Set the provenance sources."""
        self._provenance_sources = provenance_sources

    @property
    def update_mask(self):
        """str: describes which fields to update."""
        return self._update_mask

    @update_mask.setter
    def update_mask(self, update_mask):
        """Set the update mask."""
        self._update_mask = update_mask

    def to_dict(self):
        """Return the model properties as a plain dict."""
        result = {}
        # Plain dict iteration behaves the same on Python 2 and 3, so the
        # third-party `six.iteritems` shim is unnecessary here.
        for attr in self.project_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, ModelUpdateRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
|
class ModelUpdateRequest(object):
'''
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self, title=None, subtitle=None, is_private=True, description='', publish_time=None, provenance_sources='', update_mask=None):
pass
@property
def title(self):
'''Gets the title of this ModelUpdateRequest. # noqa: E501
The title of the new model # noqa: E501
:return: The title of this ModelUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@title.setter
def title(self):
'''Sets the title of this ModelUpdateRequest.
The title of the new model # noqa: E501
:param title: The title of this ModelUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def subtitle(self):
'''Gets the subtitle of this ModelUpdateRequest. # noqa: E501
The subtitle of the new model # noqa: E501
:return: The subtitle of this ModelUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@subtitle.setter
def subtitle(self):
'''Sets the subtitle of this ModelUpdateRequest.
The subtitle of the new model # noqa: E501
:param subtitle: The subtitle of this ModelUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def is_private(self):
'''Gets the is_private of this ModelUpdateRequest. # noqa: E501
Whether or not the model should be private # noqa: E501
:return: The is_private of this ModelUpdateRequest. # noqa: E501
:rtype: bool
'''
pass
@is_private.setter
def is_private(self):
'''Sets the is_private of this ModelUpdateRequest.
Whether or not the model should be private # noqa: E501
:param is_private: The is_private of this ModelUpdateRequest. # noqa: E501
:type: bool
'''
pass
@property
def description(self):
'''Gets the description of this ModelUpdateRequest. # noqa: E501
The description to be set on the model # noqa: E501
:return: The description of this ModelUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@description.setter
def description(self):
'''Sets the description of this ModelUpdateRequest.
The description to be set on the model # noqa: E501
:param description: The description of this ModelUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def publish_time(self):
'''Gets the publish_time of this ModelUpdateRequest. # noqa: E501
When the model was initially published # noqa: E501
:return: The publish_time of this ModelUpdateRequest. # noqa: E501
:rtype: date
'''
pass
@publish_time.setter
def publish_time(self):
'''Sets the publish_time of this ModelUpdateRequest.
When the model was initially published # noqa: E501
:param publish_time: The publish_time of this ModelUpdateRequest. # noqa: E501
:type: date
'''
pass
@property
def provenance_sources(self):
'''Gets the provenance_sources of this ModelUpdateRequest. # noqa: E501
The provenance sources to be set on the model # noqa: E501
:return: The provenance_sources of this ModelUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@provenance_sources.setter
def provenance_sources(self):
'''Sets the provenance_sources of this ModelUpdateRequest.
The provenance sources to be set on the model # noqa: E501
:param provenance_sources: The provenance_sources of this ModelUpdateRequest. # noqa: E501
:type: str
'''
pass
@property
def update_mask(self):
'''Gets the update_mask of this ModelUpdateRequest. # noqa: E501
Describes which fields to update # noqa: E501
:return: The update_mask of this ModelUpdateRequest. # noqa: E501
:rtype: str
'''
pass
@update_mask.setter
def update_mask(self):
'''Sets the update_mask of this ModelUpdateRequest.
Describes which fields to update # noqa: E501
:param update_mask: The update_mask of this ModelUpdateRequest. # noqa: E501
:type: str
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 35 | 20 | 10 | 2 | 4 | 4 | 2 | 0.73 | 1 | 3 | 0 | 0 | 20 | 8 | 20 | 20 | 257 | 61 | 114 | 48 | 79 | 83 | 74 | 34 | 53 | 8 | 1 | 2 | 32 |
141,022 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/start_blob_upload_request.py
|
src.kaggle.models.start_blob_upload_request.StartBlobUploadRequest
|
class StartBlobUploadRequest(object):
"""
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
project_types = {
'type': 'object',
'name': 'str',
'content_length': 'int',
'content_type': 'str',
'last_modified_epoch_seconds': 'int'
}
attribute_map = {
'type': 'type',
'name': 'name',
'content_length': 'contentLength',
'content_type': 'contentType',
'last_modified_epoch_seconds': 'lastModifiedEpochSeconds'
}
def __init__(self,
type=None,
name=None,
content_length=None,
content_type=None,
last_modified_epoch_seconds=None): # noqa: E501
"""StartBlobUploadRequest - a model defined in Swagger""" # noqa: E501
self._type = None
self._name = None
self._content_length = None
self._content_type = None
self._last_modified_epoch_seconds = None
self.discriminator = None
if type is not None:
self.type = type
self.name = name
self.content_length = content_length
if content_type is not None:
self.content_type = content_type
if last_modified_epoch_seconds is not None:
self.last_modified_epoch_seconds = last_modified_epoch_seconds
@property
def type(self):
"""Gets the type of this StartBlobUploadRequest. # noqa: E501
The type of the blob (one of \"dataset\", \"model\", \"inbox\") # noqa: E501
:return: The type of this StartBlobUploadRequest. # noqa: E501
:rtype: object
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this StartBlobUploadRequest.
The type of the blob (one of \"dataset\", \"model\", \"inbox\") # noqa: E501
:param type: The type of this StartBlobUploadRequest. # noqa: E501
:type: object
"""
self._type = type
@property
def name(self):
"""Gets the name of this StartBlobUploadRequest. # noqa: E501
Name of the file # noqa: E501
:return: The name of this StartBlobUploadRequest. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this StartBlobUploadRequest.
Name of the file # noqa: E501
:param name: The name of this StartBlobUploadRequest. # noqa: E501
:type: str
"""
if name is None:
raise ValueError(
"Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def content_length(self):
"""Gets the content_length of this StartBlobUploadRequest. # noqa: E501
Content length of the file in bytes # noqa: E501
:return: The content_length of this StartBlobUploadRequest. # noqa: E501
:rtype: int
"""
return self._content_length
@content_length.setter
def content_length(self, content_length):
"""Sets the content_length of this StartBlobUploadRequest.
Content length of the file in bytes # noqa: E501
:param content_length: The content_length of this StartBlobUploadRequest. # noqa: E501
:type: int
"""
if content_length is None:
raise ValueError("Invalid value for `content_length`, must not be `None`"
) # noqa: E501
self._content_length = content_length
@property
def content_type(self):
"""Gets the content_type of this StartBlobUploadRequest. # noqa: E501
Content/MIME type (e.g. \"text/plain\") of the file # noqa: E501
:return: The content_type of this StartBlobUploadRequest. # noqa: E501
:rtype: str
"""
return self._content_type
@content_type.setter
def content_type(self, content_type):
"""Sets the content_type of this StartBlobUploadRequest.
Content/MIME type (e.g. \"text/plain\") of the file # noqa: E501
:param content_type: The content_type of this StartBlobUploadRequest. # noqa: E501
:type: str
"""
self._content_type = content_type
@property
def last_modified_epoch_seconds(self):
"""Gets the last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501
Last modified date of file in seconds since epoch in UTC # noqa: E501
:return: The last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501
:rtype: int
"""
return self._last_modified_epoch_seconds
@last_modified_epoch_seconds.setter
def last_modified_epoch_seconds(self, last_modified_epoch_seconds):
"""Sets the last_modified_epoch_seconds of this StartBlobUploadRequest.
Last modified date of file in seconds since epoch in UTC # noqa: E501
:param last_modified_epoch_seconds: The last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501
:type: int
"""
self._last_modified_epoch_seconds = last_modified_epoch_seconds
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.project_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item, value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, StartBlobUploadRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
class StartBlobUploadRequest(object):
'''
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self,
type=None,
name=None,
content_length=None,
content_type=None,
last_modified_epoch_seconds=None):
'''StartBlobUploadRequest - a model defined in Swagger'''
pass
@property
def type(self):
'''Gets the type of this StartBlobUploadRequest. # noqa: E501
The type of the blob (one of "dataset", "model", "inbox") # noqa: E501
:return: The type of this StartBlobUploadRequest. # noqa: E501
:rtype: object
'''
pass
@type.setter
def type(self):
'''Sets the type of this StartBlobUploadRequest.
The type of the blob (one of "dataset", "model", "inbox") # noqa: E501
:param type: The type of this StartBlobUploadRequest. # noqa: E501
:type: object
'''
pass
@property
def name(self):
'''Gets the name of this StartBlobUploadRequest. # noqa: E501
Name of the file # noqa: E501
:return: The name of this StartBlobUploadRequest. # noqa: E501
:rtype: str
'''
pass
@name.setter
def name(self):
'''Sets the name of this StartBlobUploadRequest.
Name of the file # noqa: E501
:param name: The name of this StartBlobUploadRequest. # noqa: E501
:type: str
'''
pass
@property
def content_length(self):
'''Gets the content_length of this StartBlobUploadRequest. # noqa: E501
Content length of the file in bytes # noqa: E501
:return: The content_length of this StartBlobUploadRequest. # noqa: E501
:rtype: int
'''
pass
@content_length.setter
def content_length(self):
'''Sets the content_length of this StartBlobUploadRequest.
Content length of the file in bytes # noqa: E501
:param content_length: The content_length of this StartBlobUploadRequest. # noqa: E501
:type: int
'''
pass
@property
def content_type(self):
'''Gets the content_type of this StartBlobUploadRequest. # noqa: E501
Content/MIME type (e.g. "text/plain") of the file # noqa: E501
:return: The content_type of this StartBlobUploadRequest. # noqa: E501
:rtype: str
'''
pass
@content_type.setter
def content_type(self):
'''Sets the content_type of this StartBlobUploadRequest.
Content/MIME type (e.g. "text/plain") of the file # noqa: E501
:param content_type: The content_type of this StartBlobUploadRequest. # noqa: E501
:type: str
'''
pass
@property
def last_modified_epoch_seconds(self):
'''Gets the last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501
Last modified date of file in seconds since epoch in UTC # noqa: E501
:return: The last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501
:rtype: int
'''
pass
@last_modified_epoch_seconds.setter
def last_modified_epoch_seconds(self):
'''Sets the last_modified_epoch_seconds of this StartBlobUploadRequest.
Last modified date of file in seconds since epoch in UTC # noqa: E501
:param last_modified_epoch_seconds: The last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501
:type: int
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 27 | 17 | 10 | 2 | 5 | 4 | 2 | 0.67 | 1 | 4 | 0 | 0 | 16 | 6 | 16 | 16 | 208 | 47 | 98 | 43 | 66 | 66 | 62 | 28 | 45 | 5 | 1 | 2 | 26 |
141,023 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/start_blob_upload_response.py
|
src.kaggle.models.start_blob_upload_response.StartBlobUploadResponse
|
class StartBlobUploadResponse(object):
"""
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
project_types = {'token': 'str', 'create_url': 'str'}
attribute_map = {'token': 'token', 'create_url': 'createUrl'}
def __init__(self, token=None, create_url=None): # noqa: E501
"""StartBlobUploadResponse - a model defined in Swagger""" # noqa: E501
self._token = None
self._create_url = None
self.discriminator = None
self.token = token
self.create_url = create_url
@property
def token(self):
"""Gets the token of this StartBlobUploadResponse. # noqa: E501
Opaque string token used to reference the new blob/file. # noqa: E501
:return: The token of this StartBlobUploadResponse. # noqa: E501
:rtype: str
"""
return self._token
@token.setter
def token(self, token):
"""Sets the token of this StartBlobUploadResponse.
Opaque string token used to reference the new blob/file. # noqa: E501
:param token: The token of this StartBlobUploadResponse. # noqa: E501
:type: str
"""
if token is None:
raise ValueError(
"Invalid value for `token`, must not be `None`") # noqa: E501
self._token = token
@property
def create_url(self):
"""Gets the create_url of this StartBlobUploadResponse. # noqa: E501
URL to use to start the upload. # noqa: E501
:return: The create_url of this StartBlobUploadResponse. # noqa: E501
:rtype: str
"""
return self._create_url
@create_url.setter
def create_url(self, create_url):
"""Sets the create_url of this StartBlobUploadResponse.
URL to use to start the upload. # noqa: E501
:param create_url: The create_url of this StartBlobUploadResponse. # noqa: E501
:type: str
"""
if create_url is None:
raise ValueError(
"Invalid value for `create_url`, must not be `None`") # noqa: E501
self._create_url = create_url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.project_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item, value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, StartBlobUploadResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
class StartBlobUploadResponse(object):
'''
Attributes:
project_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self, token=None, create_url=None):
'''StartBlobUploadResponse - a model defined in Swagger'''
pass
@property
def token(self):
'''Gets the token of this StartBlobUploadResponse. # noqa: E501
Opaque string token used to reference the new blob/file. # noqa: E501
:return: The token of this StartBlobUploadResponse. # noqa: E501
:rtype: str
'''
pass
@token.setter
def token(self):
'''Sets the token of this StartBlobUploadResponse.
Opaque string token used to reference the new blob/file. # noqa: E501
:param token: The token of this StartBlobUploadResponse. # noqa: E501
:type: str
'''
pass
@property
def create_url(self):
'''Gets the create_url of this StartBlobUploadResponse. # noqa: E501
URL to use to start the upload. # noqa: E501
:return: The create_url of this StartBlobUploadResponse. # noqa: E501
:rtype: str
'''
pass
@create_url.setter
def create_url(self):
'''Sets the create_url of this StartBlobUploadResponse.
URL to use to start the upload. # noqa: E501
:param create_url: The create_url of this StartBlobUploadResponse. # noqa: E501
:type: str
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 15 | 11 | 9 | 2 | 5 | 3 | 2 | 0.67 | 1 | 4 | 0 | 0 | 10 | 3 | 10 | 10 | 113 | 26 | 54 | 23 | 39 | 36 | 41 | 19 | 30 | 5 | 1 | 2 | 17 |
141,024 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/upload_file.py
|
src.kaggle.models.upload_file.UploadFile
|
class UploadFile(object):
"""
Attributes:
column_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
column_types = {
'token': 'str',
'description': 'str',
'columns': 'list[DatasetColumn]'
}
attribute_map = {
'token': 'token',
'description': 'description',
'columns': 'columns'
}
def __init__(self, token=None, description=None, columns=None): # noqa: E501
"""UploadFile - a model defined in Swagger""" # noqa: E501
self._token = None
self._description = None
self._columns = None
self.discriminator = None
if token is not None:
self.token = token
if description is not None:
self.description = description
if columns is not None:
self.columns = columns
@property
def token(self):
"""Gets the token of this UploadFile. # noqa: E501
A token referencing a specific file upload that can be used across requests # noqa: E501
:return: The token of this UploadFile. # noqa: E501
:rtype: str
"""
return self._token
@token.setter
def token(self, token):
"""Sets the token of this UploadFile.
A token referencing a specific file upload that can be used across requests # noqa: E501
:param token: The token of this UploadFile. # noqa: E501
:type: str
"""
self._token = token
@property
def description(self):
"""Gets the description of this UploadFile. # noqa: E501
The file description # noqa: E501
:return: The description of this UploadFile. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this UploadFile.
The file description # noqa: E501
:param description: The description of this UploadFile. # noqa: E501
:type: str
"""
self._description = description
@property
def columns(self):
"""Gets the columns of this UploadFile. # noqa: E501
A list of dataset column metadata # noqa: E501
:return: The columns of this UploadFile. # noqa: E501
:rtype: list[DatasetColumn]
"""
return self._columns
@columns.setter
def columns(self, columns):
"""Sets the columns of this UploadFile.
A list of dataset column metadata # noqa: E501
:param columns: The columns of this UploadFile. # noqa: E501
:type: list[DatasetColumn]
"""
self._columns = columns
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.column_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item, value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UploadFile):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
class UploadFile(object):
'''
Attributes:
column_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
'''
def __init__(self, token=None, description=None, columns=None):
'''UploadFile - a model defined in Swagger'''
pass
@property
def token(self):
'''Gets the token of this UploadFile. # noqa: E501
A token referencing a specific file upload that can be used across requests # noqa: E501
:return: The token of this UploadFile. # noqa: E501
:rtype: str
'''
pass
@token.setter
def token(self):
'''Sets the token of this UploadFile.
A token referencing a specific file upload that can be used across requests # noqa: E501
:param token: The token of this UploadFile. # noqa: E501
:type: str
'''
pass
@property
def description(self):
'''Gets the description of this UploadFile. # noqa: E501
The file description # noqa: E501
:return: The description of this UploadFile. # noqa: E501
:rtype: str
'''
pass
@description.setter
def description(self):
'''Sets the description of this UploadFile.
The file description # noqa: E501
:param description: The description of this UploadFile. # noqa: E501
:type: str
'''
pass
@property
def columns(self):
'''Gets the columns of this UploadFile. # noqa: E501
A list of dataset column metadata # noqa: E501
:return: The columns of this UploadFile. # noqa: E501
:rtype: list[DatasetColumn]
'''
pass
@columns.setter
def columns(self):
'''Sets the columns of this UploadFile.
A list of dataset column metadata # noqa: E501
:param columns: The columns of this UploadFile. # noqa: E501
:type: list[DatasetColumn]
'''
pass
def to_dict(self):
'''Returns the model properties as a dict'''
pass
def to_str(self):
'''Returns the string representation of the model'''
pass
def __repr__(self):
'''For `print` and `pprint`'''
pass
def __eq__(self, other):
'''Returns true if both objects are equal'''
pass
def __ne__(self, other):
'''Returns true if both objects are not equal'''
pass
| 19 | 13 | 9 | 2 | 4 | 3 | 2 | 0.66 | 1 | 3 | 0 | 0 | 12 | 4 | 12 | 12 | 143 | 33 | 67 | 28 | 48 | 44 | 46 | 22 | 33 | 5 | 1 | 2 | 20 |
141,025 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/test/test_authenticate.py
|
src.kaggle.test.test_authenticate.TestAuthenticate
|
class TestAuthenticate(unittest.TestCase):
def setUp(self):
print("setup class:%s" % self)
def tearDown(self):
print("teardown class:TestStuff")
# Environment
def test_environment_variables(self):
os.environ['KAGGLE_USERNAME'] = 'dinosaur'
os.environ['KAGGLE_KEY'] = 'xxxxxxxxxxxx'
api = KaggleApi()
# We haven't authenticated yet
self.assertTrue("key" not in api.config_values)
self.assertTrue("username" not in api.config_values)
api.authenticate()
# Should be set from the environment
self.assertEqual(api.config_values['key'], 'xxxxxxxxxxxx')
self.assertEqual(api.config_values['username'], 'dinosaur')
# Configuration Actions
def test_config_actions(self):
api = KaggleApi()
self.assertTrue(api.config_dir.endswith('kaggle'))
self.assertEqual(api.get_config_value('doesntexist'), None)
|
class TestAuthenticate(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_environment_variables(self):
pass
def test_config_actions(self):
pass
| 5 | 0 | 6 | 1 | 4 | 1 | 1 | 0.22 | 1 | 1 | 1 | 0 | 4 | 0 | 4 | 76 | 31 | 9 | 18 | 7 | 13 | 4 | 18 | 7 | 13 | 1 | 2 | 0 | 4 |
141,026 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/admin/services/inbox_file_service.py
|
src.kagglesdk.admin.services.inbox_file_service.InboxFileClient
|
class InboxFileClient(object):
"""File drop/pickup functionality."""
def __init__(self, client: KaggleHttpClient):
self._client = client
def create_inbox_file(self, request: CreateInboxFileRequest = None) -> CreateInboxFileResponse:
r"""
Creates (aka 'drops') a new file into the inbox.
Args:
request (CreateInboxFileRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = CreateInboxFileRequest()
return self._client.call("admin.InboxFileService", "CreateInboxFile", request, CreateInboxFileResponse)
|
class InboxFileClient(object):
'''File drop/pickup functionality.'''
def __init__(self, client: KaggleHttpClient):
pass
def create_inbox_file(self, request: CreateInboxFileRequest = None) -> CreateInboxFileResponse:
'''
Creates (aka 'drops') a new file into the inbox.
Args:
request (CreateInboxFileRequest):
The request object; initialized to empty instance if not specified.
'''
pass
| 3 | 2 | 8 | 2 | 3 | 3 | 2 | 1 | 1 | 3 | 3 | 0 | 2 | 1 | 2 | 2 | 19 | 5 | 7 | 4 | 4 | 7 | 7 | 4 | 4 | 2 | 1 | 1 | 3 |
141,027 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/kaggle_models_extended.py
|
src.kaggle.models.kaggle_models_extended.Tag
|
class Tag(object):
def __init__(self, init_dict):
parsed_dict = {k: parse(v) for k, v in init_dict.items()}
self.__dict__.update(parsed_dict)
def __repr__(self):
return self.ref
|
class Tag(object):
def __init__(self, init_dict):
pass
def __repr__(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 8 | 2 | 6 | 4 | 3 | 0 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
141,028 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/admin/types/inbox_file_service.py
|
src.kagglesdk.admin.types.inbox_file_service.CreateInboxFileRequest
|
class CreateInboxFileRequest(KaggleObject):
r"""
Attributes:
virtual_directory (str)
Directory name used for tagging the uploaded file.
blob_file_token (str)
Token representing the uploaded file.
"""
def __init__(self):
self._virtual_directory = ""
self._blob_file_token = ""
self._freeze()
@property
def virtual_directory(self) -> str:
"""Directory name used for tagging the uploaded file."""
return self._virtual_directory
@virtual_directory.setter
def virtual_directory(self, virtual_directory: str):
if virtual_directory is None:
del self.virtual_directory
return
if not isinstance(virtual_directory, str):
raise TypeError('virtual_directory must be of type str')
self._virtual_directory = virtual_directory
@property
def blob_file_token(self) -> str:
"""Token representing the uploaded file."""
return self._blob_file_token
@blob_file_token.setter
def blob_file_token(self, blob_file_token: str):
if blob_file_token is None:
del self.blob_file_token
return
if not isinstance(blob_file_token, str):
raise TypeError('blob_file_token must be of type str')
self._blob_file_token = blob_file_token
def endpoint(self):
path = '/api/v1/inbox/files/create'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return '*'
|
class CreateInboxFileRequest(KaggleObject):
'''
Attributes:
virtual_directory (str)
Directory name used for tagging the uploaded file.
blob_file_token (str)
Token representing the uploaded file.
'''
def __init__(self):
pass
@property
def virtual_directory(self) -> str:
'''Directory name used for tagging the uploaded file.'''
pass
@virtual_directory.setter
def virtual_directory(self) -> str:
pass
@property
def blob_file_token(self) -> str:
'''Token representing the uploaded file.'''
pass
@blob_file_token.setter
def blob_file_token(self) -> str:
pass
def endpoint(self):
pass
@staticmethod
def method():
pass
@staticmethod
def body_fields():
pass
| 15 | 3 | 4 | 0 | 4 | 0 | 2 | 0.25 | 1 | 2 | 0 | 0 | 6 | 2 | 8 | 25 | 54 | 9 | 36 | 18 | 21 | 9 | 30 | 12 | 21 | 3 | 2 | 1 | 12 |
141,029 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/blobs/services/blob_api_service.py
|
src.kagglesdk.blobs.services.blob_api_service.BlobApiClient
|
class BlobApiClient(object):
r"""
Binary Large OBject (BLOB) service used for uploading files to Google Cloud
Storage (GCS).
"""
def __init__(self, client: KaggleHttpClient):
self._client = client
def start_blob_upload(self, request: ApiStartBlobUploadRequest = None) -> ApiStartBlobUploadResponse:
r"""
Starts a blob upload (i.e. reserves a spot for the upload on GCS).
Args:
request (ApiStartBlobUploadRequest):
The request object; initialized to empty instance if not specified.
"""
if request is None:
request = ApiStartBlobUploadRequest()
return self._client.call("blobs.BlobApiService", "ApiStartBlobUpload", request, ApiStartBlobUploadResponse)
|
class BlobApiClient(object):
'''
Binary Large OBject (BLOB) service used for uploading files to Google Cloud
Storage (GCS).
'''
def __init__(self, client: KaggleHttpClient):
pass
def start_blob_upload(self, request: ApiStartBlobUploadRequest = None) -> ApiStartBlobUploadResponse:
'''
Starts a blob upload (i.e. reserves a spot for the upload on GCS).
Args:
request (ApiStartBlobUploadRequest):
The request object; initialized to empty instance if not specified.
'''
pass
| 3 | 2 | 8 | 2 | 3 | 3 | 2 | 1.43 | 1 | 3 | 3 | 0 | 2 | 1 | 2 | 2 | 22 | 5 | 7 | 4 | 4 | 10 | 7 | 4 | 4 | 2 | 1 | 1 | 3 |
141,030 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/blobs/types/blob_api_service.py
|
src.kagglesdk.blobs.types.blob_api_service.ApiBlobType
|
class ApiBlobType(enum.Enum):
API_BLOB_TYPE_UNSPECIFIED = 0
DATASET = 1
MODEL = 2
INBOX = 3
|
class ApiBlobType(enum.Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 5 | 0 | 5 | 5 | 4 | 0 | 5 | 5 | 4 | 0 | 4 | 0 | 0 |
141,031 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/blobs/types/blob_api_service.py
|
src.kagglesdk.blobs.types.blob_api_service.ApiStartBlobUploadRequest
|
class ApiStartBlobUploadRequest(KaggleObject):
r"""
Attributes:
type (ApiBlobType)
The type of the blob.
name (str)
Name (e.g. file name) of the blob.
content_type (str)
Content/MIME type (e.g. 'text/plain').
content_length (int)
Size in bytes of the blob.
last_modified_epoch_seconds (int)
Optional user-reported time when the blob was last updated/modified.
"""
def __init__(self):
self._type = ApiBlobType.API_BLOB_TYPE_UNSPECIFIED
self._name = ""
self._content_type = None
self._content_length = 0
self._last_modified_epoch_seconds = None
self._freeze()
@property
def type(self) -> 'ApiBlobType':
"""The type of the blob."""
return self._type
@type.setter
def type(self, type: 'ApiBlobType'):
if type is None:
del self.type
return
if not isinstance(type, ApiBlobType):
raise TypeError('type must be of type ApiBlobType')
self._type = type
@property
def name(self) -> str:
"""Name (e.g. file name) of the blob."""
return self._name
@name.setter
def name(self, name: str):
if name is None:
del self.name
return
if not isinstance(name, str):
raise TypeError('name must be of type str')
self._name = name
@property
def content_type(self) -> str:
"""Content/MIME type (e.g. 'text/plain')."""
return self._content_type or ""
@content_type.setter
def content_type(self, content_type: str):
if content_type is None:
del self.content_type
return
if not isinstance(content_type, str):
raise TypeError('content_type must be of type str')
self._content_type = content_type
@property
def content_length(self) -> int:
"""Size in bytes of the blob."""
return self._content_length
@content_length.setter
def content_length(self, content_length: int):
if content_length is None:
del self.content_length
return
if not isinstance(content_length, int):
raise TypeError('content_length must be of type int')
self._content_length = content_length
@property
def last_modified_epoch_seconds(self) -> int:
"""Optional user-reported time when the blob was last updated/modified."""
return self._last_modified_epoch_seconds or 0
@last_modified_epoch_seconds.setter
def last_modified_epoch_seconds(self, last_modified_epoch_seconds: int):
if last_modified_epoch_seconds is None:
del self.last_modified_epoch_seconds
return
if not isinstance(last_modified_epoch_seconds, int):
raise TypeError('last_modified_epoch_seconds must be of type int')
self._last_modified_epoch_seconds = last_modified_epoch_seconds
def endpoint(self):
path = '/api/v1/blobs/upload'
return path.format_map(self.to_field_map(self))
@staticmethod
def method():
return 'POST'
@staticmethod
def body_fields():
return '*'
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): the setter stubs take no value argument and reuse the
# getter's return annotation -- confirm this matches the skeleton generator.
class ApiStartBlobUploadRequest(KaggleObject):
    '''
    Attributes:
      type (ApiBlobType)
        The type of the blob.
      name (str)
        Name (e.g. file name) of the blob.
      content_type (str)
        Content/MIME type (e.g. 'text/plain').
      content_length (int)
        Size in bytes of the blob.
      last_modified_epoch_seconds (int)
        Optional user-reported time when the blob was last updated/modified.
    '''
    def __init__(self):
        pass
    @property
    def type(self) -> 'ApiBlobType':
        '''The type of the blob.'''
        pass
    @type.setter
    def type(self) -> 'ApiBlobType':
        pass
    @property
    def name(self) -> str:
        '''Name (e.g. file name) of the blob.'''
        pass
    @name.setter
    def name(self) -> str:
        pass
    @property
    def content_type(self) -> str:
        '''Content/MIME type (e.g. 'text/plain').'''
        pass
    @content_type.setter
    def content_type(self) -> str:
        pass
    @property
    def content_length(self) -> int:
        '''Size in bytes of the blob.'''
        pass
    @content_length.setter
    def content_length(self) -> int:
        pass
    @property
    def last_modified_epoch_seconds(self) -> int:
        '''Optional user-reported time when the blob was last updated/modified.'''
        pass
    @last_modified_epoch_seconds.setter
    def last_modified_epoch_seconds(self) -> int:
        pass
    def endpoint(self):
        pass
    @staticmethod
    def method():
        pass
    @staticmethod
    def body_fields():
        pass
| 27 | 6 | 5 | 0 | 4 | 0 | 2 | 0.25 | 1 | 4 | 1 | 0 | 12 | 5 | 14 | 31 | 105 | 15 | 72 | 33 | 45 | 18 | 60 | 21 | 45 | 3 | 2 | 1 | 24 |
141,032 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/blobs/types/blob_api_service.py
|
src.kagglesdk.blobs.types.blob_api_service.ApiStartBlobUploadResponse
|
class ApiStartBlobUploadResponse(KaggleObject):
    r"""
    Attributes:
      token (str)
        Opaque string token used to reference the new blob/file.
      create_url (str)
        URL to use to start the upload.
    """

    def __init__(self):
        # Both fields default to empty strings; _freeze() (from KaggleObject)
        # locks the attribute set after construction.
        self._token = ""
        self._create_url = ""
        self._freeze()

    @property
    def token(self) -> str:
        """Opaque string token used to reference the new blob/file."""
        return self._token

    @token.setter
    def token(self, value: str):
        # Assigning None clears the field; any other non-str is rejected.
        if value is None:
            del self.token
        elif isinstance(value, str):
            self._token = value
        else:
            raise TypeError('token must be of type str')

    @property
    def create_url(self) -> str:
        """URL to use to start the upload."""
        return self._create_url

    @create_url.setter
    def create_url(self, value: str):
        if value is None:
            del self.create_url
        elif isinstance(value, str):
            self._create_url = value
        else:
            raise TypeError('create_url must be of type str')

    @property
    def createUrl(self):
        # Camel-case alias reading the same create_url field.
        return self.create_url
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class ApiStartBlobUploadResponse(KaggleObject):
    '''
    Attributes:
      token (str)
        Opaque string token used to reference the new blob/file.
      create_url (str)
        URL to use to start the upload.
    '''
    def __init__(self):
        pass
    @property
    def token(self) -> str:
        '''Opaque string token used to reference the new blob/file.'''
        pass
    @token.setter
    def token(self) -> str:
        pass
    @property
    def create_url(self) -> str:
        '''URL to use to start the upload.'''
        pass
    @create_url.setter
    def create_url(self) -> str:
        pass
    @property
    def createUrl(self):
        pass
| 12 | 3 | 4 | 0 | 4 | 0 | 2 | 0.3 | 1 | 2 | 0 | 0 | 6 | 2 | 6 | 23 | 45 | 6 | 30 | 14 | 18 | 9 | 25 | 9 | 18 | 3 | 2 | 1 | 10 |
141,033 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/common/types/file_download.py
|
src.kagglesdk.common.types.file_download.FileDownload
|
class FileDownload(KaggleObject):
    r"""
    Standard response object representing a file download.

    See http://go/kaggle-proto-handler-file-downloads
    Some field names/descriptions borrowed from
    google3/gdata/rosy/proto/data.proto

    Attributes:
      content_type (str)
        MIME type of the data
        TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled
        --)
      file_name (str)
        Original file name
      token (str)
        A unique fingerprint for the file/media data
      content_length (int)
        Size of the data, in bytes (if known)
    """

    def __init__(self):
        # Strings default to empty; the byte count starts unset (None) and
        # reads back as 0 through its getter.
        self._content_type = ""
        self._file_name = ""
        self._token = ""
        self._content_length = None
        self._freeze()

    @property
    def content_type(self) -> str:
        r"""
        MIME type of the data
        TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled
        --)
        """
        return self._content_type

    @content_type.setter
    def content_type(self, value: str):
        # None clears the field; any other non-str value is rejected.
        if value is None:
            del self.content_type
        elif isinstance(value, str):
            self._content_type = value
        else:
            raise TypeError('content_type must be of type str')

    @property
    def file_name(self) -> str:
        """Original file name"""
        return self._file_name

    @file_name.setter
    def file_name(self, value: str):
        if value is None:
            del self.file_name
        elif isinstance(value, str):
            self._file_name = value
        else:
            raise TypeError('file_name must be of type str')

    @property
    def token(self) -> str:
        """A unique fingerprint for the file/media data"""
        return self._token

    @token.setter
    def token(self, value: str):
        if value is None:
            del self.token
        elif isinstance(value, str):
            self._token = value
        else:
            raise TypeError('token must be of type str')

    @property
    def content_length(self) -> int:
        """Size of the data, in bytes (if known)"""
        return self._content_length if self._content_length else 0

    @content_length.setter
    def content_length(self, value: int):
        if value is None:
            del self.content_length
        elif isinstance(value, int):
            self._content_length = value
        else:
            raise TypeError('content_length must be of type int')

    @classmethod
    def prepare_from(cls, http_response):
        # Pass the HTTP response through unchanged.
        return http_response
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class FileDownload(KaggleObject):
    '''
    Standard response object representing a file download.
    See http://go/kaggle-proto-handler-file-downloads
    Some field names/descriptions borrowed from
    google3/gdata/rosy/proto/data.proto
    Attributes:
      content_type (str)
        MIME type of the data
        TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled
        --)
      file_name (str)
        Original file name
      token (str)
        A unique fingerprint for the file/media data
      content_length (int)
        Size of the data, in bytes (if known)
    '''
    def __init__(self):
        pass
    @property
    def content_type(self) -> str:
        '''
        MIME type of the data
        TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled
        --)
        '''
        pass
    @content_type.setter
    def content_type(self) -> str:
        pass
    @property
    def file_name(self) -> str:
        '''Original file name'''
        pass
    @file_name.setter
    def file_name(self) -> str:
        pass
    @property
    def token(self) -> str:
        '''A unique fingerprint for the file/media data'''
        pass
    @token.setter
    def token(self) -> str:
        pass
    @property
    def content_length(self) -> int:
        '''Size of the data, in bytes (if known)'''
        pass
    @content_length.setter
    def content_length(self) -> int:
        pass
    @classmethod
    def prepare_from(cls, http_response):
        pass
| 20 | 5 | 5 | 0 | 4 | 1 | 2 | 0.46 | 1 | 3 | 0 | 0 | 9 | 4 | 10 | 27 | 90 | 11 | 54 | 24 | 34 | 25 | 45 | 15 | 34 | 3 | 2 | 1 | 18 |
141,034 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/common/types/http_redirect.py
|
src.kagglesdk.common.types.http_redirect.HttpRedirect
|
class HttpRedirect(KaggleObject):
    r"""
    Represents an HTTP redirect (e.g. 301 or 302) response.
    Patterned after ASP.NET MVC's RedirectResult.

    Attributes:
      url (str)
        Destination URL for the redirect.
      permanent (bool)
        Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP
        302)?.
      bypass_encoding (bool)
        When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`.
        Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the
        client.
      expiry (timedelta)
        Specifies how long the redirected url can be cached.
    """

    def __init__(self):
        # Optional fields (bypass_encoding, expiry) start unset as None.
        self._url = ""
        self._permanent = False
        self._bypass_encoding = None
        self._expiry = None
        self._freeze()

    @property
    def url(self) -> str:
        """Destination URL for the redirect."""
        return self._url

    @url.setter
    def url(self, value: str):
        # None clears the field; any other non-str value is rejected.
        if value is None:
            del self.url
        elif isinstance(value, str):
            self._url = value
        else:
            raise TypeError('url must be of type str')

    @property
    def permanent(self) -> bool:
        r"""
        Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP
        302)?.
        """
        return self._permanent

    @permanent.setter
    def permanent(self, value: bool):
        if value is None:
            del self.permanent
        elif isinstance(value, bool):
            self._permanent = value
        else:
            raise TypeError('permanent must be of type bool')

    @property
    def bypass_encoding(self) -> bool:
        r"""
        When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`.
        Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the
        client.
        """
        return self._bypass_encoding if self._bypass_encoding else False

    @bypass_encoding.setter
    def bypass_encoding(self, value: bool):
        if value is None:
            del self.bypass_encoding
        elif isinstance(value, bool):
            self._bypass_encoding = value
        else:
            raise TypeError('bypass_encoding must be of type bool')

    @property
    def expiry(self) -> timedelta:
        """Specifies how long the redirected url can be cached."""
        return self._expiry

    @expiry.setter
    def expiry(self, value: timedelta):
        if value is None:
            del self.expiry
        elif isinstance(value, timedelta):
            self._expiry = value
        else:
            raise TypeError('expiry must be of type timedelta')

    @classmethod
    def prepare_from(cls, http_response):
        # Pass the HTTP response through unchanged.
        return http_response
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class HttpRedirect(KaggleObject):
    '''
    Represents an HTTP redirect (e.g. 301 or 302) response.
    Patterned after ASP.NET MVC's RedirectResult.
    Attributes:
      url (str)
        Destination URL for the redirect.
      permanent (bool)
        Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP
        302)?.
      bypass_encoding (bool)
        When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`.
        Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the
        client.
      expiry (timedelta)
        Specifies how long the redirected url can be cached.
    '''
    def __init__(self):
        pass
    @property
    def url(self) -> str:
        '''Destination URL for the redirect.'''
        pass
    @url.setter
    def url(self) -> str:
        pass
    @property
    def permanent(self) -> bool:
        '''
        Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP
        302)?.
        '''
        pass
    @permanent.setter
    def permanent(self) -> bool:
        pass
    @property
    def bypass_encoding(self) -> bool:
        '''
        When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`.
        Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the
        client.
        '''
        pass
    @bypass_encoding.setter
    def bypass_encoding(self) -> bool:
        pass
    @property
    def expiry(self) -> timedelta:
        '''Specifies how long the redirected url can be cached.'''
        pass
    @expiry.setter
    def expiry(self) -> timedelta:
        pass
    @classmethod
    def prepare_from(cls, http_response):
        pass
| 20 | 5 | 6 | 0 | 4 | 1 | 2 | 0.5 | 1 | 4 | 0 | 0 | 9 | 4 | 10 | 27 | 92 | 11 | 54 | 24 | 34 | 27 | 45 | 15 | 34 | 3 | 2 | 1 | 18 |
141,035 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/competitions/types/competition_api_service.py
|
src.kagglesdk.competitions.types.competition_api_service.ApiCategory
|
class ApiCategory(KaggleObject):
    r"""
    TODO(erdalsivri): Consider reusing with Kaggle.Sdk.Datasets.ApiCategory.

    Attributes:
      ref (str)
      name (str)
      description (str)
      full_path (str)
      competition_count (int)
      dataset_count (int)
      script_count (int)
      total_count (int)
    """

    def __init__(self):
        # Optional string fields start as None and read back as "" via
        # their getters; counts default to 0.
        self._ref = ""
        self._name = None
        self._description = None
        self._full_path = None
        self._competition_count = 0
        self._dataset_count = 0
        self._script_count = 0
        self._total_count = 0
        self._freeze()

    @property
    def ref(self) -> str:
        return self._ref

    @ref.setter
    def ref(self, value: str):
        # None clears the field; any other non-str value is rejected.
        if value is None:
            del self.ref
        elif isinstance(value, str):
            self._ref = value
        else:
            raise TypeError('ref must be of type str')

    @property
    def name(self) -> str:
        return self._name if self._name else ""

    @name.setter
    def name(self, value: str):
        if value is None:
            del self.name
        elif isinstance(value, str):
            self._name = value
        else:
            raise TypeError('name must be of type str')

    @property
    def description(self) -> str:
        return self._description if self._description else ""

    @description.setter
    def description(self, value: str):
        if value is None:
            del self.description
        elif isinstance(value, str):
            self._description = value
        else:
            raise TypeError('description must be of type str')

    @property
    def full_path(self) -> str:
        return self._full_path if self._full_path else ""

    @full_path.setter
    def full_path(self, value: str):
        if value is None:
            del self.full_path
        elif isinstance(value, str):
            self._full_path = value
        else:
            raise TypeError('full_path must be of type str')

    @property
    def competition_count(self) -> int:
        return self._competition_count

    @competition_count.setter
    def competition_count(self, value: int):
        if value is None:
            del self.competition_count
        elif isinstance(value, int):
            self._competition_count = value
        else:
            raise TypeError('competition_count must be of type int')

    @property
    def dataset_count(self) -> int:
        return self._dataset_count

    @dataset_count.setter
    def dataset_count(self, value: int):
        if value is None:
            del self.dataset_count
        elif isinstance(value, int):
            self._dataset_count = value
        else:
            raise TypeError('dataset_count must be of type int')

    @property
    def script_count(self) -> int:
        return self._script_count

    @script_count.setter
    def script_count(self, value: int):
        if value is None:
            del self.script_count
        elif isinstance(value, int):
            self._script_count = value
        else:
            raise TypeError('script_count must be of type int')

    @property
    def total_count(self) -> int:
        return self._total_count

    @total_count.setter
    def total_count(self, value: int):
        if value is None:
            del self.total_count
        elif isinstance(value, int):
            self._total_count = value
        else:
            raise TypeError('total_count must be of type int')
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class ApiCategory(KaggleObject):
    '''
    TODO(erdalsivri): Consider reusing with Kaggle.Sdk.Datasets.ApiCategory.
    Attributes:
      ref (str)
      name (str)
      description (str)
      full_path (str)
      competition_count (int)
      dataset_count (int)
      script_count (int)
      total_count (int)
    '''
    def __init__(self):
        pass
    @property
    def ref(self) -> str:
        pass
    @ref.setter
    def ref(self) -> str:
        pass
    @property
    def name(self) -> str:
        pass
    @name.setter
    def name(self) -> str:
        pass
    @property
    def description(self) -> str:
        pass
    @description.setter
    def description(self) -> str:
        pass
    @property
    def full_path(self) -> str:
        pass
    @full_path.setter
    def full_path(self) -> str:
        pass
    @property
    def competition_count(self) -> int:
        pass
    @competition_count.setter
    def competition_count(self) -> int:
        pass
    @property
    def dataset_count(self) -> int:
        pass
    @dataset_count.setter
    def dataset_count(self) -> int:
        pass
    @property
    def script_count(self) -> int:
        pass
    @script_count.setter
    def script_count(self) -> int:
        pass
    @property
    def total_count(self) -> int:
        pass
    @total_count.setter
    def total_count(self) -> int:
        pass
| 34 | 1 | 5 | 0 | 5 | 0 | 2 | 0.12 | 1 | 3 | 0 | 0 | 17 | 8 | 17 | 34 | 129 | 18 | 99 | 42 | 65 | 12 | 83 | 26 | 65 | 3 | 2 | 1 | 33 |
141,036 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/competitions/types/competition_api_service.py
|
src.kagglesdk.competitions.types.competition_api_service.ApiCompetition
|
class ApiCompetition(KaggleObject):
    r"""
    Attributes:
      id (int)
      ref (str)
      title (str)
      url (str)
      description (str)
      organization_name (str)
      organization_ref (str)
      category (str)
      reward (str)
      tags (ApiCategory)
      deadline (datetime)
      kernel_count (int)
      team_count (int)
      user_has_entered (bool)
      user_rank (int)
      merger_deadline (datetime)
      new_entrant_deadline (datetime)
      enabled_date (datetime)
      max_daily_submissions (int)
      max_team_size (int)
      evaluation_metric (str)
      awards_points (bool)
      is_kernels_submissions_only (bool)
      submissions_disabled (bool)
    """

    def __init__(self):
        # Optional fields start as None and read back as ""/0 through their
        # getters; required scalars default to falsy concrete values.
        self._id = 0
        self._ref = ""
        self._title = None
        self._url = None
        self._description = None
        self._organization_name = None
        self._organization_ref = None
        self._category = None
        self._reward = None
        self._tags = []
        self._deadline = None
        self._kernel_count = 0
        self._team_count = 0
        self._user_has_entered = False
        self._user_rank = None
        self._merger_deadline = None
        self._new_entrant_deadline = None
        self._enabled_date = None
        self._max_daily_submissions = 0
        self._max_team_size = None
        self._evaluation_metric = None
        self._awards_points = False
        self._is_kernels_submissions_only = False
        self._submissions_disabled = False
        self._freeze()

    # Each setter follows the same contract: assigning None clears the
    # field, a wrong type raises TypeError, otherwise the value is stored.

    @property
    def id(self) -> int:
        return self._id

    @id.setter
    def id(self, value: int):
        if value is None:
            del self.id
        elif isinstance(value, int):
            self._id = value
        else:
            raise TypeError('id must be of type int')

    @property
    def ref(self) -> str:
        return self._ref

    @ref.setter
    def ref(self, value: str):
        if value is None:
            del self.ref
        elif isinstance(value, str):
            self._ref = value
        else:
            raise TypeError('ref must be of type str')

    @property
    def title(self) -> str:
        return self._title if self._title else ""

    @title.setter
    def title(self, value: str):
        if value is None:
            del self.title
        elif isinstance(value, str):
            self._title = value
        else:
            raise TypeError('title must be of type str')

    @property
    def url(self) -> str:
        return self._url if self._url else ""

    @url.setter
    def url(self, value: str):
        if value is None:
            del self.url
        elif isinstance(value, str):
            self._url = value
        else:
            raise TypeError('url must be of type str')

    @property
    def description(self) -> str:
        return self._description if self._description else ""

    @description.setter
    def description(self, value: str):
        if value is None:
            del self.description
        elif isinstance(value, str):
            self._description = value
        else:
            raise TypeError('description must be of type str')

    @property
    def organization_name(self) -> str:
        return self._organization_name if self._organization_name else ""

    @organization_name.setter
    def organization_name(self, value: str):
        if value is None:
            del self.organization_name
        elif isinstance(value, str):
            self._organization_name = value
        else:
            raise TypeError('organization_name must be of type str')

    @property
    def organization_ref(self) -> str:
        return self._organization_ref if self._organization_ref else ""

    @organization_ref.setter
    def organization_ref(self, value: str):
        if value is None:
            del self.organization_ref
        elif isinstance(value, str):
            self._organization_ref = value
        else:
            raise TypeError('organization_ref must be of type str')

    @property
    def category(self) -> str:
        return self._category if self._category else ""

    @category.setter
    def category(self, value: str):
        if value is None:
            del self.category
        elif isinstance(value, str):
            self._category = value
        else:
            raise TypeError('category must be of type str')

    @property
    def reward(self) -> str:
        return self._reward if self._reward else ""

    @reward.setter
    def reward(self, value: str):
        if value is None:
            del self.reward
        elif isinstance(value, str):
            self._reward = value
        else:
            raise TypeError('reward must be of type str')

    @property
    def tags(self) -> Optional[List[Optional['ApiCategory']]]:
        return self._tags

    @tags.setter
    def tags(self, value: Optional[List[Optional['ApiCategory']]]):
        if value is None:
            del self.tags
            return
        if not isinstance(value, list):
            raise TypeError('tags must be of type list')
        # Reject any element that is not an ApiCategory before storing.
        for item in value:
            if not isinstance(item, ApiCategory):
                raise TypeError('tags must contain only items of type ApiCategory')
        self._tags = value

    @property
    def deadline(self) -> datetime:
        return self._deadline

    @deadline.setter
    def deadline(self, value: datetime):
        if value is None:
            del self.deadline
        elif isinstance(value, datetime):
            self._deadline = value
        else:
            raise TypeError('deadline must be of type datetime')

    @property
    def kernel_count(self) -> int:
        return self._kernel_count

    @kernel_count.setter
    def kernel_count(self, value: int):
        if value is None:
            del self.kernel_count
        elif isinstance(value, int):
            self._kernel_count = value
        else:
            raise TypeError('kernel_count must be of type int')

    @property
    def team_count(self) -> int:
        return self._team_count

    @team_count.setter
    def team_count(self, value: int):
        if value is None:
            del self.team_count
        elif isinstance(value, int):
            self._team_count = value
        else:
            raise TypeError('team_count must be of type int')

    @property
    def user_has_entered(self) -> bool:
        return self._user_has_entered

    @user_has_entered.setter
    def user_has_entered(self, value: bool):
        if value is None:
            del self.user_has_entered
        elif isinstance(value, bool):
            self._user_has_entered = value
        else:
            raise TypeError('user_has_entered must be of type bool')

    @property
    def user_rank(self) -> int:
        return self._user_rank if self._user_rank else 0

    @user_rank.setter
    def user_rank(self, value: int):
        if value is None:
            del self.user_rank
        elif isinstance(value, int):
            self._user_rank = value
        else:
            raise TypeError('user_rank must be of type int')

    @property
    def merger_deadline(self) -> datetime:
        return self._merger_deadline

    @merger_deadline.setter
    def merger_deadline(self, value: datetime):
        if value is None:
            del self.merger_deadline
        elif isinstance(value, datetime):
            self._merger_deadline = value
        else:
            raise TypeError('merger_deadline must be of type datetime')

    @property
    def new_entrant_deadline(self) -> datetime:
        return self._new_entrant_deadline

    @new_entrant_deadline.setter
    def new_entrant_deadline(self, value: datetime):
        if value is None:
            del self.new_entrant_deadline
        elif isinstance(value, datetime):
            self._new_entrant_deadline = value
        else:
            raise TypeError('new_entrant_deadline must be of type datetime')

    @property
    def enabled_date(self) -> datetime:
        return self._enabled_date

    @enabled_date.setter
    def enabled_date(self, value: datetime):
        if value is None:
            del self.enabled_date
        elif isinstance(value, datetime):
            self._enabled_date = value
        else:
            raise TypeError('enabled_date must be of type datetime')

    @property
    def max_daily_submissions(self) -> int:
        return self._max_daily_submissions

    @max_daily_submissions.setter
    def max_daily_submissions(self, value: int):
        if value is None:
            del self.max_daily_submissions
        elif isinstance(value, int):
            self._max_daily_submissions = value
        else:
            raise TypeError('max_daily_submissions must be of type int')

    @property
    def max_team_size(self) -> int:
        return self._max_team_size if self._max_team_size else 0

    @max_team_size.setter
    def max_team_size(self, value: int):
        if value is None:
            del self.max_team_size
        elif isinstance(value, int):
            self._max_team_size = value
        else:
            raise TypeError('max_team_size must be of type int')

    @property
    def evaluation_metric(self) -> str:
        return self._evaluation_metric if self._evaluation_metric else ""

    @evaluation_metric.setter
    def evaluation_metric(self, value: str):
        if value is None:
            del self.evaluation_metric
        elif isinstance(value, str):
            self._evaluation_metric = value
        else:
            raise TypeError('evaluation_metric must be of type str')

    @property
    def awards_points(self) -> bool:
        return self._awards_points

    @awards_points.setter
    def awards_points(self, value: bool):
        if value is None:
            del self.awards_points
        elif isinstance(value, bool):
            self._awards_points = value
        else:
            raise TypeError('awards_points must be of type bool')

    @property
    def is_kernels_submissions_only(self) -> bool:
        return self._is_kernels_submissions_only

    @is_kernels_submissions_only.setter
    def is_kernels_submissions_only(self, value: bool):
        if value is None:
            del self.is_kernels_submissions_only
        elif isinstance(value, bool):
            self._is_kernels_submissions_only = value
        else:
            raise TypeError('is_kernels_submissions_only must be of type bool')

    @property
    def submissions_disabled(self) -> bool:
        return self._submissions_disabled

    @submissions_disabled.setter
    def submissions_disabled(self, value: bool):
        if value is None:
            del self.submissions_disabled
        elif isinstance(value, bool):
            self._submissions_disabled = value
        else:
            raise TypeError('submissions_disabled must be of type bool')
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class ApiCompetition(KaggleObject):
    '''
    Attributes:
      id (int)
      ref (str)
      title (str)
      url (str)
      description (str)
      organization_name (str)
      organization_ref (str)
      category (str)
      reward (str)
      tags (ApiCategory)
      deadline (datetime)
      kernel_count (int)
      team_count (int)
      user_has_entered (bool)
      user_rank (int)
      merger_deadline (datetime)
      new_entrant_deadline (datetime)
      enabled_date (datetime)
      max_daily_submissions (int)
      max_team_size (int)
      evaluation_metric (str)
      awards_points (bool)
      is_kernels_submissions_only (bool)
      submissions_disabled (bool)
    '''
    def __init__(self):
        pass
    @property
    def id(self) -> int:
        pass
    @id.setter
    def id(self) -> int:
        pass
    @property
    def ref(self) -> str:
        pass
    @ref.setter
    def ref(self) -> str:
        pass
    @property
    def title(self) -> str:
        pass
    @title.setter
    def title(self) -> str:
        pass
    @property
    def url(self) -> str:
        pass
    @url.setter
    def url(self) -> str:
        pass
    @property
    def description(self) -> str:
        pass
    @description.setter
    def description(self) -> str:
        pass
    @property
    def organization_name(self) -> str:
        pass
    @organization_name.setter
    def organization_name(self) -> str:
        pass
    @property
    def organization_ref(self) -> str:
        pass
    @organization_ref.setter
    def organization_ref(self) -> str:
        pass
    @property
    def category(self) -> str:
        pass
    @category.setter
    def category(self) -> str:
        pass
    @property
    def reward(self) -> str:
        pass
    @reward.setter
    def reward(self) -> str:
        pass
    @property
    def tags(self) -> Optional[List[Optional['ApiCategory']]]:
        pass
    @tags.setter
    def tags(self) -> Optional[List[Optional['ApiCategory']]]:
        pass
    @property
    def deadline(self) -> datetime:
        pass
    @deadline.setter
    def deadline(self) -> datetime:
        pass
    @property
    def kernel_count(self) -> int:
        pass
    @kernel_count.setter
    def kernel_count(self) -> int:
        pass
    @property
    def team_count(self) -> int:
        pass
    @team_count.setter
    def team_count(self) -> int:
        pass
    @property
    def user_has_entered(self) -> bool:
        pass
    @user_has_entered.setter
    def user_has_entered(self) -> bool:
        pass
    @property
    def user_rank(self) -> int:
        pass
    @user_rank.setter
    def user_rank(self) -> int:
        pass
    @property
    def merger_deadline(self) -> datetime:
        pass
    @merger_deadline.setter
    def merger_deadline(self) -> datetime:
        pass
    @property
    def new_entrant_deadline(self) -> datetime:
        pass
    @new_entrant_deadline.setter
    def new_entrant_deadline(self) -> datetime:
        pass
    @property
    def enabled_date(self) -> datetime:
        pass
    @enabled_date.setter
    def enabled_date(self) -> datetime:
        pass
    @property
    def max_daily_submissions(self) -> int:
        pass
    @max_daily_submissions.setter
    def max_daily_submissions(self) -> int:
        pass
    @property
    def max_team_size(self) -> int:
        pass
    @max_team_size.setter
    def max_team_size(self) -> int:
        pass
    @property
    def evaluation_metric(self) -> str:
        pass
    @evaluation_metric.setter
    def evaluation_metric(self) -> str:
        pass
    @property
    def awards_points(self) -> bool:
        pass
    @awards_points.setter
    def awards_points(self) -> bool:
        pass
    @property
    def is_kernels_submissions_only(self) -> bool:
        pass
    @is_kernels_submissions_only.setter
    def is_kernels_submissions_only(self) -> bool:
        pass
    @property
    def submissions_disabled(self) -> bool:
        pass
    @submissions_disabled.setter
    def submissions_disabled(self) -> bool:
        pass
| 98 | 1 | 5 | 0 | 5 | 0 | 2 | 0.09 | 1 | 7 | 1 | 0 | 49 | 24 | 49 | 66 | 369 | 49 | 293 | 122 | 195 | 27 | 245 | 74 | 195 | 4 | 2 | 1 | 98 |
141,037 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/datasets/types/dataset_api_service.py
|
src.kagglesdk.datasets.types.dataset_api_service.ApiUploadDatasetFileRequest
|
class ApiUploadDatasetFileRequest(KaggleObject):
    r"""
    Attributes:
      file_name (str)
      content_length (int)
      last_modified_epoch_seconds (int)
    """

    def __init__(self):
        self._file_name = ""
        self._content_length = 0
        self._last_modified_epoch_seconds = 0
        self._freeze()

    @property
    def file_name(self) -> str:
        return self._file_name

    @file_name.setter
    def file_name(self, value: str):
        # None clears the field; any other non-str value is rejected.
        if value is None:
            del self.file_name
        elif isinstance(value, str):
            self._file_name = value
        else:
            raise TypeError('file_name must be of type str')

    @property
    def content_length(self) -> int:
        return self._content_length

    @content_length.setter
    def content_length(self, value: int):
        if value is None:
            del self.content_length
        elif isinstance(value, int):
            self._content_length = value
        else:
            raise TypeError('content_length must be of type int')

    @property
    def last_modified_epoch_seconds(self) -> int:
        return self._last_modified_epoch_seconds

    @last_modified_epoch_seconds.setter
    def last_modified_epoch_seconds(self, value: int):
        if value is None:
            del self.last_modified_epoch_seconds
        elif isinstance(value, int):
            self._last_modified_epoch_seconds = value
        else:
            raise TypeError('last_modified_epoch_seconds must be of type int')

    def endpoint(self):
        # Path placeholders are filled from this object's field map.
        path = '/api/v1/datasets/upload/file/{content_length}/{last_modified_epoch_seconds}'
        return path.format_map(self.to_field_map(self))

    @staticmethod
    def method():
        return 'POST'
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class ApiUploadDatasetFileRequest(KaggleObject):
    '''
    Attributes:
      file_name (str)
      content_length (int)
      last_modified_epoch_seconds (int)
    '''
    def __init__(self):
        pass
    @property
    def file_name(self) -> str:
        pass
    @file_name.setter
    def file_name(self) -> str:
        pass
    @property
    def content_length(self) -> int:
        pass
    @content_length.setter
    def content_length(self) -> int:
        pass
    @property
    def last_modified_epoch_seconds(self) -> int:
        pass
    @last_modified_epoch_seconds.setter
    def last_modified_epoch_seconds(self) -> int:
        pass
    def endpoint(self):
        pass
    @staticmethod
    def method():
        pass
| 17 | 1 | 4 | 0 | 4 | 0 | 2 | 0.13 | 1 | 3 | 0 | 0 | 8 | 3 | 9 | 26 | 61 | 10 | 45 | 21 | 28 | 6 | 38 | 14 | 28 | 3 | 2 | 1 | 15 |
141,038 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/competitions/types/competition_api_service.py
|
src.kagglesdk.competitions.types.competition_api_service.ApiCreateSubmissionRequest
|
class ApiCreateSubmissionRequest(KaggleObject):
    r"""
    Attributes:
      competition_name (str)
        Competition name. Example: 'titanic'.
      blob_file_tokens (str)
        Token identifying location of uploaded submission file.
      submission_description (str)
        Description of competition submission.
    """

    def __init__(self):
        # The description is optional (None) and reads back as "".
        self._competition_name = ""
        self._blob_file_tokens = ""
        self._submission_description = None
        self._freeze()

    @property
    def competition_name(self) -> str:
        """Competition name. Example: 'titanic'."""
        return self._competition_name

    @competition_name.setter
    def competition_name(self, value: str):
        # None clears the field; any other non-str value is rejected.
        if value is None:
            del self.competition_name
        elif isinstance(value, str):
            self._competition_name = value
        else:
            raise TypeError('competition_name must be of type str')

    @property
    def blob_file_tokens(self) -> str:
        """Token identifying location of uploaded submission file."""
        return self._blob_file_tokens

    @blob_file_tokens.setter
    def blob_file_tokens(self, value: str):
        if value is None:
            del self.blob_file_tokens
        elif isinstance(value, str):
            self._blob_file_tokens = value
        else:
            raise TypeError('blob_file_tokens must be of type str')

    @property
    def submission_description(self) -> str:
        """Description of competition submission."""
        return self._submission_description if self._submission_description else ""

    @submission_description.setter
    def submission_description(self, value: str):
        if value is None:
            del self.submission_description
        elif isinstance(value, str):
            self._submission_description = value
        else:
            raise TypeError('submission_description must be of type str')

    def endpoint(self):
        # The competition name is substituted from this object's field map.
        path = '/api/v1/competitions/submissions/submit/{competition_name}'
        return path.format_map(self.to_field_map(self))

    @staticmethod
    def method():
        return 'POST'
|
# Auto-generated interface skeleton: every body is intentionally `pass`.
# NOTE(review): setter stubs omit the value parameter -- verify generator intent.
class ApiCreateSubmissionRequest(KaggleObject):
    '''
    Attributes:
      competition_name (str)
        Competition name. Example: 'titanic'.
      blob_file_tokens (str)
        Token identifying location of uploaded submission file.
      submission_description (str)
        Description of competition submission.
    '''
    def __init__(self):
        pass
    @property
    def competition_name(self) -> str:
        '''Competition name. Example: 'titanic'.'''
        pass
    @competition_name.setter
    def competition_name(self) -> str:
        pass
    @property
    def blob_file_tokens(self) -> str:
        '''Token identifying location of uploaded submission file.'''
        pass
    @blob_file_tokens.setter
    def blob_file_tokens(self) -> str:
        pass
    @property
    def submission_description(self) -> str:
        '''Description of competition submission.'''
        pass
    @submission_description.setter
    def submission_description(self) -> str:
        pass
    def endpoint(self):
        pass
    @staticmethod
    def method():
        pass
| 17 | 4 | 4 | 0 | 4 | 0 | 2 | 0.27 | 1 | 2 | 0 | 0 | 8 | 3 | 9 | 26 | 67 | 10 | 45 | 21 | 28 | 12 | 38 | 14 | 28 | 3 | 2 | 1 | 15 |
141,039 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kagglesdk/admin/types/inbox_file_service.py
|
src.kagglesdk.admin.types.inbox_file_service.CreateInboxFileResponse
|
class CreateInboxFileResponse(KaggleObject):
    r"""Deliberately empty response for inbox-file creation.

    NOTE: This is sent to non-admins, so we're intentionally *NOT* sending back
    the full InboxFile (with its URL for a direct download).
    """
|
# Auto-generated class skeleton: docstring preserved, body intentionally stubbed.
class CreateInboxFileResponse(KaggleObject):
    '''
    NOTE: This is sent to non-admins, so we're intentionally *NOT* sending back
    the full InboxFile (with its URL for a direct download).
    '''
    pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 17 | 8 | 2 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
141,040 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/kaggle_models_extended.py
|
src.kaggle.models.kaggle_models_extended.SubmitResult
|
class SubmitResult(object):
    """Competition submission outcome, built from an API response dict."""

    def __init__(self, init_dict):
        # Parse each raw value and expose the entries as attributes.
        for key, value in init_dict.items():
            setattr(self, key, parse(value))

    def __repr__(self):
        return self.message
|
# Auto-generated class skeleton: signatures only, bodies intentionally stubbed.
class SubmitResult(object):

    def __init__(self, init_dict):
        pass

    def __repr__(self):
        pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 8 | 2 | 6 | 4 | 3 | 0 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
141,041 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/kaggle_models_extended.py
|
src.kaggle.models.kaggle_models_extended.Submission
|
class Submission(object):
    """A competition submission parsed from an API response dict."""

    def __init__(self, init_dict):
        # Parse each raw value and expose the entries as attributes.
        for key, value in init_dict.items():
            setattr(self, key, parse(value))
        # Human-readable size string, when the byte count is known.
        self.size = None if self.totalBytes is None else File.get_size(
            self.totalBytes)

    def __repr__(self):
        return str(self.ref)
|
# Auto-generated class skeleton: signatures only, bodies intentionally stubbed.
class Submission(object):

    def __init__(self, init_dict):
        pass

    def __repr__(self):
        pass
| 3 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 1 | 2 | 1 | 0 | 2 | 1 | 2 | 2 | 12 | 2 | 10 | 5 | 7 | 0 | 9 | 5 | 6 | 2 | 1 | 1 | 3 |
141,042 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/models/kaggle_models_extended.py
|
src.kaggle.models.kaggle_models_extended.ResumableUploadResult
|
class ResumableUploadResult(object):
    """Status of a resumable file upload."""

    # All bytes were received by the server.
    COMPLETE = 1
    # Non-transient failure or expired session: the upload cannot be resumed,
    # so restart from scratch (call /api/v1/files/upload again to get a fresh
    # create/upload URL and token).
    FAILED = 2
    # Transient interruption; the upload can be safely resumed.
    INCOMPLETE = 3

    def __init__(self, result, bytes_uploaded=None):
        self.result = result
        self.bytes_uploaded = bytes_uploaded
        if bytes_uploaded is None:
            self.start_at = 0
        else:
            # Resume from the byte after the last one the server confirmed.
            self.start_at = bytes_uploaded + 1

    @staticmethod
    def Complete():
        return ResumableUploadResult(ResumableUploadResult.COMPLETE)

    @staticmethod
    def Failed():
        return ResumableUploadResult(ResumableUploadResult.FAILED)

    @staticmethod
    def Incomplete(bytes_uploaded=None):
        return ResumableUploadResult(ResumableUploadResult.INCOMPLETE,
                                     bytes_uploaded)
|
# Auto-generated class skeleton: constructor and factory signatures only.
class ResumableUploadResult(object):

    def __init__(self, result, bytes_uploaded=None):
        pass

    @staticmethod
    def Complete():
        pass

    @staticmethod
    def Failed():
        pass

    @staticmethod
    def Incomplete(bytes_uploaded=None):
        pass
| 8 | 0 | 3 | 0 | 3 | 0 | 1 | 0.39 | 1 | 0 | 0 | 0 | 1 | 3 | 4 | 4 | 31 | 6 | 18 | 14 | 10 | 7 | 14 | 11 | 9 | 2 | 1 | 0 | 5 |
141,043 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/api/kaggle_api_extended.py
|
src.kaggle.api.kaggle_api_extended.FileList
|
class FileList(object):
    """One page of file listings from an API response dict."""

    def __init__(self, init_dict):
        self.error_message = ''
        raw_files = init_dict['files']
        if raw_files:
            # Normalize 'size' into 'totalBytes', the key File's ctor expects.
            for entry in raw_files:
                if 'size' in entry:
                    entry['totalBytes'] = entry['size']
            self.files = [File(entry) for entry in raw_files]
        else:
            self.files = []
        self.nextPageToken = init_dict['nextPageToken'] or ""

    def __repr__(self):
        return ''
|
# Auto-generated class skeleton: signatures only, bodies intentionally stubbed.
class FileList(object):

    def __init__(self, init_dict):
        pass

    def __repr__(self):
        pass
| 3 | 0 | 9 | 0 | 9 | 0 | 3 | 0 | 1 | 1 | 1 | 0 | 2 | 3 | 2 | 2 | 20 | 2 | 18 | 9 | 15 | 0 | 16 | 9 | 13 | 5 | 1 | 3 | 6 |
141,044 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/api/kaggle_api_extended.py
|
src.kaggle.api.kaggle_api_extended.KaggleApi
|
class KaggleApi:
    """Client for the Kaggle public API.

    Holds configuration discovery (env vars / kaggle.json), option
    whitelists for CLI validation, and the display-field lists used when
    printing results as tables or CSV.
    """

    __version__ = '1.7.3b1'

    # Keys recognized in kaggle.json / KAGGLE_* environment variables.
    CONFIG_NAME_PROXY = 'proxy'
    CONFIG_NAME_COMPETITION = 'competition'
    CONFIG_NAME_PATH = 'path'
    CONFIG_NAME_USER = 'username'
    CONFIG_NAME_KEY = 'key'
    CONFIG_NAME_SSL_CA_CERT = 'ssl_ca_cert'

    HEADER_API_VERSION = 'X-Kaggle-ApiVersion'
    DATASET_METADATA_FILE = 'dataset-metadata.json'
    OLD_DATASET_METADATA_FILE = 'datapackage.json'
    KERNEL_METADATA_FILE = 'kernel-metadata.json'
    MODEL_METADATA_FILE = 'model-metadata.json'
    MODEL_INSTANCE_METADATA_FILE = 'model-instance-metadata.json'
    MAX_NUM_INBOX_FILES_TO_UPLOAD = 1000
    MAX_UPLOAD_RESUME_ATTEMPTS = 10

    # Resolved at class-definition time; KAGGLE_CONFIG_DIR wins.
    config_dir = os.environ.get('KAGGLE_CONFIG_DIR')
    if not config_dir:
        config_dir = os.path.join(expanduser('~'), '.kaggle')
        # Use ~/.kaggle if it already exists for backwards compatibility,
        # otherwise follow XDG base directory specification
        if sys.platform.startswith('linux') and not os.path.exists(config_dir):
            config_dir = os.path.join((os.environ.get('XDG_CONFIG_HOME') or
                                       os.path.join(expanduser('~'), '.config')),
                                      'kaggle')
    if not os.path.exists(config_dir):
        os.makedirs(config_dir)

    config_file = 'kaggle.json'
    config = os.path.join(config_dir, config_file)
    config_values = {}
    already_printed_version_warning = False

    args = {}  # DEBUG Add --local to use localhost
    if os.environ.get('KAGGLE_API_ENVIRONMENT') == 'LOCALHOST':
        args = {'--verbose', '--local'}

    # Kernels valid types
    valid_push_kernel_types = ['script', 'notebook']
    valid_push_language_types = ['python', 'r', 'rmarkdown']
    valid_push_pinning_types = ['original', 'latest']
    valid_list_languages = ['all', 'python', 'r', 'sqlite', 'julia']
    valid_list_kernel_types = ['all', 'script', 'notebook']
    valid_list_output_types = ['all', 'visualization', 'data']
    valid_list_sort_by = [
        'hotness', 'commentCount', 'dateCreated', 'dateRun', 'relevance',
        'scoreAscending', 'scoreDescending', 'viewCount', 'voteCount'
    ]

    # Competitions valid types
    valid_competition_groups = [
        'general', 'entered', 'community', 'hosted', 'unlaunched',
        'unlaunched_community'
    ]
    valid_competition_categories = [
        'all', 'featured', 'research', 'recruitment', 'gettingStarted', 'masters',
        'playground'
    ]
    valid_competition_sort_by = [
        'grouped', 'best', 'prize', 'earliestDeadline', 'latestDeadline',
        'numberOfTeams', 'relevance', 'recentlyCreated'
    ]

    # Datasets valid types
    valid_dataset_file_types = ['all', 'csv', 'sqlite', 'json', 'bigQuery']
    valid_dataset_license_names = ['all', 'cc', 'gpl', 'odb', 'other']
    valid_dataset_sort_bys = [
        'hottest', 'votes', 'updated', 'active', 'published'
    ]

    # Models valid types
    valid_model_sort_bys = [
        'hotness', 'downloadCount', 'voteCount', 'notebookCount', 'createTime'
    ]

    # Command prefixes that are valid without authentication.
    command_prefixes_allowing_anonymous_access = ('datasets download',
                                                  'datasets files')

    # Attributes (display fields/labels used by print_table / print_csv)
    competition_fields = [
        'ref', 'deadline', 'category', 'reward', 'teamCount', 'userHasEntered'
    ]
    submission_fields = [
        'fileName', 'date', 'description', 'status', 'publicScore', 'privateScore'
    ]
    competition_file_fields = ['name', 'totalBytes', 'creationDate']
    competition_file_labels = ['name', 'size', 'creationDate']
    competition_leaderboard_fields = [
        'teamId', 'teamName', 'submissionDate', 'score'
    ]
    dataset_fields = [
        'ref', 'title', 'totalBytes', 'lastUpdated', 'downloadCount', 'voteCount',
        'usabilityRating'
    ]
    dataset_labels = [
        'ref', 'title', 'size', 'lastUpdated', 'downloadCount', 'voteCount',
        'usabilityRating'
    ]
    dataset_file_fields = ['name', 'total_bytes', 'creationDate']
    model_fields = ['id', 'ref', 'title', 'subtitle', 'author']
    model_all_fields = [
        'id', 'ref', 'author', 'slug', 'title', 'subtitle', 'isPrivate',
        'description', 'publishTime'
    ]
    model_file_fields = ['name', 'size', 'creationDate']
def _is_retriable(self, e):
    """Return True if `e` is a (likely transient) connection error worth retrying."""
    # isinstance already walks the class hierarchy, so it replaces the
    # noisier issubclass(type(e), ...) chain.
    return isinstance(e, (ConnectionError,
                          urllib3_exceptions.ConnectionError,
                          urllib3_exceptions.ConnectTimeoutError,
                          urllib3_exceptions.ProtocolError,
                          requests.exceptions.ConnectionError,
                          requests.exceptions.ConnectTimeout))
def _calculate_backoff_delay(self, attempt, initial_delay_millis,
retry_multiplier, randomness_factor):
delay_ms = initial_delay_millis * (retry_multiplier**attempt)
random_wait_ms = int(random() - 0.5) * 2 * delay_ms * randomness_factor
total_delay = (delay_ms + random_wait_ms) / 1000.0
return total_delay
def with_retry(self,
func,
max_retries=10,
initial_delay_millis=500,
retry_multiplier=1.7,
randomness_factor=0.5):
def retriable_func(*args):
for i in range(1, max_retries + 1):
try:
return func(*args)
except Exception as e:
if self._is_retriable(e) and i < max_retries:
total_delay = self._calculate_backoff_delay(i, initial_delay_millis,
retry_multiplier,
randomness_factor)
print('Request failed: %s. Will retry in %2.1f seconds' %
(e, total_delay))
time.sleep(total_delay)
continue
raise
return retriable_func
## Authentication
def authenticate(self):
    """authenticate the user with the Kaggle API. This method will generate
    a configuration, first checking the environment for credential
    variables, and falling back to looking for the .kaggle/kaggle.json
    configuration file.
    """
    config_data = {}
    # Ex: 'datasets list', 'competitions files', 'models instances get', etc.
    api_command = ' '.join(sys.argv[1:])

    # Step 1: try getting username/password from environment
    config_data = self.read_config_environment(config_data)

    # Step 2: if credentials were not in env read in configuration file
    if self.CONFIG_NAME_USER not in config_data \
            or self.CONFIG_NAME_KEY not in config_data:
        if os.path.exists(self.config):
            config_data = self.read_config_file(config_data)
        elif self._is_help_or_version_command(api_command) or (len(
                sys.argv) > 2 and api_command.startswith(
                self.command_prefixes_allowing_anonymous_access)):
            # Some API commands should be allowed without authentication.
            # NOTE(review): anonymous access additionally requires a concrete
            # sub-command (len(sys.argv) > 2), not just the bare prefix.
            return
        else:
            raise IOError('Could not find {}. Make sure it\'s located in'
                          ' {}. Or use the environment method. See setup'
                          ' instructions at'
                          ' https://github.com/Kaggle/kaggle-api/'.format(
                              self.config_file, self.config_dir))

    # Step 3: load into configuration!
    self._load_config(config_data)
def _is_help_or_version_command(self, api_command):
"""determines if the string command passed in is for a help or version
command.
Parameters
==========
api_command: a string, 'datasets list', 'competitions files',
'models instances get', etc.
"""
return api_command.endswith(('-h', '--help', '-v', '--version'))
def read_config_environment(self, config_data=None, quiet=False):
    """Collect KAGGLE_* environment variables into a config dictionary.

    The environment keys mirror kaggle.json, but carry a "KAGGLE_" prefix
    to define a unique namespace.

    Parameters
    ==========
    config_data: a partially loaded configuration dictionary (optional)
    quiet: suppress verbose print of output (default is False)
    """
    if config_data is None:
        config_data = {}
    prefix = 'KAGGLE_'
    for name, value in os.environ.items():
        if name.startswith(prefix):
            config_data[name.replace(prefix, '', 1).lower()] = value
    return config_data
## Configuration
def _load_config(self, config_data):
    """Final authentication step: copy `config_data` into a Configuration.

    Parameters
    ==========
    config_data: a dictionary with configuration values (keys) to read
        into self.config_values
    """
    # Username and key are mandatory; report the first one missing.
    missing = [key for key in (self.CONFIG_NAME_USER, self.CONFIG_NAME_KEY)
               if key not in config_data]
    if missing:
        raise ValueError('Error: Missing %s in configuration.' % missing[0])

    configuration = Configuration()
    configuration.username = config_data[self.CONFIG_NAME_USER]
    configuration.password = config_data[self.CONFIG_NAME_KEY]

    # Optional settings.
    if self.CONFIG_NAME_PROXY in config_data:
        configuration.proxy = config_data[self.CONFIG_NAME_PROXY]
    if self.CONFIG_NAME_SSL_CA_CERT in config_data:
        configuration.ssl_ca_cert = config_data[self.CONFIG_NAME_SSL_CA_CERT]

    # Keep config values with class instance, and load api client!
    self.config_values = config_data
def read_config_file(self, config_data=None, quiet=False):
"""read_config_file is the first effort to get a username
and key to authenticate to the Kaggle API. Since we can get the
username and password from the environment, it's not required.
Parameters
==========
config_data: the Configuration object to save a username and
password, if defined
quiet: suppress verbose print of output (default is False)
"""
if config_data is None:
config_data = {}
if os.path.exists(self.config):
try:
if os.name != 'nt':
permissions = os.stat(self.config).st_mode
if (permissions & 4) or (permissions & 32):
print('Warning: Your Kaggle API key is readable by other '
'users on this system! To fix this, you can run ' +
'\'chmod 600 {}\''.format(self.config))
with open(self.config) as f:
config_data = json.load(f)
except:
pass
else:
# Warn the user that configuration will be reliant on environment
if not quiet:
print('No Kaggle API config file found, will use environment.')
return config_data
def _read_config_file(self):
"""read in the configuration file, a json file defined at self.config"""
try:
with open(self.config, 'r') as f:
config_data = json.load(f)
except FileNotFoundError:
config_data = {}
return config_data
def _write_config_file(self, config_data, indent=2):
"""write config data to file.
Parameters
==========
config_data: the Configuration object to save a username and
password, if defined
indent: number of tab indentations to use when writing json
"""
with open(self.config, 'w') as f:
json.dump(config_data, f, indent=indent)
def set_config_value(self, name, value, quiet=False):
"""a client helper function to set a configuration value, meaning
reading in the configuration file (if it exists), saving a new
config value, and then writing back
Parameters
==========
name: the name of the value to set (key in dictionary)
value: the value to set at the key
quiet: disable verbose output if True (default is False)
"""
config_data = self._read_config_file()
if value is not None:
# Update the config file with the value
config_data[name] = value
# Update the instance with the value
self.config_values[name] = value
# If defined by client, set and save!
self._write_config_file(config_data)
if not quiet:
self.print_config_value(name, separator=' is now set to: ')
def unset_config_value(self, name, quiet=False):
"""unset a configuration value
Parameters
==========
name: the name of the value to unset (remove key in dictionary)
quiet: disable verbose output if True (default is False)
"""
config_data = self._read_config_file()
if name in config_data:
del config_data[name]
self._write_config_file(config_data)
if not quiet:
self.print_config_value(name, separator=' is now set to: ')
def get_config_value(self, name):
""" return a config value (with key name) if it's in the config_values,
otherwise return None
Parameters
==========
name: the config value key to get
"""
if name in self.config_values:
return self.config_values[name]
def get_default_download_dir(self, *subdirs):
""" Get the download path for a file. If not defined, return default
from config.
Parameters
==========
subdirs: a single (or list of) subfolders under the basepath
"""
# Look up value for key "path" in the config
path = self.get_config_value(self.CONFIG_NAME_PATH)
# If not set in config, default to present working directory
if path is None:
return os.getcwd()
return os.path.join(path, *subdirs)
def print_config_value(self, name, prefix='- ', separator=': '):
    """Print one configuration value as `prefix + name + separator + value`.

    Parameters
    ==========
    name: the key of the config value in self.config_values to print
    prefix: the prefix to print
    separator: the separator to use (default is : )
    """
    value = self.config_values.get(name)
    value_out = 'None' if value is None else value
    print(prefix + name + separator + value_out)
def print_config_values(self, prefix='- '):
    """Print all of the well-known configuration values.

    Parameters
    ==========
    prefix: the character prefix to put before the printed config value
        defaults to "- "
    """
    print('Configuration values from ' + self.config_dir)
    for key in (self.CONFIG_NAME_USER, self.CONFIG_NAME_PATH,
                self.CONFIG_NAME_PROXY, self.CONFIG_NAME_COMPETITION):
        self.print_config_value(key, prefix=prefix)
def build_kaggle_client(self):
    """Construct a KaggleClient for the environment selected by CLI flags."""
    if '--staging' in self.args:
        env = KaggleEnv.STAGING
    elif '--admin' in self.args:
        env = KaggleEnv.ADMIN
    elif '--local' in self.args:
        env = KaggleEnv.LOCAL
    else:
        env = KaggleEnv.PROD
    verbose = '--verbose' in self.args or '-v' in self.args
    return KaggleClient(
        env=env,
        verbose=verbose,
        username=self.config_values['username'],
        password=self.config_values['key'])
def camel_to_snake(self, name):
    """
    :param name: field in camel case
    :return: field in snake case
    """
    # Two passes: first break acronym/word boundaries, then remaining
    # lower-to-upper transitions; finally lowercase everything.
    with_word_breaks = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    snake = re.sub('([a-z0-9])([A-Z])', r'\1_\2', with_word_breaks)
    return snake.lower()
def lookup_enum(self, enum_class, item_name):
item = self.camel_to_snake(item_name).upper()
try:
return enum_class[item]
except KeyError:
prefix = self.camel_to_snake(enum_class.__name__).upper()
return enum_class[f'{prefix}_{self.camel_to_snake(item_name).upper()}']
def short_enum_name(self, value):
full_name = str(value)
names = full_name.split('.')
prefix_len = len(self.camel_to_snake(names[0])) + 1 # underscore
return names[1][prefix_len:].lower()
## Competitions
def competitions_list(self,
                      group=None,
                      category=None,
                      sort_by=None,
                      page=1,
                      search=None):
    """ Make a call to list competitions, format the response, and return
    a list of ApiCompetition instances

    Parameters
    ==========
    page: the page to return (default is 1)
    search: a search term to use (default is empty string)
    sort_by: how to sort the result, see valid_competition_sort_by for options
    category: category to filter result to; use 'all' to get closed competitions
    group: group to filter result to
    """
    # Each filter is validated against the class-level whitelist, then
    # translated from its camelCase CLI spelling into the API enum member.
    if group:
        if group not in self.valid_competition_groups:
            raise ValueError('Invalid group specified. Valid options are ' +
                             str(self.valid_competition_groups))
        if group == 'all':
            # 'all' has no direct enum spelling; map it explicitly.
            group = CompetitionListTab.COMPETITION_LIST_TAB_EVERYTHING
        else:
            group = self.lookup_enum(CompetitionListTab, group)

    if category:
        if category not in self.valid_competition_categories:
            raise ValueError('Invalid category specified. Valid options are ' +
                             str(self.valid_competition_categories))
        category = self.lookup_enum(HostSegment, category)

    if sort_by:
        if sort_by not in self.valid_competition_sort_by:
            raise ValueError('Invalid sort_by specified. Valid options are ' +
                             str(self.valid_competition_sort_by))
        sort_by = self.lookup_enum(CompetitionSortBy, sort_by)

    with self.build_kaggle_client() as kaggle:
        request = ApiListCompetitionsRequest()
        request.group = group
        request.page = page
        request.category = category
        request.search = search
        request.sort_by = sort_by
        response = kaggle.competitions.competition_api_client.list_competitions(
            request)
        return response.competitions

def competitions_list_cli(self,
                          group=None,
                          category=None,
                          sort_by=None,
                          page=1,
                          search=None,
                          csv_display=False):
    """ A wrapper for competitions_list for the client.

    Parameters
    ==========
    group: group to filter result to
    category: category to filter result to
    sort_by: how to sort the result, see valid_sort_by for options
    page: the page to return (default is 1)
    search: a search term to use (default is empty string)
    csv_display: if True, print comma separated values
    """
    competitions = self.competitions_list(
        group=group,
        category=category,
        sort_by=sort_by,
        page=page,
        search=search)
    if competitions:
        if csv_display:
            self.print_csv(competitions, self.competition_fields)
        else:
            self.print_table(competitions, self.competition_fields)
    else:
        print('No competitions found')
def competition_submit_code(self, file_name, message, competition, kernel_slug=None, kernel_version=None, quiet=False):
    """ Submit a competition.

    Parameters
    ==========
    file_name: the name of the output file created by the kernel
    message: the submission description
    competition: the competition name; if not given use the 'competition' config value
    kernel_slug: the <owner>/<notebook> of the notebook to use for a code competition
    kernel_version: the version number, returned by 'kaggle kernels push ...'
    quiet: suppress verbose output (default is False)
    """
    if competition is None:
        # Fall back to the 'competition' value stored in the config file.
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)

    if competition is None:
        raise ValueError('No competition specified')
    else:
        if kernel_version is None:
            raise ValueError('Kernel version must be specified')
        with self.build_kaggle_client() as kaggle:
            submit_request = ApiCreateCodeSubmissionRequest()
            submit_request.file_name = file_name
            submit_request.competition_name = competition
            submit_request.kernel_slug = kernel_slug
            submit_request.kernel_version = kernel_version
            submit_request.submission_description = message
            submit_response = kaggle.competitions.competition_api_client.create_code_submission(
                submit_request)
            return submit_response

def competition_submit(self, file_name, message, competition, quiet=False):
    """ Submit a competition.

    Parameters
    ==========
    file_name: the competition metadata file
    message: the submission description
    competition: the competition name; if not given use the 'competition' config value
    quiet: suppress verbose output (default is False)
    """
    if competition is None:
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)

    if competition is None:
        raise ValueError('No competition specified')
    else:
        with self.build_kaggle_client() as kaggle:
            # Phase 1: register the upload and obtain a resumable-upload URL.
            request = ApiStartSubmissionUploadRequest()
            request.competition_name = competition
            request.file_name = os.path.basename(file_name)
            request.content_length = os.path.getsize(file_name)
            request.last_modified_epoch_seconds = int(os.path.getmtime(file_name))
            response = kaggle.competitions.competition_api_client.start_submission_upload(
                request)
            # Phase 2: push the file bytes.
            upload_status = self.upload_complete(file_name, response.create_url,
                                                 quiet)
            if upload_status != ResumableUploadResult.COMPLETE:
                # Actual error is printed during upload_complete. Not
                # ideal but changing would not be backwards compatible
                return "Could not submit to competition"

            # Phase 3: create the submission referencing the uploaded blob.
            submit_request = ApiCreateSubmissionRequest()
            submit_request.competition_name = competition
            submit_request.blob_file_tokens = response.token
            submit_request.submission_description = message
            submit_response = kaggle.competitions.competition_api_client.create_submission(
                submit_request)
            return submit_response

def competition_submit_cli(self,
                           file_name,
                           message,
                           competition,
                           kernel=None,
                           version=None,
                           competition_opt=None,
                           quiet=False):
    """ Submit a competition using the client. Arguments are same as for
    competition_submit, except for extra arguments provided here.

    Parameters
    ==========
    file_name: the competition metadata file
    message: the submission description
    competition: the competition name; if not given use the 'competition' config value
    kernel: the name of the kernel to submit to a code competition
    version: the version of the kernel to submit to a code competition, e.g. '1'
    quiet: suppress verbose output (default is False)
    competition_opt: an alternative competition option provided by cli
    """
    # Code-competition submissions need both pieces; reject a lone one.
    if kernel and not version or version and not kernel:
        raise ValueError('Code competition submissions require both the output file name and the version label')
    competition = competition or competition_opt
    try:
        if kernel:
            submit_result = self.competition_submit_code(file_name, message, competition,
                                                         kernel, version, quiet)
        else:
            submit_result = self.competition_submit(file_name, message, competition,
                                                    quiet)
    except RequestException as e:
        if e.response and e.response.status_code == 404:
            print('Could not find competition - please verify that you '
                  'entered the correct competition ID and that the '
                  'competition is still accepting submissions.')
            return None
        else:
            raise e
    return submit_result.message
def competition_submissions(self,
                            competition,
                            group=None,
                            sort=None,
                            page_token=0,
                            page_size=20):
    """ Get the list of Submission for a particular competition.

    Parameters
    ==========
    competition: the name of the competition
    group: the submission group
    sort: the sort-by option
    page_token: token for pagination
    page_size: the number of items per page
    """
    with self.build_kaggle_client() as kaggle:
        request = ApiListSubmissionsRequest()
        request.competition_name = competition
        # NOTE(review): page_token is assigned to `request.page`, and
        # `page_size` is accepted but never set on the request — confirm
        # against ApiListSubmissionsRequest's fields.
        request.page = page_token
        request.group = group
        request.sort_by = sort
        response = kaggle.competitions.competition_api_client.list_submissions(
            request)
        return response.submissions

def competition_submissions_cli(self,
                                competition=None,
                                competition_opt=None,
                                csv_display=False,
                                page_token=None,
                                page_size=20,
                                quiet=False):
    """ A wrapper to competition_submission, will return either json or csv
    to the user. Additional parameters are listed below, see
    competition_submissions for rest.

    Parameters
    ==========
    competition: the name of the competition. If None, look to config
    competition_opt: an alternative competition option provided by cli
    csv_display: if True, print comma separated values
    page_token: token for pagination
    page_size: the number of items per page
    quiet: suppress verbose output (default is False)
    """
    competition = competition or competition_opt
    if competition is None:
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)

    if competition is None:
        raise ValueError('No competition specified')
    else:
        submissions = self.competition_submissions(
            competition, page_token=page_token, page_size=page_size)
        if submissions:
            if csv_display:
                self.print_csv(submissions, self.submission_fields)
            else:
                self.print_table(submissions, self.submission_fields)
        else:
            print('No submissions found')
def competition_list_files(self, competition, page_token=None, page_size=20):
    """ List files for a competition.

    Parameters
    ==========
    competition: the name of the competition
    page_token: the page token for pagination
    page_size: the number of items per page
    """
    with self.build_kaggle_client() as kaggle:
        request = ApiListDataFilesRequest()
        request.competition_name = competition
        request.page_token = page_token
        request.page_size = page_size
        response = kaggle.competitions.competition_api_client.list_data_files(
            request)
        return response

def competition_list_files_cli(self,
                               competition,
                               competition_opt=None,
                               csv_display=False,
                               page_token=None,
                               page_size=20,
                               quiet=False):
    """ List files for a competition, if it exists.

    Parameters
    ==========
    competition: the name of the competition. If None, look to config
    competition_opt: an alternative competition option provided by cli
    csv_display: if True, print comma separated values
    page_token: the page token for pagination
    page_size: the number of items per page
    quiet: suppress verbose output (default is False)
    """
    competition = competition or competition_opt
    if competition is None:
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)

    if competition is None:
        raise ValueError('No competition specified')
    else:
        result = self.competition_list_files(competition, page_token, page_size)
        next_page_token = result.next_page_token
        if next_page_token:
            # Surface the continuation token so the user can page further.
            print('Next Page Token = {}'.format(next_page_token))
        if result:
            if csv_display:
                self.print_csv(result.files, self.competition_file_fields,
                               self.competition_file_labels)
            else:
                self.print_table(result.files, self.competition_file_fields,
                                 self.competition_file_labels)
        else:
            print('No files found')
def competition_download_file(self,
                              competition,
                              file_name,
                              path=None,
                              force=False,
                              quiet=False):
    """ Download a competition file to a designated location, or use
    a default location.

    Parameters
    =========
    competition: the name of the competition
    file_name: the configuration file name
    path: a path to download the file to
    force: force the download if the file already exists (default False)
    quiet: suppress verbose output (default is False)
    """
    if path is None:
        effective_path = self.get_default_download_dir('competitions',
                                                       competition)
    else:
        effective_path = path

    with self.build_kaggle_client() as kaggle:
        request = ApiDownloadDataFileRequest()
        request.competition_name = competition
        request.file_name = file_name
        response = kaggle.competitions.competition_api_client.download_data_file(
            request)
        # Derive the local file name from the final redirect URL.
        url = response.history[0].url
        outfile = os.path.join(effective_path, url.split('?')[0].split('/')[-1])
        if force or self.download_needed(response, outfile, quiet):
            self.download_file(response, outfile, kaggle.http_client(), quiet,
                               not force)

def competition_download_files(self,
                               competition,
                               path=None,
                               force=False,
                               quiet=True):
    """ Download all competition files.

    Parameters
    =========
    competition: the name of the competition
    path: a path to download the file to
    force: force the download if the file already exists (default False)
    quiet: suppress verbose output (default is True)
    """
    if path is None:
        effective_path = self.get_default_download_dir('competitions',
                                                       competition)
    else:
        effective_path = path

    with self.build_kaggle_client() as kaggle:
        request = ApiDownloadDataFilesRequest()
        request.competition_name = competition
        response = kaggle.competitions.competition_api_client.download_data_files(
            request)
        url = response.url.split('?')[0]
        outfile = os.path.join(effective_path,
                               competition + '.' + url.split('.')[-1])
        if force or self.download_needed(response, outfile, quiet):
            # NOTE(review): competition_download_file passes an extra
            # kaggle.http_client() argument to download_file here; confirm
            # which call matches download_file's signature.
            self.download_file(response, outfile, quiet, not force)

def competition_download_cli(self,
                             competition,
                             competition_opt=None,
                             file_name=None,
                             path=None,
                             force=False,
                             quiet=False):
    """ A wrapper to competition_download_files, but first will parse input
    from API client. Additional parameters are listed here, see
    competition_download for remaining.

    Parameters
    =========
    competition: the name of the competition
    competition_opt: an alternative competition option provided by cli
    file_name: the configuration file name
    path: a path to download the file to
    force: force the download if the file already exists (default False)
    quiet: suppress verbose output (default is False)
    """
    competition = competition or competition_opt
    if competition is None:
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)

    if competition is None:
        raise ValueError('No competition specified')
    else:
        # No file name means "download the whole archive".
        if file_name is None:
            self.competition_download_files(competition, path, force, quiet)
        else:
            self.competition_download_file(competition, file_name, path, force,
                                           quiet)
def competition_leaderboard_download(self, competition, path, quiet=True):
""" Download a competition leaderboard.
Parameters
=========
competition: the name of the competition
path: a path to download the file to
quiet: suppress verbose output (default is True)
"""
with self.build_kaggle_client() as kaggle:
request = ApiDownloadLeaderboardRequest()
request.competition_name = competition
response = kaggle.competitions.competition_api_client.download_leaderboard(
request)
if path is None:
effective_path = self.get_default_download_dir('competitions',
competition)
else:
effective_path = path
file_name = competition + '.zip'
outfile = os.path.join(effective_path, file_name)
self.download_file(response, outfile, quiet)
def competition_leaderboard_view(self, competition):
    """Return the leaderboard submissions for a competition.

    Parameters
    ==========
    competition: the competition name to view the leaderboard for
    """
    with self.build_kaggle_client() as kaggle:
        leaderboard_request = ApiGetLeaderboardRequest()
        leaderboard_request.competition_name = competition
        result = kaggle.competitions.competition_api_client.get_leaderboard(
            leaderboard_request)
    return result.submissions
def competition_leaderboard_cli(self,
                                competition,
                                competition_opt=None,
                                path=None,
                                view=False,
                                download=False,
                                csv_display=False,
                                quiet=False):
    """CLI wrapper around competition_leaderboard_view/download that prints
    the results as a table or comma separated values.

    Parameters
    ==========
    competition: the competition name to view the leaderboard for
    competition_opt: an alternative competition option provided by cli
    path: a path to download to, if download is True
    view: if True, show the results in the terminal as csv or table
    download: if True, download the entire leaderboard
    csv_display: if True, print comma separated values instead of table
    quiet: suppress verbose output (default is False)
    """
    competition = competition or competition_opt
    # At least one action must be requested.
    if not (view or download):
        raise ValueError('Either --show or --download must be specified')
    if competition is None:
        competition = self.get_config_value(self.CONFIG_NAME_COMPETITION)
        if competition is not None and not quiet:
            print('Using competition: ' + competition)
    if competition is None:
        raise ValueError('No competition specified')
    if download:
        self.competition_leaderboard_download(competition, path, quiet)
    if not view:
        return
    results = self.competition_leaderboard_view(competition)
    if not results:
        print('No results found')
    elif csv_display:
        self.print_csv(results, self.competition_leaderboard_fields)
    else:
        self.print_table(results, self.competition_leaderboard_fields)
def dataset_list(self,
                 sort_by=None,
                 size=None,
                 file_type=None,
                 license_name=None,
                 tag_ids=None,
                 search=None,
                 user=None,
                 mine=False,
                 page=1,
                 max_size=None,
                 min_size=None):
    """ Return a list of datasets.
    Parameters
    ==========
    sort_by: how to sort the result, see valid_dataset_sort_bys for options
    size: Deprecated; passing any truthy value raises ValueError
    file_type: the format, see valid_dataset_file_types for string options
    license_name: string descriptor for license, see valid_dataset_license_names
    tag_ids: tag identifiers to filter the search
    search: a search term to use (default is empty string)
    user: username to filter the search to
    mine: boolean if True, group is changed to "my" to return personal
    page: the page to return (default is 1)
    max_size: the maximum size of the dataset to return (bytes)
    min_size: the minimum size of the dataset to return (bytes)
    """
    # Validate and translate each human-readable filter into its API enum.
    if sort_by:
        if sort_by not in self.valid_dataset_sort_bys:
            raise ValueError('Invalid sort by specified. Valid options are ' +
                             str(self.valid_dataset_sort_bys))
        else:
            sort_by = self.lookup_enum(DatasetSortBy, sort_by)
    # 'size' was replaced by the min/max byte filters below.
    if size:
        raise ValueError(
            'The --size parameter has been deprecated. ' +
            'Please use --max-size and --min-size to filter dataset sizes.')
    if file_type:
        if file_type not in self.valid_dataset_file_types:
            raise ValueError('Invalid file type specified. Valid options are ' +
                             str(self.valid_dataset_file_types))
        else:
            file_type = self.lookup_enum(DatasetFileTypeGroup, file_type)
    if license_name:
        if license_name not in self.valid_dataset_license_names:
            raise ValueError('Invalid license specified. Valid options are ' +
                             str(self.valid_dataset_license_names))
        else:
            license_name = self.lookup_enum(DatasetLicenseGroup, license_name)
    if int(page) <= 0:
        raise ValueError('Page number must be >= 1')
    # Size-range sanity checks (only applied for values actually provided).
    if max_size and min_size:
        if int(max_size) < int(min_size):
            raise ValueError('Max Size must be max_size >= min_size')
    if max_size and int(max_size) <= 0:
        raise ValueError('Max Size must be > 0')
    elif min_size and int(min_size) < 0:
        raise ValueError('Min Size must be >= 0')
    # Selection group: public by default; 'mine' and 'user' are exclusive.
    group = DatasetSelectionGroup.DATASET_SELECTION_GROUP_PUBLIC
    if mine:
        group = DatasetSelectionGroup.DATASET_SELECTION_GROUP_MY
        if user:
            raise ValueError('Cannot specify both mine and a user')
    if user:
        group = DatasetSelectionGroup.DATASET_SELECTION_GROUP_USER
    with self.build_kaggle_client() as kaggle:
        request = ApiListDatasetsRequest()
        request.group = group
        request.sort_by = sort_by
        request.file_type = file_type
        request.license = license_name
        request.tag_ids = tag_ids
        request.search = search
        request.user = user
        request.page = page
        request.max_size = max_size
        request.min_size = min_size
        response = kaggle.datasets.dataset_api_client.list_datasets(request)
        return response.datasets
def dataset_list_cli(self,
                     sort_by=None,
                     size=None,
                     file_type=None,
                     license_name=None,
                     tag_ids=None,
                     search=None,
                     user=None,
                     mine=False,
                     page=1,
                     csv_display=False,
                     max_size=None,
                     min_size=None):
    """CLI wrapper around dataset_list; prints the matching datasets as a
    table or as comma separated values.

    Parameters
    ==========
    sort_by: how to sort the result, see valid_dataset_sort_bys for options
    size: DEPRECATED
    file_type: the format, see valid_dataset_file_types for string options
    license_name: string descriptor for license, see valid_dataset_license_names
    tag_ids: tag identifiers to filter the search
    search: a search term to use (default is empty string)
    user: username to filter the search to
    mine: boolean if True, group is changed to "my" to return personal
    page: the page to return (default is 1)
    csv_display: if True, print comma separated values instead of table
    max_size: the maximum size of the dataset to return (bytes)
    min_size: the minimum size of the dataset to return (bytes)
    """
    matches = self.dataset_list(sort_by, size, file_type, license_name,
                                tag_ids, search, user, mine, page, max_size,
                                min_size)
    if not matches:
        print('No datasets found')
        return
    renderer = self.print_csv if csv_display else self.print_table
    renderer(matches, self.dataset_fields, self.dataset_labels)
def dataset_metadata_prep(self, dataset, path):
    """Resolve a dataset reference into (owner_slug, dataset_slug, path).

    Accepts either 'owner/dataset' or a bare slug (owner then comes from
    the configured user). When *path* is None a default download directory
    is derived from the owner and slug.
    """
    if dataset is None:
        raise ValueError('A dataset must be specified')
    if '/' in dataset:
        self.validate_dataset_string(dataset)
        parts = dataset.split('/')
        owner_slug, dataset_slug = parts[0], parts[1]
    else:
        owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
        dataset_slug = dataset
    effective_path = (path if path is not None else
                      self.get_default_download_dir('datasets', owner_slug,
                                                    dataset_slug))
    return (owner_slug, dataset_slug, effective_path)
def dataset_metadata_update(self, dataset, path):
    """Update a dataset's metadata from a local metadata file.

    Reads the dataset-metadata file found under *path* (or the default
    location), builds a DatasetSettings payload from it, and sends it to
    the update endpoint. Prints any server-side errors and exits with
    status 1 when the update is rejected.

    Parameters
    ==========
    dataset: dataset reference, 'owner/dataset' or bare slug
    path: directory holding the metadata file (None for the default)
    """
    (owner_slug, dataset_slug,
     effective_path) = self.dataset_metadata_prep(dataset, path)
    meta_file = self.get_dataset_metadata_file(effective_path)
    with open(meta_file, 'r') as f:
        # The metadata file is written as a JSON-encoded string (see
        # dataset_metadata), so it is decoded twice: once to recover the
        # string, once to parse the actual object.
        s = json.load(f)
        metadata = json.loads(s)
    update_settings = DatasetSettings()
    update_settings.title = metadata.get('title') or ''
    update_settings.subtitle = metadata.get('subtitle') or ''
    update_settings.description = metadata.get('description') or ''
    update_settings.is_private = metadata.get('isPrivate') or False
    update_settings.licenses = [
        self._new_license(l['name']) for l in metadata['licenses']
    ] if metadata.get('licenses') else []
    update_settings.keywords = metadata.get('keywords')
    update_settings.collaborators = [
        self._new_collaborator(c['username'], c['role'])
        for c in metadata['collaborators']
    ] if metadata.get('collaborators') else []
    update_settings.data = metadata.get('data')
    request = ApiUpdateDatasetMetadataRequest()
    request.owner_slug = owner_slug
    request.dataset_slug = dataset_slug
    request.settings = update_settings
    with self.build_kaggle_client() as kaggle:
        response = kaggle.datasets.dataset_api_client.update_dataset_metadata(
            request)
        if len(response.errors) > 0:
            # Plain loop instead of a side-effect list comprehension.
            for e in response.errors:
                print(e['message'])
            exit(1)
@staticmethod
def _new_license(name):
    """Build a SettingsLicense message carrying the given license *name*."""
    license_message = SettingsLicense()
    license_message.name = name
    return license_message
@staticmethod
def _new_collaborator(name, role):
    """Build a DatasetCollaborator message for username *name* with *role*."""
    collaborator = DatasetCollaborator()
    collaborator.username = name
    collaborator.role = role
    return collaborator
def dataset_metadata(self, dataset, path):
    """Download a dataset's metadata into a local metadata file.

    Parameters
    ==========
    dataset: dataset reference, 'owner/dataset' or bare slug
    path: directory to write the metadata file to (None for the default)

    Returns the path of the written metadata file. Raises Exception when
    the server reports an error for the dataset.
    """
    (owner_slug, dataset_slug,
     effective_path) = self.dataset_metadata_prep(dataset, path)
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(effective_path, exist_ok=True)
    with self.build_kaggle_client() as kaggle:
        request = ApiGetDatasetMetadataRequest()
        request.owner_slug = owner_slug
        request.dataset_slug = dataset_slug
        response = kaggle.datasets.dataset_api_client.get_dataset_metadata(
            request)
        if response.error_message:
            raise Exception(response.error_message)
    meta_file = os.path.join(effective_path, self.DATASET_METADATA_FILE)
    with open(meta_file, 'w') as f:
        # to_json produces a JSON string, so the file ends up holding a
        # JSON-encoded string; dataset_metadata_update decodes it twice.
        json.dump(
            response.to_json(response.info),
            f,
            indent=2,
            default=lambda o: o.__dict__)
    return meta_file
def dataset_metadata_cli(self, dataset, path, update, dataset_opt=None):
    """CLI entry point: download dataset metadata, or push a local update
    when *update* is truthy."""
    dataset = dataset or dataset_opt
    if update:
        print('updating dataset metadata')
        self.dataset_metadata_update(dataset, path)
        print('successfully updated dataset metadata')
        return
    meta_file = self.dataset_metadata(dataset, path)
    print('Downloaded metadata to ' + meta_file)
def dataset_list_files(self, dataset, page_token=None, page_size=20):
    """List files for a dataset.

    Parameters
    ==========
    dataset: the string identifier of the dataset,
             in format [owner]/[dataset-name]
    page_token: the page token for pagination
    page_size: the number of items per page
    """
    if dataset is None:
        raise ValueError('A dataset must be specified')
    # split_dataset_string also extracts an optional version suffix.
    owner_slug, dataset_slug, dataset_version_number = self.split_dataset_string(
        dataset)
    with self.build_kaggle_client() as kaggle:
        files_request = ApiListDatasetFilesRequest()
        files_request.owner_slug = owner_slug
        files_request.dataset_slug = dataset_slug
        files_request.dataset_version_number = dataset_version_number
        files_request.page_token = page_token
        files_request.page_size = page_size
        return kaggle.datasets.dataset_api_client.list_dataset_files(
            files_request)
def dataset_list_files_cli(self,
                           dataset,
                           dataset_opt=None,
                           csv_display=False,
                           page_token=None,
                           page_size=20):
    """ A wrapper to dataset_list_files for the client
    (list files for a dataset).
    Parameters
    ==========
    dataset: the string identified of the dataset
             should be in format [owner]/[dataset-name]
    dataset_opt: an alternative option to providing a dataset
    csv_display: if True, print comma separated values instead of table
    page_token: the page token for pagination
    page_size: the number of items per page
    """
    dataset = dataset or dataset_opt
    result = self.dataset_list_files(dataset, page_token, page_size)
    if result:
        if result.error_message:
            print(result.error_message)
        else:
            # Surface the pagination token so the caller can fetch the next page.
            next_page_token = result.next_page_token
            if next_page_token:
                print('Next Page Token = {}'.format(next_page_token))
            fields = ['name', 'size', 'creationDate']
            # NOTE(review): this monkey-patches the CLASS attribute so the
            # 'size' field resolves to total_bytes for printing; it affects
            # every ApiDatasetFile instance process-wide — confirm intended.
            ApiDatasetFile.size = ApiDatasetFile.total_bytes
            if csv_display:
                self.print_csv(result.files, fields)
            else:
                self.print_table(result.files, fields)
    else:
        print('No files found')
def dataset_status(self, dataset):
    """Return the status of a dataset as a lowercase string.

    Parameters
    ==========
    dataset: the string identifier of the dataset,
             in format [owner]/[dataset-name] or a bare slug (owner then
             comes from the configured user)
    """
    if dataset is None:
        raise ValueError('A dataset must be specified')
    if '/' in dataset:
        self.validate_dataset_string(dataset)
        parts = dataset.split('/')
        owner_slug, dataset_slug = parts[0], parts[1]
    else:
        owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
        dataset_slug = dataset
    with self.build_kaggle_client() as kaggle:
        status_request = ApiGetDatasetStatusRequest()
        status_request.owner_slug = owner_slug
        status_request.dataset_slug = dataset_slug
        response = kaggle.datasets.dataset_api_client.get_dataset_status(
            status_request)
    return response.status.name.lower()
def dataset_status_cli(self, dataset, dataset_opt=None):
    """CLI wrapper for dataset_status; *dataset_opt* is an alternative way
    of supplying the dataset reference."""
    return self.dataset_status(dataset if dataset else dataset_opt)
def dataset_download_file(self,
                          dataset,
                          file_name,
                          path=None,
                          force=False,
                          quiet=True,
                          licenses=None):
    """ Download a single file for a dataset.
    Parameters
    ==========
    dataset: the string identified of the dataset
             should be in format [owner]/[dataset-name]
    file_name: the dataset configuration file
    path: if defined, download to this location
    force: force the download if the file already exists (default False)
    quiet: suppress verbose output (default is True)
    licenses: a list of license names, e.g. ['CC0-1.0'] (default None)

    Returns True when the file was downloaded, False when skipped.
    """
    # Use a None sentinel instead of a mutable [] default (shared between
    # calls); behavior for callers that omit the argument is unchanged.
    if licenses is None:
        licenses = []
    if '/' in dataset:
        self.validate_dataset_string(dataset)
        owner_slug, dataset_slug, dataset_version_number = self.split_dataset_string(
            dataset)
    else:
        # Bare slug: owner defaults to the configured user, latest version.
        owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
        dataset_slug = dataset
        dataset_version_number = None
    if path is None:
        effective_path = self.get_default_download_dir('datasets', owner_slug,
                                                       dataset_slug)
    else:
        effective_path = path
    self._print_dataset_url_and_license(owner_slug, dataset_slug,
                                        dataset_version_number, licenses)
    with self.build_kaggle_client() as kaggle:
        request = ApiDownloadDatasetRequest()
        request.owner_slug = owner_slug
        request.dataset_slug = dataset_slug
        request.dataset_version_number = dataset_version_number
        request.file_name = file_name
        response = kaggle.datasets.dataset_api_client.download_dataset(request)
    # Derive the local file name from the redirect target, stripping any
    # query string the signed URL carries.
    url = response.history[0].url
    outfile = os.path.join(effective_path, url.split('?')[0].split('/')[-1])
    if force or self.download_needed(response, outfile, quiet):
        self.download_file(response, outfile, quiet, not force)
        return True
    return False
def dataset_download_files(self,
                           dataset,
                           path=None,
                           force=False,
                           quiet=True,
                           unzip=False,
                           licenses=None):
    """ Download all files for a dataset.
    Parameters
    ==========
    dataset: the string identified of the dataset
             should be in format [owner]/[dataset-name]
    path: the path to download the dataset to
    force: force the download if the file already exists (default False)
    quiet: suppress verbose output (default is True)
    unzip: if True, unzip files upon download (default is False)
    licenses: a list of license names, e.g. ['CC0-1.0'] (default None)
    """
    # Use a None sentinel instead of a mutable [] default (shared between
    # calls); behavior for callers that omit the argument is unchanged.
    if licenses is None:
        licenses = []
    if dataset is None:
        raise ValueError('A dataset must be specified')
    owner_slug, dataset_slug, dataset_version_number = self.split_dataset_string(
        dataset)
    if path is None:
        effective_path = self.get_default_download_dir('datasets', owner_slug,
                                                       dataset_slug)
    else:
        effective_path = path
    self._print_dataset_url_and_license(owner_slug, dataset_slug,
                                        dataset_version_number, licenses)
    with self.build_kaggle_client() as kaggle:
        request = ApiDownloadDatasetRequest()
        request.owner_slug = owner_slug
        request.dataset_slug = dataset_slug
        request.dataset_version_number = dataset_version_number
        response = kaggle.datasets.dataset_api_client.download_dataset(request)
    outfile = os.path.join(effective_path, dataset_slug + '.zip')
    downloaded = force or self.download_needed(response, outfile, quiet)
    if downloaded:
        self.download_file(response, outfile, quiet, not force)
    if downloaded and unzip:
        try:
            with zipfile.ZipFile(outfile) as z:
                z.extractall(effective_path)
        except zipfile.BadZipFile as e:
            # Chain the original error so tracebacks show the root cause.
            raise ValueError(
                f"The file {outfile} is corrupted or not a valid zip file. "
                "Please report this issue at https://www.github.com/kaggle/kaggle-api"
            ) from e
        except FileNotFoundError:
            raise FileNotFoundError(
                f"The file {outfile} was not found. "
                "Please report this issue at https://www.github.com/kaggle/kaggle-api"
            )
        except Exception as e:
            raise RuntimeError(
                f"An unexpected error occurred: {e}. "
                "Please report this issue at https://www.github.com/kaggle/kaggle-api"
            ) from e
        # The archive is only removed after a successful extraction.
        try:
            os.remove(outfile)
        except OSError as e:
            print('Could not delete zip file, got %s' % e)
def _print_dataset_url_and_license(self, owner_slug, dataset_slug,
dataset_version_number, licenses):
if dataset_version_number is None:
print('Dataset URL: https://www.kaggle.com/datasets/%s/%s' %
(owner_slug, dataset_slug))
else:
print('Dataset URL: https://www.kaggle.com/datasets/%s/%s/versions/%s' %
(owner_slug, dataset_slug, dataset_version_number))
if len(licenses) > 0:
print('License(s): %s' % (','.join(licenses)))
def dataset_download_cli(self,
                         dataset,
                         dataset_opt=None,
                         file_name=None,
                         path=None,
                         unzip=False,
                         force=False,
                         quiet=False):
    """ Client wrapper for dataset_download_files and download dataset file,
    either for a specific file (when file_name is provided),
    or all files for a dataset (plural).
    Parameters
    ==========
    dataset: the string identified of the dataset
             should be in format [owner]/[dataset-name]
    dataset_opt: an alternative option to providing a dataset
    file_name: the dataset configuration file
    path: the path to download the dataset to
    force: force the download if the file already exists (default False)
    quiet: suppress verbose output (default is False)
    unzip: if True, unzip files upon download (default is False)
    """
    dataset = dataset or dataset_opt
    owner_slug, dataset_slug, _ = self.split_dataset_string(dataset)
    # NOTE(review): this still goes through the legacy swagger path
    # (metadata_get_with_http_info) while the downloads below use the new
    # client — confirm the legacy endpoint remains available.
    metadata = self.process_response(
        self.metadata_get_with_http_info(owner_slug, dataset_slug))
    if 'info' in metadata and 'licenses' in metadata['info']:
        # license_objs format is like: [{ 'name': 'CC0-1.0' }]
        license_objs = metadata['info']['licenses']
        licenses = [
            license_obj['name']
            for license_obj in license_objs
            if 'name' in license_obj
        ]
    else:
        # Fall back to a placeholder message printed in the license line.
        licenses = [
            'Error retrieving license. Please visit the Dataset URL to view license information.'
        ]
    if file_name is None:
        self.dataset_download_files(
            dataset,
            path=path,
            unzip=unzip,
            force=force,
            quiet=quiet,
            licenses=licenses)
    else:
        self.dataset_download_file(
            dataset,
            file_name,
            path=path,
            force=force,
            quiet=quiet,
            licenses=licenses)
def _upload_blob(self, path, quiet, blob_type, upload_context):
    """ Upload a file.
    Parameters
    ==========
    path: the complete path to upload
    quiet: suppress verbose output (default is False)
    blob_type (ApiBlobType): To which entity the file/blob refers
    upload_context (ResumableUploadContext): Context for resumable uploads
    """
    file_name = os.path.basename(path)
    content_length = os.path.getsize(path)
    last_modified_epoch_seconds = int(os.path.getmtime(path))
    start_blob_upload_request = ApiStartBlobUploadRequest()
    start_blob_upload_request.type = blob_type
    start_blob_upload_request.name = file_name
    start_blob_upload_request.content_length = content_length
    start_blob_upload_request.last_modified_epoch_seconds = last_modified_epoch_seconds
    file_upload = upload_context.new_resumable_file_upload(
        path, start_blob_upload_request)
    # Retry/resume loop: each iteration either finishes the upload, resumes
    # an interrupted one, or re-initiates from scratch.
    for i in range(0, self.MAX_UPLOAD_RESUME_ATTEMPTS):
        if file_upload.upload_complete:
            # NOTE(review): this early exit returns the upload object while
            # the fall-through below returns file_upload.get_token() —
            # confirm callers accept both shapes.
            return file_upload
        if not file_upload.can_resume:
            # Initiate upload on Kaggle backend to get the url and token.
            with self.build_kaggle_client() as kaggle:
                method = kaggle.blobs.blob_api_client.start_blob_upload
                start_blob_upload_response = self.with_retry(method)(
                    file_upload.start_blob_upload_request)
                file_upload.upload_initiated(start_blob_upload_response)
        upload_result = self.upload_complete(
            path,
            file_upload.start_blob_upload_response.create_url,
            quiet,
            resume=file_upload.can_resume)
        if upload_result == ResumableUploadResult.INCOMPLETE:
            continue  # Continue (i.e., retry/resume) only if the upload is incomplete.
        if upload_result == ResumableUploadResult.COMPLETE:
            file_upload.upload_completed()
        break
    return file_upload.get_token()
def dataset_create_version(self,
                           folder,
                           version_notes,
                           quiet=False,
                           convert_to_csv=True,
                           delete_old_versions=False,
                           dir_mode='skip'):
    """ Create a version of a dataset.
    Parameters
    ==========
    folder: the folder with the dataset configuration / data files
    version_notes: notes to add for the version
    quiet: suppress verbose output (default is False)
    convert_to_csv: on upload, if data should be converted to csv
        (NOTE(review): accepted but never referenced in this body)
    delete_old_versions: if True, do that (default False)
    dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    meta_file = self.get_dataset_metadata_file(folder)
    # read json
    with open(meta_file) as f:
        meta_data = json.load(f)
    # Either an 'id' slug or a numeric 'id_no' must identify the dataset.
    ref = self.get_or_default(meta_data, 'id', None)
    id_no = self.get_or_default(meta_data, 'id_no', None)
    if not ref and not id_no:
        raise ValueError('ID or slug must be specified in the metadata')
    elif ref and ref == self.config_values[
            self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
        raise ValueError(
            'Default slug detected, please change values before uploading')
    subtitle = meta_data.get('subtitle')
    if subtitle and (len(subtitle) < 20 or len(subtitle) > 80):
        raise ValueError('Subtitle length must be between 20 and 80 characters')
    resources = meta_data.get('resources')
    if resources:
        self.validate_resources(folder, resources)
    description = meta_data.get('description')
    keywords = self.get_or_default(meta_data, 'keywords', [])
    body = ApiCreateDatasetVersionRequestBody()
    body.version_notes = version_notes
    body.subtitle = subtitle
    body.description = description
    body.files = []
    body.category_ids = keywords
    body.delete_old_versions = delete_old_versions
    with self.build_kaggle_client() as kaggle:
        # Numeric id takes the by-id endpoint; otherwise owner/slug is used.
        if id_no:
            request = ApiCreateDatasetVersionByIdRequest()
            request.id = id_no
            message = kaggle.datasets.dataset_api_client.create_dataset_version_by_id
        else:
            self.validate_dataset_string(ref)
            ref_list = ref.split('/')
            owner_slug = ref_list[0]
            dataset_slug = ref_list[1]
            request = ApiCreateDatasetVersionRequest()
            request.owner_slug = owner_slug
            request.dataset_slug = dataset_slug
            message = kaggle.datasets.dataset_api_client.create_dataset_version
        request.body = body
        with ResumableUploadContext() as upload_context:
            # upload_files populates body.files with upload descriptors as a
            # side effect; they are then converted to token-only messages.
            self.upload_files(body, resources, folder, ApiBlobType.DATASET,
                              upload_context, quiet, dir_mode)
            request.body.files = [
                self._api_dataset_new_file(file) for file in request.body.files
            ]
            response = self.with_retry(message)(request)
            return response
def _api_dataset_new_file(self, file):
    """Convert an uploaded file descriptor into a token-only
    ApiDatasetNewFile message."""
    # TODO Eliminate the need for this conversion
    new_file = ApiDatasetNewFile()
    new_file.token = file.token
    return new_file
def dataset_create_version_cli(self,
                               folder,
                               version_notes,
                               quiet=False,
                               convert_to_csv=True,
                               delete_old_versions=False,
                               dir_mode='skip'):
    """CLI wrapper for creating a version of a dataset.

    Parameters
    ==========
    folder: the folder with the dataset configuration / data files
    version_notes: notes to add for the version
    quiet: suppress verbose output (default is False)
    convert_to_csv: on upload, if data should be converted to csv
    delete_old_versions: if True, do that (default False)
    dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
    """
    target_folder = folder if folder else os.getcwd()
    outcome = self.dataset_create_version(
        target_folder,
        version_notes,
        quiet=quiet,
        convert_to_csv=convert_to_csv,
        delete_old_versions=delete_old_versions,
        dir_mode=dir_mode)
    if outcome is None:
        print('Dataset version creation error: See previous output')
    elif outcome.invalidTags:
        print(('The following are not valid tags and could not be added to '
               'the dataset: ') + str(outcome.invalidTags))
    elif outcome.status.lower() == 'ok':
        print('Dataset version is being created. Please check progress at ' +
              outcome.url)
    else:
        print('Dataset version creation error: ' + outcome.error)
def dataset_initialize(self, folder):
    """Write a starter dataset metadata template into *folder*.

    The template carries placeholder title/slug values that the user must
    edit before uploading, plus a default CC0-1.0 license entry.

    Parameters
    ==========
    folder: the folder to initialize the metadata file in

    Returns the path of the written metadata file.
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    ref = self.config_values[self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE'
    meta_data = {
        'title': 'INSERT_TITLE_HERE',
        'id': ref,
        'licenses': [{'name': 'CC0-1.0'}],
    }
    meta_file = os.path.join(folder, self.DATASET_METADATA_FILE)
    with open(meta_file, 'w') as f:
        json.dump(meta_data, f, indent=2)
    print('Data package template written to: ' + meta_file)
    return meta_file
def dataset_initialize_cli(self, folder=None):
    """CLI wrapper: initialize dataset metadata in *folder*, defaulting to
    the current working directory."""
    self.dataset_initialize(folder if folder else os.getcwd())
def dataset_create_new(self,
                       folder,
                       public=False,
                       quiet=False,
                       convert_to_csv=True,
                       dir_mode='skip'):
    """ Create a new dataset, meaning the same as creating a version but
    with extra metadata like license and user/owner.
    Parameters
    ==========
    folder: the folder to get the metadata file from
    public: should the dataset be public?
    quiet: suppress verbose output (default is False)
    convert_to_csv: if True, convert data to comma separated value
        (NOTE(review): currently unused; see the commented-out assignment below)
    dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    meta_file = self.get_dataset_metadata_file(folder)
    # read json
    with open(meta_file) as f:
        meta_data = json.load(f)
    ref = self.get_or_fail(meta_data, 'id')
    title = self.get_or_fail(meta_data, 'title')
    licenses = self.get_or_fail(meta_data, 'licenses')
    ref_list = ref.split('/')
    owner_slug = ref_list[0]
    dataset_slug = ref_list[1]
    # validations
    if ref == self.config_values[self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE':
        raise ValueError(
            'Default slug detected, please change values before uploading')
    if title == 'INSERT_TITLE_HERE':
        raise ValueError(
            'Default title detected, please change values before uploading')
    if len(licenses) != 1:
        raise ValueError('Please specify exactly one license')
    if len(dataset_slug) < 6 or len(dataset_slug) > 50:
        raise ValueError('The dataset slug must be between 6 and 50 characters')
    if len(title) < 6 or len(title) > 50:
        raise ValueError('The dataset title must be between 6 and 50 characters')
    resources = meta_data.get('resources')
    if resources:
        self.validate_resources(folder, resources)
    license_name = self.get_or_fail(licenses[0], 'name')
    description = meta_data.get('description')
    keywords = self.get_or_default(meta_data, 'keywords', [])
    subtitle = meta_data.get('subtitle')
    if subtitle and (len(subtitle) < 20 or len(subtitle) > 80):
        raise ValueError('Subtitle length must be between 20 and 80 characters')
    request = ApiCreateDatasetRequest()
    request.title = title
    request.slug = dataset_slug
    request.owner_slug = owner_slug
    request.license_name = license_name
    request.subtitle = subtitle
    request.description = description
    request.files = []
    request.is_private = not public
    # request.convert_to_csv=convert_to_csv
    request.category_ids = keywords
    with ResumableUploadContext() as upload_context:
        # upload_files populates request.files with upload descriptors as a
        # side effect; the retry request below reads them back.
        self.upload_files(request, resources, folder, ApiBlobType.DATASET,
                          upload_context, quiet, dir_mode)
        with self.build_kaggle_client() as kaggle:
            # A fresh request is rebuilt for the (retried) call, copying the
            # fields and converting the uploaded files to token-only messages.
            retry_request = ApiCreateDatasetRequest()
            retry_request.title = title
            retry_request.slug = dataset_slug
            retry_request.owner_slug = owner_slug
            retry_request.license_name = license_name
            retry_request.subtitle = subtitle
            retry_request.description = description
            retry_request.files = [
                self._api_dataset_new_file(file) for file in request.files
            ]
            retry_request.is_private = not public
            retry_request.category_ids = keywords
            response = self.with_retry(
                kaggle.datasets.dataset_api_client.create_dataset)(
                    retry_request)
            return response
def dataset_create_new_cli(self,
                           folder=None,
                           public=False,
                           quiet=False,
                           convert_to_csv=True,
                           dir_mode='skip'):
    """CLI wrapper for creating a new dataset.

    Parameters
    ==========
    folder: the folder to get the metadata file from
    public: should the dataset be public?
    quiet: suppress verbose output (default is False)
    convert_to_csv: if True, convert data to comma separated value
    dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
    """
    target_folder = folder if folder else os.getcwd()
    result = self.dataset_create_new(target_folder, public, quiet,
                                     convert_to_csv, dir_mode)
    if result.invalidTags:
        print('The following are not valid tags and could not be added to '
              'the dataset: ' + str(result.invalidTags))
    if result.status.lower() != 'ok':
        print('Dataset creation error: ' + result.error)
    elif public:
        print('Your public Dataset is being created. Please check '
              'progress at ' + result.url)
    else:
        print('Your private Dataset is being created. Please check '
              'progress at ' + result.url)
def download_file(self,
                  response,
                  outfile,
                  http_client,
                  quiet=True,
                  resume=False,
                  chunk_size=1048576):
    """ download a file to an output file based on a chunk size
    Parameters
    ==========
    response: the response to download
    outfile: the output file to download to
    http_client: the Kaggle http client to use
    quiet: suppress verbose output (default is True)
    chunk_size: the size of the chunk to stream
    resume: whether to resume an existing download
    """
    outpath = os.path.dirname(outfile)
    if not os.path.exists(outpath):
        os.makedirs(outpath)
    size = int(response.headers['Content-Length'])
    size_read = 0
    open_mode = 'wb'
    # Mirror the server's Last-Modified onto the local file; fall back to
    # "now" when the header is absent.
    last_modified = response.headers.get('Last-Modified')
    if last_modified is None:
        remote_date = datetime.now()
    else:
        remote_date = datetime.strptime(response.headers['Last-Modified'],
                                        '%a, %d %b %Y %H:%M:%S %Z')
    remote_date_timestamp = time.mktime(remote_date.timetuple())
    if not quiet:
        print('Downloading ' + os.path.basename(outfile) + ' to ' + outpath)
    file_exists = os.path.isfile(outfile)
    resumable = 'Accept-Ranges' in response.headers and response.headers[
        'Accept-Ranges'] == 'bytes'
    if resume and resumable and file_exists:
        # Continue a partial download with an HTTP Range request against
        # the original (redirected) download URL.
        size_read = os.path.getsize(outfile)
        open_mode = 'ab'
        if not quiet:
            print("... resuming from %d bytes (%d bytes left) ..." % (
                size_read,
                size - size_read,
            ))
        request_history = response.history[0]
        response = http_client.call(
            request_history.request.method,
            request_history.headers['location'],
            headers={'Range': 'bytes=%d-' % (size_read,)},
            _preload_content=False)
    with tqdm(
            total=size,
            initial=size_read,
            unit='B',
            unit_scale=True,
            unit_divisor=1024,
            disable=quiet) as pbar:
        with open(outfile, open_mode) as out:
            # TODO: Delete this test after all API methods are converted.
            if type(response).__name__ == 'HTTPResponse':
                while True:
                    data = response.read(chunk_size)
                    if not data:
                        break
                    out.write(data)
                    # NOTE(review): mtime is set to remote-1 after every
                    # chunk (one utime syscall per chunk) — presumably so an
                    # interrupted file never matches the remote timestamp;
                    # confirm before hoisting out of the loop.
                    os.utime(
                        outfile,
                        times=(remote_date_timestamp - 1, remote_date_timestamp - 1))
                    size_read = min(size, size_read + chunk_size)
                    pbar.update(len(data))
            else:
                for data in response.iter_content(chunk_size):
                    if not data:
                        break
                    out.write(data)
                    os.utime(
                        outfile,
                        times=(remote_date_timestamp - 1, remote_date_timestamp - 1))
                    size_read = min(size, size_read + chunk_size)
                    pbar.update(len(data))
    if not quiet:
        print('\n', end='')
    # Only a complete download gets the exact remote timestamp.
    os.utime(outfile, times=(remote_date_timestamp, remote_date_timestamp))
def kernels_list(self,
                 page=1,
                 page_size=20,
                 dataset=None,
                 competition=None,
                 parent_kernel=None,
                 search=None,
                 mine=False,
                 user=None,
                 language=None,
                 kernel_type=None,
                 output_type=None,
                 sort_by=None):
    """ List kernels based on a set of search criteria.
    Parameters
    ==========
    page: the page of results to return (default is 1)
    page_size: results per page (default is 20, capped at 100)
    dataset: if defined, filter to this dataset (default None)
    competition: if defined, filter to this competition (default None)
    parent_kernel: if defined, filter to those with specified parent
    search: a custom search string to pass to the list query
    mine: if true, group is specified as "my" to return personal kernels
    user: filter results to a specific user
    language: the programming language of the kernel
    kernel_type: the type of kernel, one of valid_list_kernel_types (str)
    output_type: the output type, one of valid_list_output_types (str)
    sort_by: if defined, sort results by this string (valid_list_sort_by)
    """
    if int(page) <= 0:
        raise ValueError('Page number must be >= 1')
    page_size = int(page_size)
    if page_size <= 0:
        raise ValueError('Page size must be >= 1')
    if page_size > 100:
        # Server-side maximum page size.
        page_size = 100
    if language and language not in self.valid_list_languages:
        raise ValueError('Invalid language specified. Valid options are ' +
                         str(self.valid_list_languages))
    if kernel_type and kernel_type not in self.valid_list_kernel_types:
        raise ValueError('Invalid kernel type specified. Valid options are ' +
                         str(self.valid_list_kernel_types))
    if output_type and output_type not in self.valid_list_output_types:
        raise ValueError('Invalid output type specified. Valid options are ' +
                         str(self.valid_list_output_types))
    if sort_by:
        if sort_by not in self.valid_list_sort_by:
            raise ValueError('Invalid sort by type specified. Valid options are ' +
                             str(self.valid_list_sort_by))
        if sort_by == 'relevance' and search == '':
            raise ValueError('Cannot sort by relevance without a search term.')
        sort_by = self.lookup_enum(KernelsListSortType, sort_by)
    else:
        sort_by = KernelsListSortType.HOTNESS
    self.validate_dataset_string(dataset)
    self.validate_kernel_string(parent_kernel)
    group = 'everyone'
    if mine:
        group = 'profile'
    group = self.lookup_enum(KernelsListViewType, group)
    with self.build_kaggle_client() as kaggle:
        request = ApiListKernelsRequest()
        request.page = page
        request.page_size = page_size
        request.group = group
        request.user = user or ''
        request.language = language or 'all'
        request.kernel_type = kernel_type or 'all'
        request.output_type = output_type or 'all'
        request.sort_by = sort_by
        request.dataset = dataset or ''
        request.competition = competition or ''
        request.parent_kernel = parent_kernel or ''
        request.search = search or ''
        return kaggle.kernels.kernels_api_client.list_kernels(request).kernels
    # The legacy swagger fallback that used to follow here was unreachable
    # (it sat after an unconditional return) and has been removed.
def kernels_list_cli(self,
                     mine=False,
                     page=1,
                     page_size=20,
                     search=None,
                     csv_display=False,
                     parent=None,
                     competition=None,
                     dataset=None,
                     user=None,
                     language=None,
                     kernel_type=None,
                     output_type=None,
                     sort_by=None):
    """ Client wrapper for kernels_list; see that function for most
    arguments.
    Parameters
    ==========
    csv_display: if True, print comma separated values instead of table
    """
    listing = self.kernels_list(
        page=page,
        page_size=page_size,
        search=search,
        mine=mine,
        dataset=dataset,
        competition=competition,
        parent_kernel=parent,
        user=user,
        language=language,
        kernel_type=kernel_type,
        output_type=output_type,
        sort_by=sort_by)
    columns = ['ref', 'title', 'author', 'lastRunTime', 'totalVotes']
    if not listing:
        print('Not found')
        return
    # Pick the output format once, then emit.
    emit = self.print_csv if csv_display else self.print_table
    emit(listing, columns)
def kernels_list_files(self, kernel, page_token=None, page_size=20):
    """ List the files of a kernel.
    Parameters
    ==========
    kernel: the string identifier of the kernel,
        in format [owner]/[kernel-name]
    page_token: the page token for pagination
    page_size: the number of items per page
    """
    if kernel is None:
        raise ValueError('A kernel must be specified')
    # split_dataset_string handles the [owner]/[slug] form; any version
    # suffix it parses out is ignored here.
    owner, slug, _version = self.split_dataset_string(kernel)
    with self.build_kaggle_client() as kaggle:
        request = ApiListKernelFilesRequest()
        request.kernel_slug = slug
        request.user_name = owner
        request.page_token = page_token
        request.page_size = page_size
        return kaggle.kernels.kernels_api_client.list_kernel_files(request)
def kernels_list_files_cli(self,
                           kernel,
                           kernel_opt=None,
                           csv_display=False,
                           page_token=None,
                           page_size=20):
    """ A wrapper to kernels_list_files for the client.
    (list files for a kernel and print them)
    Parameters
    ==========
    kernel: the string identifier of the kernel
        should be in format [owner]/[kernel-name]
    kernel_opt: an alternative option to providing a kernel
    csv_display: if True, print comma separated values instead of table
    page_token: the page token for pagination
    page_size: the number of items per page
    """
    # Positional argument wins over the option form.
    kernel = kernel or kernel_opt
    result = self.kernels_list_files(kernel, page_token, page_size)
    if result is None:
        print('No files found')
        return
    if result.error_message:
        print(result.error_message)
        return
    # NOTE(review): camelCase attribute — kagglesdk response objects expose
    # snake_case properties (next_page_token) elsewhere in this file; confirm
    # 'nextPageToken' actually exists on this response type.
    next_page_token = result.nextPageToken
    if next_page_token:
        print('Next Page Token = {}'.format(next_page_token))
    fields = ['name', 'size', 'creationDate']
    if csv_display:
        self.print_csv(result.files, fields)
    else:
        self.print_table(result.files, fields)
def kernels_initialize(self, folder):
    """ Create a new kernel metadata template in a specified folder, with
    json metadata that grabs values from the configuration.
    Parameters
    ==========
    folder: the path of the folder
    Returns
    =======
    the path of the written metadata file
    Raises
    ======
    ValueError: if folder is not an existing directory
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    # (Fix: removed the `resources` list that was built and never used.)
    username = self.get_config_value(self.CONFIG_NAME_USER)
    meta_data = {
        'id': username + '/INSERT_KERNEL_SLUG_HERE',
        'title': 'INSERT_TITLE_HERE',
        'code_file': 'INSERT_CODE_FILE_PATH_HERE',
        'language':
            'Pick one of: {' +
            ','.join(x for x in self.valid_push_language_types) + '}',
        'kernel_type':
            'Pick one of: {' +
            ','.join(x for x in self.valid_push_kernel_types) + '}',
        'is_private': 'true',
        'enable_gpu': 'false',
        'enable_tpu': 'false',
        'enable_internet': 'true',
        'dataset_sources': [],
        'competition_sources': [],
        'kernel_sources': [],
        'model_sources': [],
    }
    meta_file = os.path.join(folder, self.KERNEL_METADATA_FILE)
    with open(meta_file, 'w') as f:
        json.dump(meta_data, f, indent=2)
    return meta_file
def kernels_initialize_cli(self, folder=None):
    """ Client wrapper for kernels_initialize; when folder is None it
    defaults to the present working directory.
    Parameters
    ==========
    folder: the path of the folder (None defaults to ${PWD})
    """
    target = folder if folder else os.getcwd()
    written = self.kernels_initialize(target)
    print('Kernel metadata template written to: ' + written)
def kernels_push(self, folder, timeout=None) -> ApiSaveKernelResponse:
    """ Read the metadata file and kernel files from a notebook, validate
    both, and use the Kernel API to push to Kaggle if all is valid.
    Parameters
    ==========
    folder: the path of the folder
    timeout: if set, forwarded to the request as session_timeout_seconds
    Returns
    =======
    the ApiSaveKernelResponse from the server
    Raises
    ======
    ValueError: when the folder, metadata file, or any metadata value
        fails validation
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    meta_file = os.path.join(folder, self.KERNEL_METADATA_FILE)
    if not os.path.isfile(meta_file):
        raise ValueError('Metadata file not found: ' + str(meta_file))
    with open(meta_file) as f:
        meta_data = json.load(f)
    title = self.get_or_default(meta_data, 'title', None)
    if title and len(title) < 5:
        raise ValueError('Title must be at least five characters')
    code_path = self.get_or_default(meta_data, 'code_file', '')
    if not code_path:
        raise ValueError('A source file must be specified in the metadata')
    code_file = os.path.join(folder, code_path)
    if not os.path.isfile(code_file):
        raise ValueError('Source file not found: ' + str(code_file))
    slug = meta_data.get('id')
    id_no = meta_data.get('id_no')
    if not slug and not id_no:
        raise ValueError('ID or slug must be specified in the metadata')
    if slug:
        self.validate_kernel_string(slug)
        if '/' in slug:
            kernel_slug = slug.split('/')[1]
        else:
            kernel_slug = slug
        if title:
            # Warn (but do not fail) when the title does not slugify to the
            # declared id, since the server derives the URL from the title.
            as_slug = slugify(title)
            if kernel_slug.lower() != as_slug:
                print('Your kernel title does not resolve to the specified '
                      'id. This may result in surprising behavior. We '
                      'suggest making your title something that resolves to '
                      'the specified id. See %s for more information on '
                      'how slugs are determined.' %
                      'https://en.wikipedia.org/wiki/Clean_URL#Slug')
    language = self.get_or_default(meta_data, 'language', '')
    if language not in self.valid_push_language_types:
        raise ValueError(
            'A valid language must be specified in the metadata. Valid '
            'options are ' + str(self.valid_push_language_types))
    kernel_type = self.get_or_default(meta_data, 'kernel_type', '')
    if kernel_type not in self.valid_push_kernel_types:
        raise ValueError(
            'A valid kernel type must be specified in the metadata. Valid '
            'options are ' + str(self.valid_push_kernel_types))
    # Notebooks authored as rmarkdown are pushed as language 'r'.
    if kernel_type == 'notebook' and language == 'rmarkdown':
        language = 'r'
    dataset_sources = self.get_or_default(meta_data, 'dataset_sources', [])
    for source in dataset_sources:
        self.validate_dataset_string(source)
    kernel_sources = self.get_or_default(meta_data, 'kernel_sources', [])
    for source in kernel_sources:
        self.validate_kernel_string(source)
    model_sources = self.get_or_default(meta_data, 'model_sources', [])
    for source in model_sources:
        self.validate_model_string(source)
    docker_pinning_type = self.get_or_default(meta_data,
                                              'docker_image_pinning_type', None)
    if (docker_pinning_type is not None and
            docker_pinning_type not in self.valid_push_pinning_types):
        raise ValueError('If specified, the docker_image_pinning_type must be '
                         'one of ' + str(self.valid_push_pinning_types))
    with open(code_file) as f:
        script_body = f.read()
    if kernel_type == 'notebook':
        # Strip cell outputs and flatten multi-line sources before upload.
        json_body = json.loads(script_body)
        if 'cells' in json_body:
            for cell in json_body['cells']:
                if 'outputs' in cell and cell['cell_type'] == 'code':
                    cell['outputs'] = []
                # The spec allows a list of strings,
                # but the server expects just one
                if 'source' in cell and isinstance(cell['source'], list):
                    cell['source'] = ''.join(cell['source'])
        script_body = json.dumps(json_body)
    with self.build_kaggle_client() as kaggle:
        request = ApiSaveKernelRequest()
        request.id = id_no
        request.slug = slug
        request.new_title = self.get_or_default(meta_data, 'title', None)
        request.text = script_body
        request.language = language
        request.kernel_type = kernel_type
        request.is_private = self.get_bool(meta_data, 'is_private', True)
        request.enable_gpu = self.get_bool(meta_data, 'enable_gpu', False)
        request.enable_tpu = self.get_bool(meta_data, 'enable_tpu', False)
        request.enable_internet = self.get_bool(meta_data, 'enable_internet',
                                                True)
        request.dataset_data_sources = dataset_sources
        request.competition_data_sources = self.get_or_default(
            meta_data, 'competition_sources', [])
        request.kernel_data_sources = kernel_sources
        request.model_data_sources = model_sources
        request.category_ids = self.get_or_default(meta_data, 'keywords', [])
        request.docker_image_pinning_type = docker_pinning_type
        if timeout:
            request.session_timeout_seconds = int(timeout)
        return kaggle.kernels.kernels_api_client.save_kernel(request)
def kernels_push_cli(self, folder, timeout):
    """ Client wrapper for kernels_push; prints the outcome.
    Parameters
    ==========
    folder: the path of the folder (None defaults to ${PWD})
    timeout: forwarded to kernels_push as the session timeout
    """
    folder = folder or os.getcwd()
    result = self.kernels_push(folder, timeout)
    if result is None:
        print('Kernel push error: see previous output')
    elif not result.error:
        # NOTE(review): the camelCase attributes below (invalidTags,
        # invalidDatasetSources, invalidCompetitionSources,
        # invalidKernelSources, versionNumber) look inconsistent with the
        # snake_case properties kagglesdk responses expose elsewhere in this
        # file — confirm they exist on ApiSaveKernelResponse.
        if result.invalidTags:
            print('The following are not valid tags and could not be added '
                  'to the kernel: ' + str(result.invalidTags))
        if result.invalidDatasetSources:
            print('The following are not valid dataset sources and could not '
                  'be added to the kernel: ' + str(result.invalidDatasetSources))
        if result.invalidCompetitionSources:
            print('The following are not valid competition sources and could '
                  'not be added to the kernel: ' +
                  str(result.invalidCompetitionSources))
        if result.invalidKernelSources:
            print('The following are not valid kernel sources and could not '
                  'be added to the kernel: ' + str(result.invalidKernelSources))
        if result.versionNumber:
            print('Kernel version %s successfully pushed. Please check '
                  'progress at %s' % (result.versionNumber, result.url))
        else:
            # Shouldn't happen but didn't test exhaustively
            print('Kernel version successfully pushed. Please check '
                  'progress at %s' % result.url)
    else:
        print('Kernel push error: ' + result.error)
def kernels_pull(self, kernel, path, metadata=False, quiet=True):
    """ Pull a kernel, including a metadata file (if metadata is True)
    and associated files to a specified path.
    Parameters
    ==========
    kernel: the kernel to pull; if None, the id is read from an existing
        metadata file in path (or the current working directory)
    path: the path to pull files to on the filesystem
    metadata: if True, also write the kernel metadata file
    quiet: suppress verbosity (default is True)
    Returns
    =======
    the directory written to when metadata is True, otherwise the path
    of the downloaded source file
    Raises
    ======
    ValueError: when no kernel can be determined
    """
    existing_metadata = None
    if kernel is None:
        # No kernel given: try to discover one from a local metadata file.
        if path is None:
            existing_metadata_path = os.path.join(os.getcwd(),
                                                  self.KERNEL_METADATA_FILE)
        else:
            existing_metadata_path = os.path.join(path,
                                                  self.KERNEL_METADATA_FILE)
        if os.path.exists(existing_metadata_path):
            with open(existing_metadata_path) as f:
                existing_metadata = json.load(f)
            kernel = existing_metadata['id']
            if 'INSERT_KERNEL_SLUG_HERE' in kernel:
                raise ValueError('A kernel must be specified')
            else:
                print('Using kernel ' + kernel)
        else:
            # Fix: previously this fell through with kernel still None and
            # crashed with a TypeError on the '/' membership test below.
            raise ValueError('A kernel must be specified')
    if '/' in kernel:
        self.validate_kernel_string(kernel)
        kernel_url_list = kernel.split('/')
        owner_slug = kernel_url_list[0]
        kernel_slug = kernel_url_list[1]
    else:
        owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
        kernel_slug = kernel

    if path is None:
        effective_path = self.get_default_download_dir('kernels', owner_slug,
                                                       kernel_slug)
    else:
        effective_path = path
    if not os.path.exists(effective_path):
        os.makedirs(effective_path)

    with self.build_kaggle_client() as kaggle:
        request = ApiGetKernelRequest()
        request.user_name = owner_slug
        request.kernel_slug = kernel_slug
        response = kaggle.kernels.kernels_api_client.get_kernel(request)
    blob = response.blob
    if os.path.isfile(effective_path):
        effective_dir = os.path.dirname(effective_path)
    else:
        effective_dir = effective_path
    metadata_path = os.path.join(effective_dir, self.KERNEL_METADATA_FILE)
    if not os.path.isfile(effective_path):
        # Downloading into a directory: pick a file name, preferring one
        # recorded in metadata.
        language = blob.language.lower()
        kernel_type = blob.kernel_type.lower()
        file_name = None
        if existing_metadata:
            file_name = existing_metadata['code_file']
        elif os.path.isfile(metadata_path):
            with open(metadata_path) as f:
                file_name = json.load(f)['code_file']
        if not file_name or file_name == "INSERT_CODE_FILE_PATH_HERE":
            extensions = {
                'script': {
                    'python': '.py',
                    'r': '.R',
                    'rmarkdown': '.Rmd',
                    'sqlite': '.sql',
                    'julia': '.jl',
                },
                'notebook': {
                    'python': '.ipynb',
                    'r': '.irnb',
                    'julia': '.ijlnb',
                },
            }
            extension = extensions.get(kernel_type, {}).get(language)
            if extension is None:
                # BUG FIX: previously the code computed
                # `file_name = blob.slug + extension` unconditionally, so an
                # unknown language/type raised TypeError (str + None) before
                # this fallback could ever run.
                print('Unknown language %s + kernel type %s - please report this '
                      'on the kaggle-api github issues' % (language, kernel_type))
                print('Saving as a python file, even though this may not be the '
                      'correct language')
                file_name = 'script.py'
            else:
                file_name = blob.slug + extension
        script_path = os.path.join(effective_path, file_name)
    else:
        # Downloading onto an existing file path: overwrite it in place.
        script_path = effective_path
        file_name = os.path.basename(effective_path)
    with open(script_path, 'w', encoding="utf-8") as f:
        f.write(blob.source)
    if metadata:
        data = {}
        server_metadata = response.metadata
        data['id'] = server_metadata.ref
        data['id_no'] = server_metadata.id
        data['title'] = server_metadata.title
        data['code_file'] = file_name
        data['language'] = server_metadata.language
        data['kernel_type'] = server_metadata.kernel_type
        data['is_private'] = server_metadata.is_private
        data['enable_gpu'] = server_metadata.enable_gpu
        data['enable_tpu'] = server_metadata.enable_tpu
        data['enable_internet'] = server_metadata.enable_internet
        data['keywords'] = server_metadata.category_ids
        data['dataset_sources'] = server_metadata.dataset_data_sources
        data['kernel_sources'] = server_metadata.kernel_data_sources
        data['competition_sources'] = server_metadata.competition_data_sources
        data['model_sources'] = server_metadata.model_data_sources
        with open(metadata_path, 'w') as f:
            json.dump(data, f, indent=2)
        return effective_dir
    else:
        return script_path
def kernels_pull_cli(self,
                     kernel,
                     kernel_opt=None,
                     path=None,
                     metadata=False):
    """ Client wrapper for kernels_pull; prints where files were written. """
    chosen = kernel or kernel_opt
    destination = self.kernels_pull(
        chosen, path=path, metadata=metadata, quiet=False)
    what = 'Source code and metadata' if metadata else 'Source code'
    print(what + ' downloaded to ' + destination)
def kernels_output(self, kernel, path, force=False, quiet=True):
    """ Retrieve the output for a specified kernel session.
    Parameters
    ==========
    kernel: the kernel to output
    path: the path to pull files to on the filesystem
    force: if output already exists, force overwrite (default False)
    quiet: suppress verbosity (default is True)
    Returns
    =======
    (outfiles, token): list of downloaded file paths and the next page
    token for pagination
    Raises
    ======
    ValueError: if kernel is None or the target is not a directory
    """
    if kernel is None:
        raise ValueError('A kernel must be specified')
    if '/' in kernel:
        self.validate_kernel_string(kernel)
        kernel_url_list = kernel.split('/')
        owner_slug = kernel_url_list[0]
        kernel_slug = kernel_url_list[1]
    else:
        # Bare slug: assume the configured user owns the kernel.
        owner_slug = self.get_config_value(self.CONFIG_NAME_USER)
        kernel_slug = kernel
    if path is None:
        target_dir = self.get_default_download_dir('kernels', owner_slug,
                                                   kernel_slug, 'output')
    else:
        target_dir = path
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    if not os.path.isdir(target_dir):
        raise ValueError('You must specify a directory for the kernels output')
    token = None
    with self.build_kaggle_client() as kaggle:
        request = ApiListKernelSessionOutputRequest()
        request.user_name = owner_slug
        request.kernel_slug = kernel_slug
        response = kaggle.kernels.kernels_api_client.list_kernel_session_output(
            request)
        token = response.next_page_token
        outfiles = []
        for item in response.files:
            # NOTE(review): subscript access assumes each file entry is a
            # dict with 'fileName'/'url' keys — kagglesdk objects elsewhere
            # in this file expose snake_case attributes instead; confirm.
            outfile = os.path.join(target_dir, item['fileName'])
            outfiles.append(outfile)
            download_response = requests.get(item['url'], stream=True)
            # download_needed compares timestamps/sizes to skip re-downloads.
            if force or self.download_needed(download_response, outfile, quiet):
                os.makedirs(os.path.split(outfile)[0], exist_ok=True)
                with open(outfile, 'wb') as out:
                    out.write(download_response.content)
                if not quiet:
                    print('Output file downloaded to %s' % outfile)
        log = response.log
        if log:
            # The session log is saved alongside the output files.
            outfile = os.path.join(target_dir, kernel_slug + '.log')
            outfiles.append(outfile)
            with open(outfile, 'w') as out:
                out.write(log)
            if not quiet:
                print('Kernel log downloaded to %s ' % outfile)
        return outfiles, token  # Breaking change, we need to get the token to the UI
def kernels_output_cli(self,
                       kernel,
                       kernel_opt=None,
                       path=None,
                       force=False,
                       quiet=False):
    """ Client wrapper for kernels_output, with same arguments. Extra
    arguments are described below; see kernels_output for others.
    Parameters
    ==========
    kernel_opt: option from client instead of kernel, if not defined
    """
    chosen = kernel or kernel_opt
    _, token = self.kernels_output(chosen, path, force, quiet)
    if token:
        print(f"Next page token: {token}")
def kernels_status(self, kernel):
    """ Query the API for the run status of a kernel session.
    Parameters
    ==========
    kernel: the kernel to get the status for
    """
    if kernel is None:
        raise ValueError('A kernel must be specified')
    if '/' in kernel:
        self.validate_kernel_string(kernel)
        parts = kernel.split('/')
        owner = parts[0]
        slug = parts[1]
    else:
        # Bare slug: assume the configured user owns the kernel.
        owner = self.get_config_value(self.CONFIG_NAME_USER)
        slug = kernel
    with self.build_kaggle_client() as kaggle:
        request = ApiGetKernelSessionStatusRequest()
        request.user_name = owner
        request.kernel_slug = slug
        return kaggle.kernels.kernels_api_client.get_kernel_session_status(
            request)
def kernels_status_cli(self, kernel, kernel_opt=None):
    """ Client wrapper for kernels_status; prints the status and any
    failure message.
    Parameters
    ==========
    kernel_opt: additional option from the client, if kernel not defined
    """
    chosen = kernel or kernel_opt
    response = self.kernels_status(chosen)
    # Always show the status line; add the failure detail when present.
    print('%s has status "%s"' % (chosen, response.status))
    if response.failure_message:
        print('Failure message: "%s"' % response.failure_message)
def model_get(self, model):
    """ Fetch a model from the API.
    Parameters
    ==========
    model: the string identifier of the model,
        in format [owner]/[model-name]
    """
    owner, slug = self.split_model_string(model)
    with self.build_kaggle_client() as kaggle:
        request = ApiGetModelRequest()
        request.owner_slug = owner
        request.model_slug = slug
        return kaggle.models.model_api_client.get_model(request)
def model_get_cli(self, model, folder=None):
    """ Client wrapper for model_get: print the model, or write its
    metadata to a file when a folder is given.
    Parameters
    ==========
    model: the string identifier of the model
        should be in format [owner]/[model-name]
    folder: the folder to download the model metadata file
    """
    model = self.model_get(model)
    if folder is None:
        self.print_obj(model)
    else:
        meta_file = os.path.join(folder, self.MODEL_METADATA_FILE)
        data = {}
        data['id'] = model.id
        # ref has the form owner/slug; split it back into its parts.
        model_ref_split = model.ref.split('/')
        data['ownerSlug'] = model_ref_split[0]
        data['slug'] = model_ref_split[1]
        data['title'] = model.title
        data['subtitle'] = model.subtitle
        # NOTE(review): camelCase attribute reads (isPrivate, publishTime)
        # look inconsistent with the snake_case properties used on kagglesdk
        # objects elsewhere in this file — confirm they exist on this model
        # object.
        data['isPrivate'] = model.isPrivate  # TODO Test to ensure True default
        data['description'] = model.description
        data['publishTime'] = model.publishTime
        with open(meta_file, 'w') as f:
            json.dump(data, f, indent=2)
        print('Metadata file written to {}'.format(meta_file))
def model_list(self,
               sort_by=None,
               search=None,
               owner=None,
               page_size=20,
               page_token=None):
    """ Return a page of models from the API.
    Parameters
    ==========
    sort_by: how to sort the result, see valid_model_sort_bys for options
    search: a search term to use (default is empty string)
    owner: username or organization slug to filter the search to
    page_size: the page size to return (default is 20)
    page_token: the page token for pagination
    """
    order = None
    if sort_by:
        if sort_by not in self.valid_model_sort_bys:
            raise ValueError('Invalid sort by specified. Valid options are ' +
                             str(self.valid_model_sort_bys))
        order = self.lookup_enum(ListModelsOrderBy, sort_by)
    if int(page_size) <= 0:
        raise ValueError('Page size must be >= 1')
    with self.build_kaggle_client() as kaggle:
        request = ApiListModelsRequest()
        request.sort_by = order or ListModelsOrderBy.LIST_MODELS_ORDER_BY_HOTNESS
        request.search = search or ''
        request.owner = owner or ''
        request.page_size = page_size
        request.page_token = page_token
        response = kaggle.models.model_api_client.list_models(request)
        if response.next_page_token:
            print('Next Page Token = {}'.format(response.next_page_token))
        return response.models
def model_list_cli(self,
                   sort_by=None,
                   search=None,
                   owner=None,
                   page_size=20,
                   page_token=None,
                   csv_display=False):
    """ Client wrapper for model_list; see model_list for most arguments.
    Parameters
    ==========
    csv_display: if True, print comma separated values instead of table
    """
    models = self.model_list(sort_by, search, owner, page_size, page_token)
    columns = ['id', 'ref', 'title', 'subtitle', 'author']
    if not models:
        print('No models found')
        return
    emit = self.print_csv if csv_display else self.print_table
    emit(models, columns)
def model_initialize(self, folder):
    """ Initialize a folder with a model configuration (metadata) file.
    Writes a JSON template whose placeholder values must be edited before
    the model can be created.
    Parameters
    ==========
    folder: the folder to initialize the metadata file in
    Returns
    =======
    the path of the written metadata file
    Raises
    ======
    ValueError: if folder is not an existing directory
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    # Placeholder values here are detected and rejected by model_create_new.
    meta_data = {
        'ownerSlug': 'INSERT_OWNER_SLUG_HERE',
        'title': 'INSERT_TITLE_HERE',
        'slug': 'INSERT_SLUG_HERE',
        'subtitle': '',
        'isPrivate': True,
        'description': '''# Model Summary
# Model Characteristics
# Data Overview
# Evaluation Results
''',
        'publishTime': '',
        'provenanceSources': ''
    }
    meta_file = os.path.join(folder, self.MODEL_METADATA_FILE)
    with open(meta_file, 'w') as f:
        json.dump(meta_data, f, indent=2)
    print('Model template written to: ' + meta_file)
    return meta_file
def model_initialize_cli(self, folder=None):
    """ Client wrapper for model_initialize; folder defaults to the
    present working directory when omitted.
    """
    target = folder if folder else os.getcwd()
    self.model_initialize(target)
def model_create_new(self, folder):
    """ Create a new model from the metadata file found in a folder.
    Parameters
    ==========
    folder: the folder to get the metadata file from
    Raises
    ======
    ValueError: when the folder or any metadata value fails validation
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    meta_file = self.get_model_metadata_file(folder)
    # Load the model metadata json.
    with open(meta_file) as f:
        meta_data = json.load(f)
    owner_slug = self.get_or_fail(meta_data, 'ownerSlug')
    slug = self.get_or_fail(meta_data, 'slug')
    title = self.get_or_fail(meta_data, 'title')
    subtitle = meta_data.get('subtitle')
    is_private = self.get_or_fail(meta_data, 'isPrivate')
    description = self.sanitize_markdown(
        self.get_or_fail(meta_data, 'description'))
    publish_time = meta_data.get('publishTime')
    provenance_sources = meta_data.get('provenanceSources')

    # Reject template placeholder values the user forgot to replace.
    for value, placeholder, field in (
            (owner_slug, 'INSERT_OWNER_SLUG_HERE', 'ownerSlug'),
            (title, 'INSERT_TITLE_HERE', 'title'),
            (slug, 'INSERT_SLUG_HERE', 'slug')):
        if value == placeholder:
            raise ValueError('Default %s detected, please change values '
                             'before uploading' % field)
    if not isinstance(is_private, bool):
        raise ValueError('model.isPrivate must be a boolean')
    if publish_time:
        self.validate_date(publish_time)
    else:
        publish_time = None

    with self.build_kaggle_client() as kaggle:
        request = ApiCreateModelRequest()
        request.owner_slug = owner_slug
        request.slug = slug
        request.title = title
        request.subtitle = subtitle
        request.is_private = is_private
        request.description = description
        request.publish_time = publish_time
        request.provenance_sources = provenance_sources
        return kaggle.models.model_api_client.create_model(request)
def model_create_new_cli(self, folder=None):
    """ Client wrapper for creating a new model; prints the outcome.
    Parameters
    ==========
    folder: the folder to get the metadata file from (None defaults to ${PWD})
    """
    folder = folder or os.getcwd()
    result = self.model_create_new(folder)
    # NOTE(review): 'hasId' is camelCase while kagglesdk responses elsewhere
    # in this file expose snake_case properties — confirm this attribute
    # exists on the create-model response.
    if result.hasId:
        print('Your model was created. Id={}. Url={}'.format(
            result.id, result.url))
    else:
        print('Model creation error: ' + result.error)
def model_delete(self, model, yes):
    """ Delete a model, asking for confirmation unless `yes` is set.
    Parameters
    ==========
    model: the string identifier of the model,
        in format [owner]/[model-name]
    yes: automatic confirmation
    """
    owner, slug = self.split_model_string(model)
    # Bail out of the whole process if the user declines.
    if not yes and not self.confirmation():
        print('Deletion cancelled')
        exit(0)
    with self.build_kaggle_client() as kaggle:
        request = ApiDeleteModelRequest()
        request.owner_slug = owner
        request.model_slug = slug
        return kaggle.models.model_api_client.delete_model(request)
def model_delete_cli(self, model, yes):
    """ Client wrapper for deleting a model; prints the outcome.
    Parameters
    ==========
    model: the string identifier of the model,
        in format [owner]/[model-name]
    yes: automatic confirmation
    """
    outcome = self.model_delete(model, yes)
    if outcome.error:
        print('Model deletion error: ' + outcome.error)
    else:
        print('The model was deleted.')
def model_update(self, folder):
    """ Update an existing model from the metadata file found in a folder.
    Only fields present in the metadata are included in the update mask.
    Parameters
    ==========
    folder: the folder to get the metadata file from
    Raises
    ======
    ValueError: when the folder or any metadata value fails validation
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    meta_file = self.get_model_metadata_file(folder)
    # read json
    with open(meta_file) as f:
        meta_data = json.load(f)
    owner_slug = self.get_or_fail(meta_data, 'ownerSlug')
    slug = self.get_or_fail(meta_data, 'slug')
    title = self.get_or_default(meta_data, 'title', None)
    subtitle = self.get_or_default(meta_data, 'subtitle', None)
    is_private = self.get_or_default(meta_data, 'isPrivate', None)
    description = self.get_or_default(meta_data, 'description', None)
    publish_time = self.get_or_default(meta_data, 'publishTime', None)
    provenance_sources = self.get_or_default(meta_data, 'provenanceSources',
                                             None)
    # validations
    if owner_slug == 'INSERT_OWNER_SLUG_HERE':
        raise ValueError(
            'Default ownerSlug detected, please change values before uploading')
    if slug == 'INSERT_SLUG_HERE':
        raise ValueError(
            'Default slug detected, please change values before uploading')
    if is_private is not None and not isinstance(is_private, bool):
        raise ValueError('model.isPrivate must be a boolean')
    if publish_time:
        self.validate_date(publish_time)
    # Build the field mask: only fields present in the metadata are updated.
    update_mask = {'paths': []}
    if title is not None:
        update_mask['paths'].append('title')
    if subtitle is not None:
        update_mask['paths'].append('subtitle')
    if is_private is not None:
        update_mask['paths'].append('isPrivate')  # is_private
    else:
        is_private = True  # default value, not updated
    if description is not None:
        description = self.sanitize_markdown(description)
        update_mask['paths'].append('description')
    if publish_time is not None and len(publish_time) > 0:
        update_mask['paths'].append('publish_time')
    else:
        publish_time = None
    if provenance_sources is not None and len(provenance_sources) > 0:
        update_mask['paths'].append('provenance_sources')
    else:
        provenance_sources = None
    with self.build_kaggle_client() as kaggle:
        # BUG FIX: the old code did
        #   fm = fm.FromJsonString(json.dumps(update_mask))
        # but FieldMask.FromJsonString mutates in place and returns None, so
        # fm was always None and the update mask never reached the server
        # (and the dict-shaped JSON was not valid FieldMask JSON anyway).
        # Building the mask directly from its paths is sufficient.
        fm = field_mask_pb2.FieldMask(paths=update_mask['paths'])
        request = ApiUpdateModelRequest()
        request.owner_slug = owner_slug
        request.model_slug = slug
        request.title = title
        request.subtitle = subtitle
        request.is_private = is_private
        request.description = description
        request.publish_time = publish_time
        request.provenance_sources = provenance_sources
        request.update_mask = fm if len(update_mask['paths']) > 0 else None
        return kaggle.models.model_api_client.update_model(request)
def model_update_cli(self, folder=None):
    """ Client wrapper for updating a model; prints the outcome.
    Parameters
    ==========
    folder: the folder to get the metadata file from (None defaults to ${PWD})
    """
    folder = folder or os.getcwd()
    result = self.model_update(folder)
    # NOTE(review): 'hasId' is camelCase while kagglesdk responses elsewhere
    # in this file expose snake_case properties — confirm this attribute
    # exists on the update-model response.
    if result.hasId:
        print('Your model was updated. Id={}. Url={}'.format(
            result.id, result.url))
    else:
        print('Model update error: ' + result.error)
def model_instance_get(self, model_instance):
    """ Fetch a model instance from the API.
    Parameters
    ==========
    model_instance: the string identifier of the model instance,
        in format [owner]/[model-name]/[framework]/[instance-slug]
    """
    if model_instance is None:
        raise ValueError('A model instance must be specified')
    owner, model, framework, instance = self.split_model_instance_string(
        model_instance)
    with self.build_kaggle_client() as kaggle:
        request = ApiGetModelInstanceRequest()
        request.owner_slug = owner
        request.model_slug = model
        request.framework = self.lookup_enum(ModelFramework, framework)
        request.instance_slug = instance
        return kaggle.models.model_api_client.get_model_instance(request)
def model_instance_get_cli(self, model_instance, folder=None):
    """ Client wrapper for model_instance_get: print the instance, or
    write its metadata to a file when a folder is given.
    Parameters
    ==========
    model_instance: the string identifier of the model instance
        should be in format [owner]/[model-name]/[framework]/[instance-slug]
    folder: the folder to download the model instance metadata file
    """
    mi = self.model_instance_get(model_instance)
    if folder is None:
        self.print_obj(mi)
    else:
        meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE)
        owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string(
            model_instance)
        data = {
            'id': mi.id,
            'ownerSlug': owner_slug,
            'modelSlug': model_slug,
            'instanceSlug': mi.slug,
            'framework': self.short_enum_name(mi.framework),
            'overview': mi.overview,
            'usage': mi.usage,
            'licenseName': mi.license_name,
            'fineTunable': mi.fine_tunable,
            'trainingData': mi.training_data,
            'versionId': mi.version_id,
            'versionNumber': mi.version_number,
            'modelInstanceType': self.short_enum_name(mi.model_instance_type)
        }
        if mi.base_model_instance_information is not None:
            # TODO Test this.
            # NOTE(review): subscript access assumes this field is a dict
            # with 'owner'/'modelSlug'/'framework'/'instanceSlug' keys —
            # confirm it is not a kagglesdk object exposing snake_case
            # attributes instead.
            data['baseModelInstance'] = '{}/{}/{}/{}'.format(
                mi.base_model_instance_information['owner']['slug'],
                mi.base_model_instance_information['modelSlug'],
                mi.base_model_instance_information['framework'],
                mi.base_model_instance_information['instanceSlug'])
        data['externalBaseModelUrl'] = mi.external_base_model_url
        with open(meta_file, 'w') as f:
            json.dump(data, f, indent=2)
        print('Metadata file written to {}'.format(meta_file))
def model_instance_initialize(self, folder):
    """ Initialize a folder with a model instance configuration (metadata)
    file. Writes a JSON template whose placeholder values must be edited
    before the instance can be created.
    Parameters
    ==========
    folder: the folder to initialize the metadata file in
    Returns
    =======
    the path of the written metadata file
    Raises
    ======
    ValueError: if folder is not an existing directory
    """
    if not os.path.isdir(folder):
        raise ValueError('Invalid folder: ' + folder)
    # Placeholder values here are detected and rejected by
    # model_instance_create.
    meta_data = {
        'ownerSlug': 'INSERT_OWNER_SLUG_HERE',
        'modelSlug': 'INSERT_EXISTING_MODEL_SLUG_HERE',
        'instanceSlug': 'INSERT_INSTANCE_SLUG_HERE',
        'framework': 'INSERT_FRAMEWORK_HERE',
        'overview': '',
        'usage': '''# Model Format
# Training Data
# Model Inputs
# Model Outputs
# Model Usage
# Fine-tuning
# Changelog
''',
        'licenseName': 'Apache 2.0',
        'fineTunable': False,
        'trainingData': [],
        'modelInstanceType': 'Unspecified',
        'baseModelInstanceId': 0,
        'externalBaseModelUrl': ''
    }
    meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE)
    with open(meta_file, 'w') as f:
        json.dump(meta_data, f, indent=2)
    print('Model Instance template written to: ' + meta_file)
    return meta_file
def model_instance_initialize_cli(self, folder):
    """ Client wrapper for model_instance_initialize; folder defaults to
    the present working directory when omitted.
    """
    target = folder if folder else os.getcwd()
    self.model_instance_initialize(target)
def model_instance_create(self, folder, quiet=False, dir_mode='skip'):
  """ Create a new model instance.
  Parameters
  ==========
  folder: the folder to get the metadata file from
  quiet: suppress verbose output (default is False)
  dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload

  Returns
  =======
  the API response from the create-model-instance call
  """
  if not os.path.isdir(folder):
    raise ValueError('Invalid folder: ' + folder)

  meta_file = self.get_model_instance_metadata_file(folder)

  # read json
  with open(meta_file) as f:
    meta_data = json.load(f)
  owner_slug = self.get_or_fail(meta_data, 'ownerSlug')
  model_slug = self.get_or_fail(meta_data, 'modelSlug')
  instance_slug = self.get_or_fail(meta_data, 'instanceSlug')
  framework = self.get_or_fail(meta_data, 'framework')
  # Markdown fields are sanitized before being sent to the API.
  overview = self.sanitize_markdown(
      self.get_or_default(meta_data, 'overview', ''))
  usage = self.sanitize_markdown(self.get_or_default(meta_data, 'usage', ''))
  license_name = self.get_or_fail(meta_data, 'licenseName')
  fine_tunable = self.get_or_default(meta_data, 'fineTunable', False)
  training_data = self.get_or_default(meta_data, 'trainingData', [])
  model_instance_type = self.get_or_default(meta_data, 'modelInstanceType',
                                            'Unspecified')
  base_model_instance = self.get_or_default(meta_data, 'baseModelInstance',
                                            '')
  external_base_model_url = self.get_or_default(meta_data,
                                                'externalBaseModelUrl', '')

  # validations: reject the untouched placeholder values written by
  # model_instance_initialize.
  if owner_slug == 'INSERT_OWNER_SLUG_HERE':
    raise ValueError(
        'Default ownerSlug detected, please change values before uploading')
  if model_slug == 'INSERT_EXISTING_MODEL_SLUG_HERE':
    raise ValueError(
        'Default modelSlug detected, please change values before uploading')
  if instance_slug == 'INSERT_INSTANCE_SLUG_HERE':
    raise ValueError(
        'Default instanceSlug detected, please change values before uploading'
    )
  if framework == 'INSERT_FRAMEWORK_HERE':
    raise ValueError(
        'Default framework detected, please change values before uploading')
  if license_name == '':
    raise ValueError('Please specify a license')
  if not isinstance(fine_tunable, bool):
    raise ValueError('modelInstance.fineTunable must be a boolean')
  if not isinstance(training_data, list):
    raise ValueError('modelInstance.trainingData must be a list')

  body = ApiCreateModelInstanceRequestBody()
  body.framework = self.lookup_enum(ModelFramework, framework)
  body.instance_slug = instance_slug
  body.overview = overview
  body.usage = usage
  body.license_name = license_name
  body.fine_tunable = fine_tunable
  body.training_data = training_data
  body.model_instance_type = self.lookup_enum(ModelInstanceType,
                                              model_instance_type)
  body.base_model_instance = base_model_instance
  body.external_base_model_url = external_base_model_url
  body.files = []

  with self.build_kaggle_client() as kaggle:
    request = ApiCreateModelInstanceRequest()
    request.owner_slug = owner_slug
    request.model_slug = model_slug
    request.body = body
    message = kaggle.models.model_api_client.create_model_instance
    with ResumableUploadContext() as upload_context:
      # Upload the folder contents first so the create request can reference
      # the resulting blob tokens in its file list.
      self.upload_files(body, None, folder, ApiBlobType.MODEL, upload_context,
                        quiet, dir_mode)
      request.body.files = [
          self._api_dataset_new_file(file) for file in request.body.files
      ]
      response = self.with_retry(message)(request)
      return response
def model_instance_create_cli(self, folder, quiet=False, dir_mode='skip'):
  """ Client wrapper for creating a new model instance.
  Parameters
  ==========
  folder: the folder to get the metadata file from
  quiet: suppress verbose output (default is False)
  dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
  """
  folder = folder or os.getcwd()
  result = self.model_instance_create(folder, quiet, dir_mode)
  # A non-zero id signals success, matching model_instance_version_create_cli.
  # The old check read 'result.hasId', which is not an attribute of the
  # response object and raised AttributeError instead of reporting status.
  if result.id != 0:
    print('Your model instance was created. Id={}. Url={}'.format(
        result.id, result.url))
  else:
    print('Model instance creation error: ' + result.error)
def model_instance_delete(self, model_instance, yes):
  """ Delete a model instance.
  Parameters
  ==========
  model_instance: the string identifier of the model instance
    should be in format [owner]/[model-name]/[framework]/[instance-slug]
  yes: automatic confirmation

  Returns
  =======
  the API response from the delete call
  """
  if model_instance is None:
    raise ValueError('A model instance must be specified')
  owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string(
      model_instance)

  # Ask for interactive confirmation unless the caller pre-confirmed.
  if not yes:
    if not self.confirmation():
      print('Deletion cancelled')
      exit(0)

  with self.build_kaggle_client() as kaggle:
    request = ApiDeleteModelInstanceRequest()
    request.owner_slug = owner_slug
    request.model_slug = model_slug
    request.framework = self.lookup_enum(ModelFramework, framework)
    request.instance_slug = instance_slug
    return kaggle.models.model_api_client.delete_model_instance(request)
  # NOTE: the old code had an unreachable 'return res' after the with-block
  # that referenced an undefined name; it has been removed.
def model_instance_delete_cli(self, model_instance, yes):
  """ Client wrapper for model_instance_delete.
  Parameters
  ==========
  model_instance: the string identified of the model instance
    should be in format [owner]/[model-name]/[framework]/[instance-slug]
  yes: automatic confirmation
  """
  outcome = self.model_instance_delete(model_instance, yes)
  if len(outcome.error) == 0:
    print('The model instance was deleted.')
  else:
    print('Model instance deletion error: ' + outcome.error)
def model_instance_files(self,
                         model_instance,
                         page_token=None,
                         page_size=20,
                         csv_display=False):
  """ List files for the current version of a model instance.
  Parameters
  ==========
  model_instance: the string identifier of the model instance
    should be in format [owner]/[model-name]/[framework]/[instance-slug]
  page_token: token for pagination
  page_size: the number of items per page
  csv_display: if True, print comma separated values instead of table
  """
  if model_instance is None:
    raise ValueError('A model_instance must be specified')
  self.validate_model_instance_string(model_instance)
  owner_slug, model_slug, framework, instance_slug = model_instance.split('/')

  with self.build_kaggle_client() as kaggle:
    request = ApiListModelInstanceVersionFilesRequest()
    request.owner_slug = owner_slug
    request.model_slug = model_slug
    request.framework = self.lookup_enum(ModelFramework, framework)
    request.instance_slug = instance_slug
    request.page_size = page_size
    request.page_token = page_token
    response = kaggle.models.model_api_client.list_model_instance_version_files(
        request)
    if not response:
      print('No files found')
      return FileList({})
    if response.next_page_token:
      print('Next Page Token = {}'.format(response.next_page_token))
    return response
def model_instance_files_cli(self,
                             model_instance,
                             page_token=None,
                             page_size=20,
                             csv_display=False):
  """ Client wrapper for model_instance_files.
  Parameters
  ==========
  model_instance: the string identified of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]
  page_token: token for pagination
  page_size: the number of items per page
  csv_display: if True, print comma separated values instead of table
  """
  response = self.model_instance_files(
      model_instance,
      page_token=page_token,
      page_size=page_size,
      csv_display=csv_display)
  if not (response and response.files is not None):
    return
  fields = self.dataset_file_fields
  if csv_display:
    self.print_csv(response.files, fields)
  else:
    self.print_table(response.files, fields)
def model_instance_update(self, folder):
  """ Update a model instance from the metadata file found in a folder.
  Parameters
  ==========
  folder: the folder to get the metadata file from

  Returns
  =======
  the API response from the update call
  """
  if not os.path.isdir(folder):
    raise ValueError('Invalid folder: ' + folder)

  meta_file = self.get_model_instance_metadata_file(folder)

  # Read the metadata; optional keys default to None so "not provided" can
  # be told apart from an explicit value when building the field mask.
  with open(meta_file) as f:
    meta_data = json.load(f)
  owner_slug = self.get_or_fail(meta_data, 'ownerSlug')
  model_slug = self.get_or_fail(meta_data, 'modelSlug')
  framework = self.get_or_fail(meta_data, 'framework')
  instance_slug = self.get_or_fail(meta_data, 'instanceSlug')
  overview = self.get_or_default(meta_data, 'overview', '')
  usage = self.get_or_default(meta_data, 'usage', '')
  license_name = self.get_or_default(meta_data, 'licenseName', None)
  fine_tunable = self.get_or_default(meta_data, 'fineTunable', None)
  training_data = self.get_or_default(meta_data, 'trainingData', None)
  model_instance_type = self.get_or_default(meta_data, 'modelInstanceType',
                                            None)
  base_model_instance = self.get_or_default(meta_data, 'baseModelInstance',
                                            None)
  external_base_model_url = self.get_or_default(meta_data,
                                                'externalBaseModelUrl', None)

  # validations: reject untouched template placeholders.
  if owner_slug == 'INSERT_OWNER_SLUG_HERE':
    raise ValueError(
        'Default ownerSlug detected, please change values before uploading')
  if model_slug == 'INSERT_SLUG_HERE':
    raise ValueError(
        'Default model slug detected, please change values before uploading')
  if instance_slug == 'INSERT_INSTANCE_SLUG_HERE':
    raise ValueError(
        'Default instance slug detected, please change values before uploading'
    )
  if framework == 'INSERT_FRAMEWORK_HERE':
    raise ValueError(
        'Default framework detected, please change values before uploading')
  if fine_tunable is not None and not isinstance(fine_tunable, bool):
    raise ValueError('modelInstance.fineTunable must be a boolean')
  if training_data is not None and not isinstance(training_data, list):
    raise ValueError('modelInstance.trainingData must be a list')
  if model_instance_type:
    model_instance_type = self.lookup_enum(ModelInstanceType,
                                           model_instance_type)

  # Build the field-mask path list so the server only touches the fields
  # actually present in the metadata file.
  update_paths = []
  if overview is not None:
    overview = self.sanitize_markdown(overview)
    update_paths.append('overview')
  if usage is not None:
    usage = self.sanitize_markdown(usage)
    update_paths.append('usage')
  if license_name is not None:
    update_paths.append('licenseName')
  else:
    license_name = "Apache 2.0"  # default value even if not updated
  if fine_tunable is not None:
    update_paths.append('fineTunable')
  if training_data is not None:
    update_paths.append('trainingData')
  if model_instance_type is not None:
    update_paths.append('modelInstanceType')
  if base_model_instance is not None:
    update_paths.append('baseModelInstance')
  if external_base_model_url is not None:
    update_paths.append('externalBaseModelUrl')

  with self.build_kaggle_client() as kaggle:
    # BUG FIX: the old code piped the mask through
    # FieldMask.FromJsonString(json.dumps(...)). FromJsonString mutates in
    # place and returns None (and expects a comma-separated path string,
    # not a JSON object), so 'fm' became None and the request was always
    # sent without an update mask. Constructing the FieldMask directly is
    # both correct and sufficient.
    fm = field_mask_pb2.FieldMask(paths=update_paths)
    request = ApiUpdateModelInstanceRequest()
    request.owner_slug = owner_slug
    request.model_slug = model_slug
    request.framework = self.lookup_enum(ModelFramework, framework)
    request.instance_slug = instance_slug
    request.overview = overview
    request.usage = usage
    request.license_name = license_name
    request.fine_tunable = fine_tunable
    request.training_data = training_data
    request.model_instance_type = model_instance_type
    request.base_model_instance = base_model_instance
    request.external_base_model_url = external_base_model_url
    # Omit the mask entirely when nothing is being updated.
    request.update_mask = fm if len(update_paths) > 0 else None
    return kaggle.models.model_api_client.update_model_instance(request)
def model_instance_update_cli(self, folder=None):
  """ Client wrapper for updating a model instance.
  Parameters
  ==========
  folder: the folder to get the metadata file from
  """
  response = self.model_instance_update(folder if folder else os.getcwd())
  if len(response.error) > 0:
    print('Model update error: ' + response.error)
  else:
    print('Your model instance was updated. Id={}. Url={}'.format(
        response.id, response.url))
def model_instance_version_create(self,
                                  model_instance,
                                  folder,
                                  version_notes='',
                                  quiet=False,
                                  dir_mode='skip'):
  """ Create a new model instance version.
  Parameters
  ==========
  model_instance: the string identified of the model instance
    should be in format [owner]/[model-name]/[framework]/[instance-slug]
  folder: the folder to get the metadata file from
  version_notes: the version notes to record for this new version
  quiet: suppress verbose output (default is False)
  dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
  """
  owner_slug, model_slug, framework, instance_slug = (
      self.split_model_instance_string(model_instance))

  body = ApiCreateModelInstanceVersionRequestBody()
  body.version_notes = version_notes

  request = ApiCreateModelInstanceVersionRequest()
  request.owner_slug = owner_slug
  request.model_slug = model_slug
  request.framework = self.lookup_enum(ModelFramework, framework)
  request.instance_slug = instance_slug
  request.body = body

  with self.build_kaggle_client() as kaggle:
    message = kaggle.models.model_api_client.create_model_instance_version
    with ResumableUploadContext() as upload_context:
      # Upload folder contents first; the version request then references
      # the resulting blob tokens.
      self.upload_files(body, None, folder, ApiBlobType.MODEL, upload_context,
                        quiet, dir_mode)
      request.body.files = [
          self._api_dataset_new_file(file) for file in request.body.files
      ]
      return self.with_retry(message)(request)
def model_instance_version_create_cli(self,
                                      model_instance,
                                      folder,
                                      version_notes='',
                                      quiet=False,
                                      dir_mode='skip'):
  """ Client wrapper for creating a new version of a model instance.
  Parameters
  ==========
  model_instance: the string identifier of the model instance
    should be in format [owner]/[model-name]/[framework]/[instance-slug]
  folder: the folder to get the metadata file from
  version_notes: the version notes to record for this new version
  quiet: suppress verbose output (default is False)
  dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
  """
  response = self.model_instance_version_create(model_instance, folder,
                                                version_notes, quiet, dir_mode)
  if response.id == 0:
    print('Model instance version creation error: ' + response.error)
  else:
    print('Your model instance version was created. Url={}'.format(
        response.url))
def model_instance_version_download(self,
                                    model_instance_version,
                                    path=None,
                                    force=False,
                                    quiet=True,
                                    untar=False):
  """ Download all files for a model instance version.
  Parameters
  ==========
  model_instance_version: the string identifier of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
  path: the path to download the model instance version to
  force: force the download if the file already exists (default False)
  quiet: suppress verbose output (default is True)
  untar: if True, untar files upon download (default is False)

  Returns
  =======
  the path of the .tar.gz archive (note: the file itself is removed after a
  successful untar, but its path is still returned)
  """
  if model_instance_version is None:
    raise ValueError('A model_instance_version must be specified')
  self.validate_model_instance_version_string(model_instance_version)
  urls = model_instance_version.split('/')
  owner_slug = urls[0]
  model_slug = urls[1]
  framework = urls[2]
  instance_slug = urls[3]
  version_number = urls[4]

  # Default download location mirrors the identifier's path components.
  if path is None:
    effective_path = self.get_default_download_dir('models', owner_slug,
                                                   model_slug, framework,
                                                   instance_slug,
                                                   version_number)
  else:
    effective_path = path

  request = ApiDownloadModelInstanceVersionRequest()
  request.owner_slug = owner_slug
  request.model_slug = model_slug
  request.framework = self.lookup_enum(ModelFramework, framework)
  request.instance_slug = instance_slug
  request.version_number = int(version_number)
  with self.build_kaggle_client() as kaggle:
    response = kaggle.models.model_api_client.download_model_instance_version(
        request)

    outfile = os.path.join(effective_path, model_slug + '.tar.gz')
    # Skip the transfer when a fresh-enough local copy already exists,
    # unless the caller forces a re-download.
    if force or self.download_needed(response, outfile, quiet):
      self.download_file(response, outfile, quiet, not force)
      downloaded = True
    else:
      downloaded = False

    if downloaded:
      if untar:
        try:
          with tarfile.open(outfile, mode='r:gz') as t:
            t.extractall(effective_path)
        except Exception as e:
          raise ValueError(
              'Error extracting the tar.gz file, please report on '
              'www.github.com/kaggle/kaggle-api', e)

        # Archive extracted successfully; the tarball itself is no longer
        # needed. Failure to delete is non-fatal.
        try:
          os.remove(outfile)
        except OSError as e:
          print('Could not delete tar file, got %s' % e)
    return outfile
def model_instance_version_download_cli(self,
                                        model_instance_version,
                                        path=None,
                                        untar=False,
                                        force=False,
                                        quiet=False):
  """ Client wrapper for model_instance_version_download.

  Same contract, but verbose output is enabled by default (quiet=False)
  since this is the interactive CLI entry point.

  Parameters
  ==========
  model_instance_version: the string identifier of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
  path: the path to download the model instance version to
  force: force the download if the file already exists (default False)
  quiet: suppress verbose output (default is False)
  untar: if True, untar files upon download (default is False)
  """
  return self.model_instance_version_download(
      model_instance_version, path=path, untar=untar, force=force, quiet=quiet)
def model_instance_version_files(self,
                                 model_instance_version,
                                 page_token=None,
                                 page_size=20,
                                 csv_display=False):
  """ List all files for a model instance version.
  Parameters
  ==========
  model_instance_version: the string identifier of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
  page_token: token for pagination
  page_size: the number of items per page
  csv_display: if True, print comma separated values instead of table
  """
  if model_instance_version is None:
    raise ValueError('A model_instance_version must be specified')
  self.validate_model_instance_version_string(model_instance_version)
  owner_slug, model_slug, framework, instance_slug, version_number = (
      model_instance_version.split('/'))

  request = ApiListModelInstanceVersionFilesRequest()
  request.owner_slug = owner_slug
  request.model_slug = model_slug
  request.framework = self.lookup_enum(ModelFramework, framework)
  request.instance_slug = instance_slug
  request.version_number = int(version_number)
  request.page_size = page_size
  request.page_token = page_token

  with self.build_kaggle_client() as kaggle:
    response = kaggle.models.model_api_client.list_model_instance_version_files(
        request)
    if not response:
      print('No files found')
      return None
    if response.next_page_token:
      print('Next Page Token = {}'.format(response.next_page_token))
    return response
def model_instance_version_files_cli(self,
                                     model_instance_version,
                                     page_token=None,
                                     page_size=20,
                                     csv_display=False):
  """ Client wrapper for model_instance_version_files.
  Parameters
  ==========
  model_instance_version: the string identifier of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
  page_token: token for pagination
  page_size: the number of items per page
  csv_display: if True, print comma separated values instead of table
  """
  response = self.model_instance_version_files(
      model_instance_version,
      page_token=page_token,
      page_size=page_size,
      csv_display=csv_display)
  if not (response and response.files is not None):
    return
  fields = ['name', 'size', 'creation_date']
  labels = ['name', 'size', 'creationDate']
  printer = self.print_csv if csv_display else self.print_table
  printer(response.files, fields, labels)
def model_instance_version_delete(self, model_instance_version, yes):
  """ Delete a model instance version.
  Parameters
  ==========
  model_instance_version: the string identifier of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
  yes: automatic confirmation
  """
  if model_instance_version is None:
    raise ValueError('A model instance version must be specified')
  self.validate_model_instance_version_string(model_instance_version)
  owner_slug, model_slug, framework, instance_slug, version_number = (
      model_instance_version.split('/'))

  # Interactive confirmation unless pre-confirmed via --yes.
  if not yes:
    if not self.confirmation():
      print('Deletion cancelled')
      exit(0)

  request = ApiDeleteModelInstanceVersionRequest()
  request.owner_slug = owner_slug
  request.model_slug = model_slug
  request.framework = self.lookup_enum(ModelFramework, framework)
  request.instance_slug = instance_slug
  request.version_number = int(version_number)

  with self.build_kaggle_client() as kaggle:
    return kaggle.models.model_api_client.delete_model_instance_version(
        request)
def model_instance_version_delete_cli(self, model_instance_version, yes):
  """ Client wrapper for model_instance_version_delete
  Parameters
  ==========
  model_instance_version: the string identified of the model instance version
    should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
  yes: automatic confirmation
  """
  outcome = self.model_instance_version_delete(model_instance_version, yes)
  if len(outcome.error) == 0:
    print('The model instance version was deleted.')
  else:
    print('Model instance version deletion error: ' + outcome.error)
def files_upload_cli(self, local_paths, inbox_path, no_resume, no_compress):
  """ Upload a batch of local files/folders to the Kaggle inbox.

  Refuses batches larger than MAX_NUM_INBOX_FILES_TO_UPLOAD. Each path is
  first uploaded as a blob, then registered as an inbox file.
  """
  if len(local_paths) > self.MAX_NUM_INBOX_FILES_TO_UPLOAD:
    print('Cannot upload more than %d files!' %
          self.MAX_NUM_INBOX_FILES_TO_UPLOAD)
    return

  files_to_create = []
  with ResumableUploadContext(no_resume) as upload_context:
    for local_path in local_paths:
      upload_file, file_name = self.file_upload_cli(local_path, inbox_path,
                                                    no_compress,
                                                    upload_context)
      if upload_file is None:
        # Skipped or failed upload; nothing to register.
        continue
      create_request = CreateInboxFileRequest()
      create_request.virtual_directory = inbox_path
      create_request.blob_file_token = upload_file.token
      files_to_create.append((create_request, file_name))

    with self.build_kaggle_client() as kaggle:
      create_inbox_file = kaggle.admin.inbox_file_client.create_inbox_file
      for create_request, file_name in files_to_create:
        self.with_retry(create_inbox_file)(create_request)
        print('Inbox file created:', file_name)
def file_upload_cli(self, local_path, inbox_path, no_compress,
                    upload_context):
  """ Upload a single file or folder destined for the inbox.

  Returns a (upload_file, name) pair; upload_file is None when the path was
  skipped or the upload failed.
  """
  full_path = os.path.abspath(local_path)
  parent_path, file_or_folder_name = os.path.split(full_path)
  # Folders are archived before upload; 'tar' skips compression.
  dir_mode = 'tar' if no_compress else 'zip'
  upload_file = self._upload_file_or_folder(parent_path, file_or_folder_name,
                                            ApiBlobType.INBOX, upload_context,
                                            dir_mode)
  return (upload_file, file_or_folder_name)
def print_obj(self, obj, indent=2):
  """ Pretty-print a JSON-serializable object to stdout. """
  print(json.dumps(obj, indent=indent))
def download_needed(self, response, outfile, quiet=True):
  """ determine if a download is needed based on timestamp. Return True
  if needed (remote is newer) or False if local is newest.
  Parameters
  ==========
  response: the response from the API
  outfile: the output file to write to
  quiet: suppress verbose output (default is True)
  """
  try:
    last_modified = response.headers.get('Last-Modified')
    if last_modified is None:
      # No timestamp from the server: treat the remote as brand new.
      remote_date = datetime.now()
    else:
      remote_date = datetime.strptime(last_modified,
                                      '%a, %d %b %Y %H:%M:%S %Z')
    if os.path.isfile(outfile):
      local_date = datetime.fromtimestamp(os.path.getmtime(outfile))
      remote_size = int(response.headers['Content-Length'])
      local_size = os.path.getsize(outfile)
      if local_size < remote_size:
        # Local copy is smaller than the remote - likely a partial
        # download, so fetch again.
        return True
      if remote_date <= local_date:
        if not quiet:
          print(
              os.path.basename(outfile) +
              ': Skipping, found more recently modified local '
              'copy (use --force to force download)')
        return False
  except Exception:
    # Missing/malformed headers etc.: err on the side of downloading.
    # (Was a bare 'except:', which also swallowed KeyboardInterrupt and
    # SystemExit.)
    pass
  return True
def print_table(self, items, fields, labels=None):
  """ print a table of items, for a set of fields defined
  Parameters
  ==========
  items: a list of items to print
  fields: a list of fields to select from items
  labels: labels for the fields, defaults to fields
  """
  if labels is None:
    labels = fields
  if len(items) == 0:
    return

  # Compute one column width per field: widest of the header and all values.
  formats = []
  borders = []
  for field in fields:
    attr = self.camel_to_snake(field)
    width = max(
        len(field),
        max(len(self.string(getattr(item, attr))) for item in items))
    # Right-align integer columns plus the well-known numeric fields.
    numeric = (isinstance(getattr(items[0], attr), int) or field == 'size' or
               field == 'reward')
    justify = '>' if numeric else '<'
    formats.append('{:' + justify + self.string(width + 2) + '}')
    borders.append('-' * width + ' ')

  row_format = u''.join(formats)
  print(row_format.format(*[label + ' ' for label in labels]))
  print(row_format.format(*borders))
  for item in items:
    cells = [
        self.string(getattr(item, self.camel_to_snake(field))) + ' '
        for field in fields
    ]
    try:
      print(row_format.format(*cells))
    except UnicodeEncodeError:
      # Fall back to raw UTF-8 bytes on terminals with limited encodings.
      print(row_format.format(*cells).encode('utf-8'))
def print_csv(self, items, fields, labels=None):
  """ print a set of fields in a set of items using a csv.writer
  Parameters
  ==========
  items: a list of items to print
  fields: a list of fields to select from items
  labels: labels for the fields, defaults to fields
  """
  writer = csv.writer(sys.stdout)
  writer.writerow(fields if labels is None else labels)
  for item in items:
    writer.writerow(
        self.string(getattr(item, self.camel_to_snake(field)))
        for field in fields)
def string(self, item):
  """ Return item unchanged when it is already a str, otherwise str(item). """
  if isinstance(item, str):
    return item
  return str(item)
def get_or_fail(self, data, key):
  """ Return data[key]; raise ValueError when the key is absent. """
  try:
    return data[key]
  except KeyError:
    raise ValueError('Key ' + key + ' not found in data')
def get_or_default(self, data, key, default):
  """ Return data[key] when present, otherwise the supplied default. """
  return data[key] if key in data else default
def get_bool(self, data, key, default):
  """ Read a boolean from data[key].

  Accepts an actual bool, or the strings 'true'/'false' in any case.
  Returns ``default`` when the key is absent; raises ValueError for any
  other value.
  """
  if key not in data:
    return default
  val = data[key]
  if isinstance(val, bool):
    return val
  if isinstance(val, str):
    val = val.lower()
    if val == 'true':
      return True
    if val == 'false':
      return False
    raise ValueError('Invalid boolean value: ' + val)
  # str(val): the old code concatenated a non-string here, which raised
  # TypeError instead of the intended ValueError.
  raise ValueError('Invalid boolean value: ' + str(val))
def set_if_present(self, data, key, output, output_key):
  """ Copy data[key] into output[output_key] when the key exists; no-op
  otherwise.
  """
  try:
    output[output_key] = data[key]
  except KeyError:
    pass
def get_dataset_metadata_file(self, folder):
  """ Locate the dataset metadata file in a folder.

  Prefers DATASET_METADATA_FILE and falls back to the legacy
  OLD_DATASET_METADATA_FILE; raises ValueError when neither exists.
  """
  preferred = os.path.join(folder, self.DATASET_METADATA_FILE)
  if os.path.isfile(preferred):
    return preferred
  fallback = os.path.join(folder, self.OLD_DATASET_METADATA_FILE)
  if not os.path.isfile(fallback):
    raise ValueError('Metadata file not found: ' +
                     self.DATASET_METADATA_FILE)
  return fallback
def get_model_metadata_file(self, folder):
  """ Return the path of MODEL_METADATA_FILE inside a folder, raising
  ValueError when the file does not exist.
  """
  meta_file = os.path.join(folder, self.MODEL_METADATA_FILE)
  if os.path.isfile(meta_file):
    return meta_file
  raise ValueError('Metadata file not found: ' + self.MODEL_METADATA_FILE)
def get_model_instance_metadata_file(self, folder):
  """ Return the path of MODEL_INSTANCE_METADATA_FILE inside a folder,
  raising ValueError when the file does not exist.
  """
  meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE)
  if os.path.isfile(meta_file):
    return meta_file
  raise ValueError('Metadata file not found: ' +
                   self.MODEL_INSTANCE_METADATA_FILE)
def process_response(self, result):
  """ process a response from the API. We check the API version against
  the client's to see if it's old, and give them a warning (once)
  Parameters
  ==========
  result: the result from the API
  """
  if len(result) == 3:
    data = result[0]
    headers = result[2]
    if self.HEADER_API_VERSION in headers:
      api_version = headers[self.HEADER_API_VERSION]
      if (not self.already_printed_version_warning and
          not self.is_up_to_date(api_version)):
        # BUG FIX: only the first fragment of the old message was an
        # f-string, so '{self.__version__}' and '{api_version}' printed
        # literally; it also contained a stray double backtick.
        print(f'Warning: Looks like you\'re using an outdated `kaggle` '
              f'version (installed: {self.__version__}), please consider '
              f'upgrading to the latest version ({api_version})')
        self.already_printed_version_warning = True
    if isinstance(data, dict) and 'code' in data and data['code'] != 200:
      # API-level error payload: surface its message to the caller.
      raise Exception(data['message'])
    return data
  return result
def is_up_to_date(self, server_version):
  """ determine if a client (on the local user's machine) is up to date
  with the version provided on the server. Return a boolean with True
  or False
  Parameters
  ==========
  server_version: the server version string to compare to the host
  """
  client_split = self.__version__.split('.')
  server_split = server_version.split('.')

  # Pad the shorter version with zeros so both have the same length.
  while len(client_split) < len(server_split):
    client_split.append('0')
  while len(server_split) < len(client_split):
    server_split.append('0')

  # BUG FIX: compare every component. The old loop only ran over the
  # client's original component count, so extra server components were
  # ignored (e.g. client '1.5' vs server '1.5.1' reported up to date).
  for client_part, server_part in zip(client_split, server_split):
    if 'a' in client_part or 'b' in client_part:
      # Using an alpha/beta version, don't check
      return True
    client = int(client_part)
    server = int(server_part)
    if client < server:
      return False
    elif server < client:
      return True
  return True
def upload_files(self,
                 request,
                 resources,
                 folder,
                 blob_type,
                 upload_context,
                 quiet=False,
                 dir_mode='skip'):
  """ upload files in a folder
  Parameters
  ==========
  request: the prepared request
  resources: the files to upload
  folder: the folder to upload from
  blob_type (ApiBlobType): To which entity the file/blob refers
  upload_context (ResumableUploadContext): Context for resumable uploads
  quiet: suppress verbose output (default is False)
  dir_mode: what to do with directories: "skip" - ignore; "zip"/"tar" - archive and upload
  """
  # The metadata files themselves must never be uploaded as content.
  metadata_names = (self.DATASET_METADATA_FILE,
                    self.OLD_DATASET_METADATA_FILE,
                    self.KERNEL_METADATA_FILE, self.MODEL_METADATA_FILE,
                    self.MODEL_INSTANCE_METADATA_FILE)
  for file_name in os.listdir(folder):
    if file_name in metadata_names:
      continue
    upload_file = self._upload_file_or_folder(folder, file_name, blob_type,
                                              upload_context, dir_mode, quiet,
                                              resources)
    if upload_file is not None:
      request.files.append(upload_file)
def _upload_file_or_folder(self,
parent_path,
file_or_folder_name,
blob_type,
upload_context,
dir_mode,
quiet=False,
resources=None):
full_path = os.path.join(parent_path, file_or_folder_name)
if os.path.isfile(full_path):
return self._upload_file(file_or_folder_name, full_path, blob_type,
upload_context, quiet, resources)
elif os.path.isdir(full_path):
if dir_mode in ['zip', 'tar']:
with DirectoryArchive(full_path, dir_mode) as archive:
return self._upload_file(archive.name, archive.path, blob_type,
upload_context, quiet, resources)
elif not quiet:
print("Skipping folder: " + file_or_folder_name +
"; use '--dir-mode' to upload folders")
else:
if not quiet:
print('Skipping: ' + file_or_folder_name)
return None
def _upload_file(self, file_name, full_path, blob_type, upload_context, quiet,
resources):
""" Helper function to upload a single file
Parameters
==========
file_name: name of the file to upload
full_path: path to the file to upload
blob_type (ApiBlobType): To which entity the file/blob refers
upload_context (ResumableUploadContext): Context for resumable uploads
quiet: suppress verbose output
resources: optional file metadata
:return: None - upload unsuccessful; instance of UploadFile - upload successful
"""
if not quiet:
print('Starting upload for file ' + file_name)
content_length = os.path.getsize(full_path)
token = self._upload_blob(full_path, quiet, blob_type, upload_context)
if token is None:
if not quiet:
print('Upload unsuccessful: ' + file_name)
return None
if not quiet:
print('Upload successful: ' + file_name + ' (' +
File.get_size(content_length) + ')')
upload_file = UploadFile()
upload_file.token = token
if resources:
for item in resources:
if file_name == item.get('path'):
upload_file.description = item.get('description')
if 'schema' in item:
fields = self.get_or_default(item['schema'], 'fields', [])
processed = []
count = 0
for field in fields:
processed.append(self.process_column(field))
processed[count].order = count
count += 1
upload_file.columns = processed
return upload_file
def process_column(self, column):
  """ process a column, check for the type, and return the processed
  column
  Parameters
  ==========
  column: a list of values in a column to be processed
  """
  processed_column = DatasetColumn(
      name=self.get_or_fail(column, 'name'),
      description=self.get_or_default(column, 'description', ''))
  if 'type' not in column:
    return processed_column

  original_type = column['type'].lower()
  processed_column.original_type = original_type
  # Map the declared type onto the small set the server understands.
  string_like = ('string', 'date', 'time', 'yearmonth', 'duration',
                 'geopoint', 'geojson')
  numeric_like = ('numeric', 'number', 'year')
  if original_type in string_like:
    processed_column.type = 'string'
  elif original_type in numeric_like:
    processed_column.type = 'numeric'
  else:
    # 'boolean' and 'datetime' pass through unchanged; possibly extended
    # data types are also passed through for the server to interpret.
    processed_column.type = original_type
  return processed_column
def upload_complete(self, path, url, quiet, resume=False):
    """ Complete an upload by streaming the file at ``path`` to ``url``
    with a PUT, optionally resuming a previously interrupted upload.

    Parameters
    ==========
    path: the path for the upload that is read in
    url: the url to send the PUT to
    quiet: suppress verbose output (default is False)
    resume: if True, first ask the server how many bytes it already
        has and continue from that offset

    Returns one of ResumableUploadResult.COMPLETE / INCOMPLETE / FAILED.
    """
    file_size = os.path.getsize(path)
    resumable_upload_result = ResumableUploadResult.Incomplete()
    try:
        if resume:
            resumable_upload_result = self._resume_upload(path, url, file_size,
                                                          quiet)
            if resumable_upload_result.result != ResumableUploadResult.INCOMPLETE:
                # Either already complete or unrecoverable - nothing to send.
                return resumable_upload_result.result
        start_at = resumable_upload_result.start_at
        upload_size = file_size - start_at
        with tqdm(
                total=upload_size,
                unit='B',
                unit_scale=True,
                unit_divisor=1024,
                disable=quiet) as progress_bar:
            # buffering=0 so the progress bar tracks actual bytes read.
            with io.open(path, 'rb', buffering=0) as fp:
                session = requests.Session()
                if start_at > 0:
                    # Resuming: skip what the server already has and tell it
                    # which byte range this request carries.
                    fp.seek(start_at)
                    session.headers.update({
                        'Content-Length':
                            '%d' % upload_size,
                        'Content-Range':
                            'bytes %d-%d/%d' % (start_at, file_size - 1, file_size)
                    })
                reader = TqdmBufferedReader(fp, progress_bar)
                retries = Retry(total=10, backoff_factor=0.5)
                adapter = HTTPAdapter(max_retries=retries)
                session.mount('http://', adapter)
                session.mount('https://', adapter)
                response = session.put(url, data=reader)
                if self._is_upload_successful(response):
                    return ResumableUploadResult.COMPLETE
                if response.status_code == 503:
                    # Service unavailable - worth resuming later.
                    return ResumableUploadResult.INCOMPLETE
                # Server returned a non-resumable error so give up.
                return ResumableUploadResult.FAILED
    except Exception as error:
        print(error)
        # There is probably some weird bug in our code so try to resume the upload
        # in case it works on the next try.
        return ResumableUploadResult.INCOMPLETE
def _resume_upload(self, path, url, content_length, quiet):
    """Query the upload server for how much of the file it already has.

    Implements the status-check step of the resumable upload protocol:
    https://developers.google.com/drive/api/guides/manage-uploads#resume-upload

    Parameters
    ==========
    path: local file path (used only for user-facing messages)
    url: the resumable upload session url
    content_length: total size of the file in bytes
    quiet: suppress verbose output

    Returns a ResumableUploadResult: Complete, Failed, or Incomplete
    (carrying the number of bytes the server already holds).
    """
    session = requests.Session()
    # An empty PUT with 'Content-Range: bytes */<total>' asks for status.
    session.headers.update({
        'Content-Length': '0',
        'Content-Range': 'bytes */%d' % content_length,
    })
    response = session.put(url)
    if self._is_upload_successful(response):
        return ResumableUploadResult.Complete()
    if response.status_code == 404:
        # Upload expired so need to start from scratch.
        if not quiet:
            print('Upload of %s expired. Please try again.' % path)
        return ResumableUploadResult.Failed()
    if response.status_code == 308:  # Resume Incomplete
        bytes_uploaded = self._get_bytes_already_uploaded(response, quiet)
        if bytes_uploaded is None:
            # There is an error with the Range header so need to start from scratch.
            return ResumableUploadResult.Failed()
        result = ResumableUploadResult.Incomplete(bytes_uploaded)
        if not quiet:
            print('Already uploaded %d bytes. Will resume upload at %d.' %
                  (result.bytes_uploaded, result.start_at))
        return result
    else:
        # Any other status is unexpected here; treat as unrecoverable.
        if not quiet:
            print('Server returned %d. Please try again.' % response.status_code)
        return ResumableUploadResult.Failed()
def _is_upload_successful(self, response):
return response.status_code == 200 or response.status_code == 201
def _get_bytes_already_uploaded(self, response, quiet):
range_val = response.headers.get('Range')
if range_val is None:
return 0 # This means server hasn't received anything before.
items = range_val.split('-') # Example: bytes=0-1000 => ['0', '1000']
if len(items) != 2:
if not quiet:
print('Invalid Range header format: %s. Will try again.' % range_val)
return None # Shouldn't happen, something's wrong with Range header format.
bytes_uploaded_str = items[-1] # Example: ['0', '1000'] => '1000'
try:
return int(bytes_uploaded_str) # Example: '1000' => 1000
except ValueError:
if not quiet:
print('Invalid Range header format: %s. Will try again.' % range_val)
return None # Shouldn't happen, something's wrong with Range header format.
def validate_dataset_string(self, dataset):
    """Raise ValueError unless *dataset* looks like
    '{username}/{dataset-slug}' or
    '{username}/{dataset-slug}/{version-number}'.

    Parameters
    ==========
    dataset: the dataset name to validate (falsy values are accepted)
    """
    if not dataset:
        return
    if '/' not in dataset:
        raise ValueError('Dataset must be specified in the form of '
                         '\'{username}/{dataset-slug}\'')
    parts = dataset.split('/')
    if len(parts) > 3 or not parts[0] or not parts[1]:
        raise ValueError('Invalid dataset specification ' + dataset)
def split_dataset_string(self, dataset):
    """Split a dataset identifier into (owner_slug, dataset_slug,
    version_number); version_number is None when absent.

    Parameters
    ==========
    dataset: the dataset name to split; when it contains no '/', the
        configured username is used as the owner
    """
    if '/' not in dataset:
        return self.get_config_value(self.CONFIG_NAME_USER), dataset, None
    self.validate_dataset_string(dataset)
    parts = dataset.split('/')
    if len(parts) == 3:
        return parts[0], parts[1], parts[2]
    return parts[0], parts[1], None
def validate_model_string(self, model):
    """Raise ValueError unless *model* is exactly of the form
    '{owner}/{model-slug}'.

    NOTE(review): a second ``validate_model_string`` defined later in
    this file appears to shadow this one with looser rules - confirm
    which definition is intended.

    Parameters
    ==========
    model: the model name to validate (falsy values are accepted)
    """
    if not model:
        return
    if model.count('/') != 1:
        raise ValueError('Model must be specified in the form of '
                         '\'{owner}/{model-slug}\'')
    owner, slug = model.split('/')
    if not owner or not slug:
        raise ValueError('Invalid model specification ' + model)
def split_model_string(self, model):
    """Split a model identifier into (owner_slug, model_slug).

    Parameters
    ==========
    model: the model name to split; when it contains no '/', the
        configured username is used as the owner
    """
    if '/' not in model:
        return self.get_config_value(self.CONFIG_NAME_USER), model
    self.validate_model_string(model)
    parts = model.split('/')
    # Indexing (not unpacking) tolerates any extra trailing segments
    # the active validator may allow.
    return parts[0], parts[1]
def validate_model_instance_string(self, model_instance):
    """Raise ValueError unless *model_instance* is of the form
    '{owner}/{model-slug}/{framework}/{instance-slug}'.

    Parameters
    ==========
    model_instance: the model instance name to validate (falsy values
        are accepted)
    """
    if not model_instance:
        return
    if model_instance.count('/') != 3:
        raise ValueError('Model instance must be specified in the form of '
                         '\'{owner}/{model-slug}/{framework}/{instance-slug}\'')
    # Every one of the four segments must be non-empty.
    if not all(model_instance.split('/')):
        raise ValueError('Invalid model instance specification ' +
                         model_instance)
def split_model_instance_string(self, model_instance):
    """Split a model instance identifier into (owner_slug, model_slug,
    framework, instance_slug).

    Parameters
    ==========
    model_instance: the model instance name to split
    """
    self.validate_model_instance_string(model_instance)
    parts = model_instance.split('/')
    return parts[0], parts[1], parts[2], parts[3]
def validate_model_instance_version_string(self, model_instance_version):
    """Raise ValueError unless *model_instance_version* is of the form
    '{owner}/{model-slug}/{framework}/{instance-slug}/{version-number}'
    with an integer version number.

    Parameters
    ==========
    model_instance_version: the model instance version name to validate
        (falsy values are accepted)
    """
    if model_instance_version:
        if model_instance_version.count('/') != 4:
            raise ValueError(
                'Model instance version must be specified in the form of '
                '\'{owner}/{model-slug}/{framework}/{instance-slug}/{version-number}\''
            )
        split = model_instance_version.split('/')
        if not split[0] or not split[1] or not split[2] or not split[
                3] or not split[4]:
            raise ValueError('Invalid model instance version specification ' +
                             model_instance_version)
        try:
            int(split[4])
        except ValueError:
            # Narrowed from a bare `except:`: int() on a str segment can
            # only raise ValueError here, and a bare except would also
            # swallow KeyboardInterrupt/SystemExit.
            raise ValueError(
                'Model instance version\'s version-number must be an integer')
def validate_kernel_string(self, kernel):
    """Raise ValueError unless *kernel* is of the form
    '{username}/{kernel-slug}' with a slug of at least five characters.

    Parameters
    ==========
    kernel: the kernel name to validate (falsy values are accepted)
    """
    if not kernel:
        return
    if '/' not in kernel:
        raise ValueError('Kernel must be specified in the form of '
                         '\'{username}/{kernel-slug}\'')
    parts = kernel.split('/')
    if not parts[0] or not parts[1]:
        raise ValueError('Kernel must be specified in the form of '
                         '\'{username}/{kernel-slug}\'')
    if len(parts[1]) < 5:
        raise ValueError('Kernel slug must be at least five characters')
def validate_model_string(self, model):
    """Raise ValueError unless *model* starts with
    '{username}/{model-slug}'; extra '/'-separated segments such as
    framework/variation/version are tolerated.

    NOTE(review): this re-definition shadows an earlier, stricter
    ``validate_model_string`` in this file - confirm the duplication is
    intentional.

    Parameters
    ==========
    model: the model name to validate (falsy values are accepted)
    """
    if not model:
        return
    if '/' not in model:
        raise ValueError(
            'Model must be specified in the form of '
            '\'{username}/{model-slug}/{framework}/{variation-slug}/{version-number}\''
        )
    parts = model.split('/')
    if not parts[0] or not parts[1]:
        raise ValueError('Invalid model specification ' + model)
def validate_resources(self, folder, resources):
    """Validate a folder's resources: every listed file must exist on
    disk and no path may be listed twice.

    Parameters
    ==========
    folder: the folder to validate
    resources: one or more resources to validate within the folder
    """
    self.validate_files_exist(folder, resources)
    self.validate_no_duplicate_paths(resources)
def validate_files_exist(self, folder, resources):
    """Ensure each resource's 'path' exists as a file under *folder*.

    Parameters
    ==========
    folder: the folder to validate
    resources: one or more resources to validate within the folder

    Raises ValueError for the first path that is not a file.
    """
    for resource in resources:
        full_path = os.path.join(folder, resource.get('path'))
        if not os.path.isfile(full_path):
            raise ValueError('%s does not exist' % full_path)
def validate_no_duplicate_paths(self, resources):
    """Raise ValueError when any 'path' appears more than once in
    *resources*.

    Parameters
    ==========
    resources: one or more resources to validate not duplicated
    """
    seen = set()
    for resource in resources:
        path = resource.get('path')
        if path in seen:
            raise ValueError(
                '%s path was specified more than once in the metadata' % path)
        seen.add(path)
def convert_to_dataset_file_metadata(self, file_data, path):
    """Convert API file data into the dataset metadata-file shape.

    Parameters
    ==========
    file_data: a dictionary with 'name', 'description' and 'columns'
    path: the folder joined onto the file name for the 'path' entry

    Returns a dictionary with 'path', 'description', and a 'schema'
    holding one field per column.
    """
    fields = [{
        'name': column['name'],
        'title': column['description'],
        'type': column['type']
    } for column in file_data['columns']]
    return {
        'path': os.path.join(path, file_data['name']),
        'description': file_data['description'],
        'schema': {
            'fields': fields
        },
    }
def validate_date(self, date):
    """Raise ValueError unless *date* parses as a 'YYYY-MM-DD' string."""
    datetime.strptime(date, "%Y-%m-%d")
def sanitize_markdown(self, markdown):
    """Return *markdown* with disallowed HTML escaped via bleach.clean."""
    return bleach.clean(markdown)
def confirmation(self):
    """Interactively ask 'Are you sure?' on stdout and read the answer
    from stdin.

    Returns True for 'yes' and False for 'no' (case-insensitive);
    re-prompts on any other input.
    """
    question = "Are you sure?"
    prompt = "[yes/no]"
    options = {"yes": True, "no": False}
    # Loop until the user types one of the accepted answers; the only
    # exits are the returns below. (An unreachable trailing
    # `return False` after this loop was removed.)
    while True:
        sys.stdout.write('{} {} '.format(question, prompt))
        choice = input().lower()
        if choice in options:
            return options[choice]
        sys.stdout.write("Please respond with 'yes' or 'no'.\n")
|
class KaggleApi:
def _is_retriable(self, e):
pass
def _calculate_backoff_delay(self, attempt, initial_delay_millis,
retry_multiplier, randomness_factor):
pass
def with_retry(self,
func,
max_retries=10,
initial_delay_millis=500,
retry_multiplier=1.7,
randomness_factor=0.5):
pass
def retriable_func(*args):
pass
def authenticate(self):
'''authenticate the user with the Kaggle API. This method will generate
a configuration, first checking the environment for credential
variables, and falling back to looking for the .kaggle/kaggle.json
configuration file.
'''
pass
def _is_help_or_version_command(self, api_command):
'''determines if the string command passed in is for a help or version
command.
Parameters
==========
api_command: a string, 'datasets list', 'competitions files',
'models instances get', etc.
'''
pass
def read_config_environment(self, config_data=None, quiet=False):
'''read_config_environment is the second effort to get a username
and key to authenticate to the Kaggle API. The environment keys
are equivalent to the kaggle.json file, but with "KAGGLE_" prefix
to define a unique namespace.
Parameters
==========
config_data: a partially loaded configuration dictionary (optional)
quiet: suppress verbose print of output (default is False)
'''
pass
def _load_config(self, config_data):
'''the final step of the authenticate steps, where we load the values
from config_data into the Configuration object.
Parameters
==========
config_data: a dictionary with configuration values (keys) to read
into self.config_values
'''
pass
def read_config_file(self, config_data=None, quiet=False):
'''read_config_file is the first effort to get a username
and key to authenticate to the Kaggle API. Since we can get the
username and password from the environment, it's not required.
Parameters
==========
config_data: the Configuration object to save a username and
password, if defined
quiet: suppress verbose print of output (default is False)
'''
pass
def _read_config_file(self):
'''read in the configuration file, a json file defined at self.config'''
pass
def _write_config_file(self, config_data, indent=2):
'''write config data to file.
Parameters
==========
config_data: the Configuration object to save a username and
password, if defined
indent: number of tab indentations to use when writing json
'''
pass
def set_config_value(self, name, value, quiet=False):
'''a client helper function to set a configuration value, meaning
reading in the configuration file (if it exists), saving a new
config value, and then writing back
Parameters
==========
name: the name of the value to set (key in dictionary)
value: the value to set at the key
quiet: disable verbose output if True (default is False)
'''
pass
def unset_config_value(self, name, quiet=False):
'''unset a configuration value
Parameters
==========
name: the name of the value to unset (remove key in dictionary)
quiet: disable verbose output if True (default is False)
'''
pass
def get_config_value(self, name):
''' return a config value (with key name) if it's in the config_values,
otherwise return None
Parameters
==========
name: the config value key to get
'''
pass
def get_default_download_dir(self, *subdirs):
''' Get the download path for a file. If not defined, return default
from config.
Parameters
==========
subdirs: a single (or list of) subfolders under the basepath
'''
pass
def print_config_value(self, name, prefix='- ', separator=': '):
'''print a single configuration value, based on a prefix and separator
Parameters
==========
name: the key of the config valur in self.config_values to print
prefix: the prefix to print
separator: the separator to use (default is : )
'''
pass
def print_config_values(self, prefix='- '):
'''a wrapper to print_config_value to print all configuration values
Parameters
==========
prefix: the character prefix to put before the printed config value
defaults to "- "
'''
pass
def build_kaggle_client(self):
pass
def camel_to_snake(self, name):
'''
:param name: field in camel case
:return: field in snake case
'''
pass
def lookup_enum(self, enum_class, item_name):
pass
def short_enum_name(self, value):
pass
def competitions_list(self,
group=None,
category=None,
sort_by=None,
page=1,
search=None):
''' Make a call to list competitions, format the response, and return
a list of ApiCompetition instances
Parameters
==========
page: the page to return (default is 1)
search: a search term to use (default is empty string)
sort_by: how to sort the result, see valid_competition_sort_by for options
category: category to filter result to; use 'all' to get closed competitions
group: group to filter result to
'''
pass
def competitions_list_cli(self,
group=None,
category=None,
sort_by=None,
page=1,
search=None,
csv_display=False):
''' A wrapper for competitions_list for the client.
Parameters
==========
group: group to filter result to
category: category to filter result to
sort_by: how to sort the result, see valid_sort_by for options
page: the page to return (default is 1)
search: a search term to use (default is empty string)
csv_display: if True, print comma separated values
'''
pass
def competition_submit_code(self, file_name, message, competition, kernel_slug=None, kernel_version=None, quiet=False):
''' Submit a competition.
Parameters
==========
file_name: the name of the output file created by the kernel
message: the submission description
competition: the competition name; if not given use the 'competition' config value
kernel_slug: the <owner>/<notebook> of the notebook to use for a code competition
kernel_version: the version number, returned by 'kaggle kernels push ...'
quiet: suppress verbose output (default is False)
'''
pass
def competition_submit_code(self, file_name, message, competition, kernel_slug=None, kernel_version=None, quiet=False):
''' Submit a competition.
Parameters
==========
file_name: the competition metadata file
message: the submission description
competition: the competition name; if not given use the 'competition' config value
quiet: suppress verbose output (default is False)
'''
pass
def competition_submit_cli(self,
file_name,
message,
competition,
kernel=None,
version=None,
competition_opt=None,
quiet=False):
''' Submit a competition using the client. Arguments are same as for
competition_submit, except for extra arguments provided here.
Parameters
==========
file_name: the competition metadata file
message: the submission description
competition: the competition name; if not given use the 'competition' config value
kernel: the name of the kernel to submit to a code competition
version: the version of the kernel to submit to a code competition, e.g. '1'
quiet: suppress verbose output (default is False)
competition_opt: an alternative competition option provided by cli
'''
pass
def competition_submissions(self,
competition,
group=None,
sort=None,
page_token=0,
page_size=20):
''' Get the list of Submission for a particular competition.
Parameters
==========
competition: the name of the competition
group: the submission group
sort: the sort-by option
page_token: token for pagination
page_size: the number of items per page
'''
pass
def competition_submissions_cli(self,
competition=None,
competition_opt=None,
csv_display=False,
page_token=None,
page_size=20,
quiet=False):
''' A wrapper to competition_submission, will return either json or csv
to the user. Additional parameters are listed below, see
competition_submissions for rest.
Parameters
==========
competition: the name of the competition. If None, look to config
competition_opt: an alternative competition option provided by cli
csv_display: if True, print comma separated values
page_token: token for pagination
page_size: the number of items per page
quiet: suppress verbose output (default is False)
'''
pass
def competition_list_files(self, competition, page_token=None, page_size=20):
''' List files for a competition.
Parameters
==========
competition: the name of the competition
page_token: the page token for pagination
page_size: the number of items per page
'''
pass
def competition_list_files_cli(self,
competition,
competition_opt=None,
csv_display=False,
page_token=None,
page_size=20,
quiet=False):
''' List files for a competition, if it exists.
Parameters
==========
competition: the name of the competition. If None, look to config
competition_opt: an alternative competition option provided by cli
csv_display: if True, print comma separated values
page_token: the page token for pagination
page_size: the number of items per page
quiet: suppress verbose output (default is False)
'''
pass
def competition_download_file(self,
competition,
file_name,
path=None,
force=False,
quiet=False):
''' Download a competition file to a designated location, or use
a default location.
Parameters
=========
competition: the name of the competition
file_name: the configuration file name
path: a path to download the file to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is False)
'''
pass
def competition_download_files(self,
competition,
path=None,
force=False,
quiet=True):
''' Download all competition files.
Parameters
=========
competition: the name of the competition
path: a path to download the file to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is True)
'''
pass
def competition_download_cli(self,
competition,
competition_opt=None,
file_name=None,
path=None,
force=False,
quiet=False):
''' A wrapper to competition_download_files, but first will parse input
from API client. Additional parameters are listed here, see
competition_download for remaining.
Parameters
=========
competition: the name of the competition
competition_opt: an alternative competition option provided by cli
file_name: the configuration file name
path: a path to download the file to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is False)
'''
pass
def competition_leaderboard_download(self, competition, path, quiet=True):
''' Download a competition leaderboard.
Parameters
=========
competition: the name of the competition
path: a path to download the file to
quiet: suppress verbose output (default is True)
'''
pass
def competition_leaderboard_view(self, competition):
''' View a leaderboard based on a competition name.
Parameters
==========
competition: the competition name to view leadboard for
'''
pass
def competition_leaderboard_cli(self,
competition,
competition_opt=None,
path=None,
view=False,
download=False,
csv_display=False,
quiet=False):
''' A wrapper for competition_leaderbord_view that will print the
results as a table or comma separated values
Parameters
==========
competition: the competition name to view leadboard for
competition_opt: an alternative competition option provided by cli
path: a path to download to, if download is True
view: if True, show the results in the terminal as csv or table
download: if True, download the entire leaderboard
csv_display: if True, print comma separated values instead of table
quiet: suppress verbose output (default is False)
'''
pass
def dataset_list(self,
sort_by=None,
size=None,
file_type=None,
license_name=None,
tag_ids=None,
search=None,
user=None,
mine=False,
page=1,
max_size=None,
min_size=None):
''' Return a list of datasets.
Parameters
==========
sort_by: how to sort the result, see valid_dataset_sort_bys for options
size: Deprecated
file_type: the format, see valid_dataset_file_types for string options
license_name: string descriptor for license, see valid_dataset_license_names
tag_ids: tag identifiers to filter the search
search: a search term to use (default is empty string)
user: username to filter the search to
mine: boolean if True, group is changed to "my" to return personal
page: the page to return (default is 1)
max_size: the maximum size of the dataset to return (bytes)
min_size: the minimum size of the dataset to return (bytes)
'''
pass
def dataset_list_cli(self,
sort_by=None,
size=None,
file_type=None,
license_name=None,
tag_ids=None,
search=None,
user=None,
mine=False,
page=1,
csv_display=False,
max_size=None,
min_size=None):
''' A wrapper to dataset_list for the client. Additional parameters
are described here, see dataset_list for others.
Parameters
==========
sort_by: how to sort the result, see valid_dataset_sort_bys for options
size: DEPRECATED
file_type: the format, see valid_dataset_file_types for string options
license_name: string descriptor for license, see valid_dataset_license_names
tag_ids: tag identifiers to filter the search
search: a search term to use (default is empty string)
user: username to filter the search to
mine: boolean if True, group is changed to "my" to return personal
page: the page to return (default is 1)
csv_display: if True, print comma separated values instead of table
max_size: the maximum size of the dataset to return (bytes)
min_size: the minimum size of the dataset to return (bytes)
'''
pass
def dataset_metadata_prep(self, dataset, path):
pass
def dataset_metadata_update(self, dataset, path):
pass
@staticmethod
def _new_license(name):
pass
@staticmethod
def _new_collaborator(name, role):
pass
def dataset_metadata_prep(self, dataset, path):
pass
def dataset_metadata_cli(self, dataset, path, update, dataset_opt=None):
pass
def dataset_list_files(self, dataset, page_token=None, page_size=20):
''' List files for a dataset.
Parameters
==========
dataset: the string identified of the dataset
should be in format [owner]/[dataset-name]
page_token: the page token for pagination
page_size: the number of items per page
'''
pass
def dataset_list_files_cli(self,
dataset,
dataset_opt=None,
csv_display=False,
page_token=None,
page_size=20):
''' A wrapper to dataset_list_files for the client
(list files for a dataset).
Parameters
==========
dataset: the string identified of the dataset
should be in format [owner]/[dataset-name]
dataset_opt: an alternative option to providing a dataset
csv_display: if True, print comma separated values instead of table
page_token: the page token for pagination
page_size: the number of items per page
'''
pass
def dataset_status(self, dataset):
''' Call to get the status of a dataset from the API.
Parameters
==========
dataset: the string identifier of the dataset
should be in format [owner]/[dataset-name]
'''
pass
def dataset_status_cli(self, dataset, dataset_opt=None):
''' A wrapper for client for dataset_status, with additional
dataset_opt to get the status of a dataset from the API.
Parameters
==========
dataset_opt: an alternative to dataset
'''
pass
def dataset_download_file(self,
dataset,
file_name,
path=None,
force=False,
quiet=True,
licenses=[]):
''' Download a single file for a dataset.
Parameters
==========
dataset: the string identified of the dataset
should be in format [owner]/[dataset-name]
file_name: the dataset configuration file
path: if defined, download to this location
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is True)
licenses: a list of license names, e.g. ['CC0-1.0']
'''
pass
def dataset_download_files(self,
dataset,
path=None,
force=False,
quiet=True,
unzip=False,
licenses=[]):
''' Download all files for a dataset.
Parameters
==========
dataset: the string identified of the dataset
should be in format [owner]/[dataset-name]
path: the path to download the dataset to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is True)
unzip: if True, unzip files upon download (default is False)
licenses: a list of license names, e.g. ['CC0-1.0']
'''
pass
def _print_dataset_url_and_license(self, owner_slug, dataset_slug,
dataset_version_number, licenses):
pass
def dataset_download_cli(self,
dataset,
dataset_opt=None,
file_name=None,
path=None,
unzip=False,
force=False,
quiet=False):
''' Client wrapper for dataset_download_files and download dataset file,
either for a specific file (when file_name is provided),
or all files for a dataset (plural).
Parameters
==========
dataset: the string identified of the dataset
should be in format [owner]/[dataset-name]
dataset_opt: an alternative option to providing a dataset
file_name: the dataset configuration file
path: the path to download the dataset to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is False)
unzip: if True, unzip files upon download (default is False)
'''
pass
def _upload_blob(self, path, quiet, blob_type, upload_context):
''' Upload a file.
Parameters
==========
path: the complete path to upload
quiet: suppress verbose output (default is False)
blob_type (ApiBlobType): To which entity the file/blob refers
upload_context (ResumableUploadContext): Context for resumable uploads
'''
pass
def dataset_create_version(self,
folder,
version_notes,
quiet=False,
convert_to_csv=True,
delete_old_versions=False,
dir_mode='skip'):
''' Create a version of a dataset.
Parameters
==========
folder: the folder with the dataset configuration / data files
version_notes: notes to add for the version
quiet: suppress verbose output (default is False)
convert_to_csv: on upload, if data should be converted to csv
delete_old_versions: if True, do that (default False)
dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def _api_dataset_new_file(self, file):
pass
def dataset_create_version_cli(self,
folder,
version_notes,
quiet=False,
convert_to_csv=True,
delete_old_versions=False,
dir_mode='skip'):
''' client wrapper for creating a version of a dataset
Parameters
==========
folder: the folder with the dataset configuration / data files
version_notes: notes to add for the version
quiet: suppress verbose output (default is False)
convert_to_csv: on upload, if data should be converted to csv
delete_old_versions: if True, do that (default False)
dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def dataset_initialize(self, folder):
''' initialize a folder with a a dataset configuration (metadata) file
Parameters
==========
folder: the folder to initialize the metadata file in
'''
pass
def dataset_initialize_cli(self, folder=None):
pass
def dataset_create_new(self,
folder,
public=False,
quiet=False,
convert_to_csv=True,
dir_mode='skip'):
''' Create a new dataset, meaning the same as creating a version but
with extra metadata like license and user/owner.
Parameters
==========
folder: the folder to get the metadata file from
public: should the dataset be public?
quiet: suppress verbose output (default is False)
convert_to_csv: if True, convert data to comma separated value
dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def dataset_create_new_cli(self,
folder=None,
public=False,
quiet=False,
convert_to_csv=True,
dir_mode='skip'):
''' client wrapper for creating a new dataset
Parameters
==========
folder: the folder to get the metadata file from
public: should the dataset be public?
quiet: suppress verbose output (default is False)
convert_to_csv: if True, convert data to comma separated value
dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def download_file(self,
response,
outfile,
http_client,
quiet=True,
resume=False,
chunk_size=1048576):
''' download a file to an output file based on a chunk size
Parameters
==========
response: the response to download
outfile: the output file to download to
http_client: the Kaggle http client to use
quiet: suppress verbose output (default is True)
chunk_size: the size of the chunk to stream
resume: whether to resume an existing download
'''
pass
def kernels_list(self,
page=1,
page_size=20,
dataset=None,
competition=None,
parent_kernel=None,
search=None,
mine=False,
user=None,
language=None,
kernel_type=None,
output_type=None,
sort_by=None):
''' List kernels based on a set of search criteria.
Parameters
==========
page: the page of results to return (default is 1)
page_size: results per page (default is 20)
dataset: if defined, filter to this dataset (default None)
competition: if defined, filter to this competition (default None)
parent_kernel: if defined, filter to those with specified parent
search: a custom search string to pass to the list query
mine: if true, group is specified as "my" to return personal kernels
user: filter results to a specific user
language: the programming language of the kernel
kernel_type: the type of kernel, one of valid_list_kernel_types (str)
output_type: the output type, one of valid_list_output_types (str)
sort_by: if defined, sort results by this string (valid_list_sort_by)
'''
pass
def kernels_list_cli(self,
mine=False,
page=1,
page_size=20,
search=None,
csv_display=False,
parent=None,
competition=None,
dataset=None,
user=None,
language=None,
kernel_type=None,
output_type=None,
sort_by=None):
''' Client wrapper for kernels_list, see this function for arguments.
Additional arguments are provided here.
Parameters
==========
csv_display: if True, print comma separated values instead of table
'''
pass
def kernels_list_files(self, kernel, page_token=None, page_size=20):
''' list files for a kernel
Parameters
==========
kernel: the string identifier of the kernel
should be in format [owner]/[kernel-name]
page_token: the page token for pagination
page_size: the number of items per page
'''
pass
def kernels_list_files_cli(self,
kernel,
kernel_opt=None,
csv_display=False,
page_token=None,
page_size=20):
''' A wrapper to kernel_list_files for the client.
(list files for a kernel)
Parameters
==========
kernel: the string identifier of the kernel
should be in format [owner]/[kernel-name]
kernel_opt: an alternative option to providing a kernel
csv_display: if True, print comma separated values instead of table
page_token: the page token for pagination
page_size: the number of items per page
'''
pass
def kernels_initialize(self, folder):
''' Create a new kernel in a specified folder from a template, including
json metadata that grabs values from the configuration.
Parameters
==========
folder: the path of the folder
'''
pass
def kernels_initialize_cli(self, folder=None):
''' A client wrapper for kernels_initialize. It takes same arguments but
sets default folder to be None. If None, defaults to present
working directory.
Parameters
==========
folder: the path of the folder (None defaults to ${PWD})
'''
pass
def kernels_push(self, folder, timeout=None) -> ApiSaveKernelResponse:
''' Read the metadata file and kernel files from a notebook, validate
both, and use the Kernel API to push to Kaggle if all is valid.
Parameters
==========
folder: the path of the folder
'''
pass
def kernels_push_cli(self, folder, timeout):
''' Client wrapper for kernels_push.
Parameters
==========
folder: the path of the folder
'''
pass
def kernels_pull(self, kernel, path, metadata=False, quiet=True):
''' Pull a kernel, including a metadata file (if metadata is True)
and associated files to a specified path.
Parameters
==========
kernel: the kernel to pull
path: the path to pull files to on the filesystem
metadata: if True, also pull metadata
quiet: suppress verbosity (default is True)
'''
pass
def kernels_pull_cli(self,
kernel,
kernel_opt=None,
path=None,
metadata=False):
''' Client wrapper for kernels_pull.
'''
pass
def kernels_output(self, kernel, path, force=False, quiet=True):
''' Retrieve the output for a specified kernel.
Parameters
==========
kernel: the kernel to output
path: the path to pull files to on the filesystem
force: if output already exists, force overwrite (default False)
quiet: suppress verbosity (default is True)
'''
pass
def kernels_output_cli(self,
kernel,
kernel_opt=None,
path=None,
force=False,
quiet=False):
''' Client wrapper for kernels_output, with same arguments. Extra
arguments are described below, and see kernels_output for others.
Parameters
==========
kernel_opt: option from client instead of kernel, if not defined
'''
pass
def kernels_status(self, kernel):
''' Call to the api to get the status of a kernel.
Parameters
==========
kernel: the kernel to get the status for
'''
pass
def kernels_status_cli(self, kernel, kernel_opt=None):
''' Client wrapper for kernel_status.
Parameters
==========
kernel_opt: additional option from the client, if kernel not defined
'''
pass
def model_get(self, model):
''' Get a model.
Parameters
==========
model: the string identifier of the model
should be in format [owner]/[model-name]
'''
pass
def model_get_cli(self, model, folder=None):
''' Clent wrapper for model_get, with additional
model_opt to get a model from the API.
Parameters
==========
model: the string identifier of the model
should be in format [owner]/[model-name]
folder: the folder to download the model metadata file
'''
pass
def model_list(self,
sort_by=None,
search=None,
owner=None,
page_size=20,
page_token=None):
''' Return a list of models.
Parameters
==========
sort_by: how to sort the result, see valid_model_sort_bys for options
search: a search term to use (default is empty string)
owner: username or organization slug to filter the search to
page_size: the page size to return (default is 20)
page_token: the page token for pagination
'''
pass
def model_list_cli(self,
sort_by=None,
search=None,
owner=None,
page_size=20,
page_token=None,
csv_display=False):
''' Client wrapper for model_list. Additional parameters
are described here, see model_list for others.
Parameters
==========
sort_by: how to sort the result, see valid_model_sort_bys for options
search: a search term to use (default is empty string)
owner: username or organization slug to filter the search to
page_size: the page size to return (default is 20)
page_token: the page token for pagination
csv_display: if True, print comma separated values instead of table
'''
pass
def model_initialize(self, folder):
''' Initialize a folder with a model configuration (metadata) file.
Parameters
==========
folder: the folder to initialize the metadata file in
'''
pass
def model_initialize_cli(self, folder=None):
pass
def model_create_new(self, folder):
''' Create a new model.
Parameters
==========
folder: the folder to get the metadata file from
'''
pass
def model_create_new_cli(self, folder=None):
''' Client wrapper for creating a new model.
Parameters
==========
folder: the folder to get the metadata file from
'''
pass
def model_delete(self, model, yes):
''' Delete a modeL.
Parameters
==========
model: the string identifier of the model
should be in format [owner]/[model-name]
yes: automatic confirmation
'''
pass
def model_delete_cli(self, model, yes):
''' Client wrapper for deleting a model.
Parameters
==========
model: the string identified of the model
should be in format [owner]/[model-name]
yes: automatic confirmation
'''
pass
def model_update(self, folder):
''' Update a model.
Parameters
==========
folder: the folder to get the metadata file from
'''
pass
def model_update_cli(self, folder=None):
''' Client wrapper for updating a model.
Parameters
==========
folder: the folder to get the metadata file from
'''
pass
def model_instance_get(self, model_instance):
''' Get a model instance.
Parameters
==========
model_instance: the string identifier of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
'''
pass
def model_instance_get_cli(self, model_instance, folder=None):
''' Client wrapper for model_instance_get.
Parameters
==========
model_instance: the string identifier of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
folder: the folder to download the model metadata file
'''
pass
def model_instance_initialize(self, folder):
''' Initialize a folder with a model instance configuration (metadata) file.
Parameters
==========
folder: the folder to initialize the metadata file in
'''
pass
def model_instance_initialize_cli(self, folder):
pass
def model_instance_create(self, folder, quiet=False, dir_mode='skip'):
''' Create a new model instance.
Parameters
==========
folder: the folder to get the metadata file from
quiet: suppress verbose output (default is False)
dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def model_instance_create_cli(self, folder, quiet=False, dir_mode='skip'):
''' Client wrapper for creating a new model instance.
Parameters
==========
folder: the folder to get the metadata file from
quiet: suppress verbose output (default is False)
dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def model_instance_delete(self, model_instance, yes):
''' Delete a model instance.
Parameters
==========
model_instance: the string identified of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
yes: automatic confirmation
'''
pass
def model_instance_delete_cli(self, model_instance, yes):
''' Client wrapper for model_instance_delete.
Parameters
==========
model_instance: the string identified of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
yes: automatic confirmation
'''
pass
def model_instance_files(self,
model_instance,
page_token=None,
page_size=20,
csv_display=False):
''' List files for the current version of a model instance.
Parameters
==========
model_instance: the string identifier of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
page_token: token for pagination
page_size: the number of items per page
csv_display: if True, print comma separated values instead of table
'''
pass
def model_instance_files_cli(self,
model_instance,
page_token=None,
page_size=20,
csv_display=False):
''' Client wrapper for model_instance_files.
Parameters
==========
model_instance: the string identified of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]
page_token: token for pagination
page_size: the number of items per page
csv_display: if True, print comma separated values instead of table
'''
pass
def model_instance_update(self, folder):
''' Update a model instance.
Parameters
==========
folder: the folder to get the metadata file from
'''
pass
def model_instance_update_cli(self, folder=None):
''' Client wrapper for updating a model instance.
Parameters
==========
folder: the folder to get the metadata file from
'''
pass
def model_instance_version_create(self,
model_instance,
folder,
version_notes='',
quiet=False,
dir_mode='skip'):
''' Create a new model instance version.
Parameters
==========
model_instance: the string identified of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
folder: the folder to get the metadata file from
version_notes: the version notes to record for this new version
quiet: suppress verbose output (default is False)
dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def model_instance_version_create_cli(self,
model_instance,
folder,
version_notes='',
quiet=False,
dir_mode='skip'):
''' Client wrapper for creating a new version of a model instance.
Parameters
==========
model_instance: the string identifier of the model instance
should be in format [owner]/[model-name]/[framework]/[instance-slug]
folder: the folder to get the metadata file from
version_notes: the version notes to record for this new version
quiet: suppress verbose output (default is False)
dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload
'''
pass
def model_instance_version_download(self,
model_instance_version,
path=None,
force=False,
quiet=True,
untar=False):
''' Download all files for a model instance version.
Parameters
==========
model_instance_version: the string identifier of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
path: the path to download the model instance version to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is True)
untar: if True, untar files upon download (default is False)
'''
pass
def model_instance_version_download_cli(self,
model_instance_version,
path=None,
untar=False,
force=False,
quiet=False):
''' Client wrapper for model_instance_version_download.
Parameters
==========
model_instance_version: the string identifier of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
path: the path to download the model instance version to
force: force the download if the file already exists (default False)
quiet: suppress verbose output (default is False)
untar: if True, untar files upon download (default is False)
'''
pass
def model_instance_version_files(self,
model_instance_version,
page_token=None,
page_size=20,
csv_display=False):
''' List all files for a model instance version.
Parameters
==========
model_instance_version: the string identifier of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
page_token: token for pagination
page_size: the number of items per page
csv_display: if True, print comma separated values instead of table
'''
pass
def model_instance_version_files_cli(self,
model_instance_version,
page_token=None,
page_size=20,
csv_display=False):
''' Client wrapper for model_instance_version_files.
Parameters
==========
model_instance_version: the string identifier of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
page_token: token for pagination
page_size: the number of items per page
csv_display: if True, print comma separated values instead of table
'''
pass
def model_instance_version_delete(self, model_instance_version, yes):
''' Delete a model instance version.
Parameters
==========
model_instance_version: the string identifier of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
yes: automatic confirmation
'''
pass
def model_instance_version_delete_cli(self, model_instance_version, yes):
''' Client wrapper for model_instance_version_delete
Parameters
==========
model_instance_version: the string identified of the model instance version
should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number]
yes: automatic confirmation
'''
pass
def files_upload_cli(self, local_paths, inbox_path, no_resume, no_compress):
pass
def file_upload_cli(self, local_path, inbox_path, no_compress,
upload_context):
pass
def print_obj(self, obj, indent=2):
pass
def download_needed(self, response, outfile, quiet=True):
''' determine if a download is needed based on timestamp. Return True
if needed (remote is newer) or False if local is newest.
Parameters
==========
response: the response from the API
outfile: the output file to write to
quiet: suppress verbose output (default is True)
'''
pass
def print_table(self, items, fields, labels=None):
''' print a table of items, for a set of fields defined
Parameters
==========
items: a list of items to print
fields: a list of fields to select from items
labels: labels for the fields, defaults to fields
'''
pass
def print_csv(self, items, fields, labels=None):
''' print a set of fields in a set of items using a csv.writer
Parameters
==========
items: a list of items to print
fields: a list of fields to select from items
labels: labels for the fields, defaults to fields
'''
pass
def string(self, item):
pass
def get_or_fail(self, data, key):
pass
def get_or_default(self, data, key, default):
pass
def get_bool(self, data, key, default):
pass
def set_if_present(self, data, key, output, output_key):
pass
def get_dataset_metadata_file(self, folder):
pass
def get_model_metadata_file(self, folder):
pass
def get_model_instance_metadata_file(self, folder):
pass
def process_response(self, result):
''' process a response from the API. We check the API version against
the client's to see if it's old, and give them a warning (once)
Parameters
==========
result: the result from the API
'''
pass
def is_up_to_date(self, server_version):
''' determine if a client (on the local user's machine) is up to date
with the version provided on the server. Return a boolean with True
or False
Parameters
==========
server_version: the server version string to compare to the host
'''
pass
def upload_files(self,
request,
resources,
folder,
blob_type,
upload_context,
quiet=False,
dir_mode='skip'):
''' upload files in a folder
Parameters
==========
request: the prepared request
resources: the files to upload
folder: the folder to upload from
blob_type (ApiBlobType): To which entity the file/blob refers
upload_context (ResumableUploadContext): Context for resumable uploads
quiet: suppress verbose output (default is False)
'''
pass
def _upload_file_or_folder(self,
parent_path,
file_or_folder_name,
blob_type,
upload_context,
dir_mode,
quiet=False,
resources=None):
pass
def _upload_file_or_folder(self,
parent_path,
file_or_folder_name,
blob_type,
upload_context,
dir_mode,
quiet=False,
resources=None):
''' Helper function to upload a single file
Parameters
==========
file_name: name of the file to upload
full_path: path to the file to upload
blob_type (ApiBlobType): To which entity the file/blob refers
upload_context (ResumableUploadContext): Context for resumable uploads
quiet: suppress verbose output
resources: optional file metadata
:return: None - upload unsuccessful; instance of UploadFile - upload successful
'''
pass
def process_column(self, column):
''' process a column, check for the type, and return the processed
column
Parameters
==========
column: a list of values in a column to be processed
'''
pass
def upload_complete(self, path, url, quiet, resume=False):
''' function to complete an upload to retrieve a path from a url
Parameters
==========
path: the path for the upload that is read in
url: the url to send the POST to
quiet: suppress verbose output (default is False)
'''
pass
def _resume_upload(self, path, url, content_length, quiet):
pass
def _is_upload_successful(self, response):
pass
def _get_bytes_already_uploaded(self, response, quiet):
pass
def validate_dataset_string(self, dataset):
''' determine if a dataset string is valid, meaning it is in the format
of {username}/{dataset-slug} or {username}/{dataset-slug}/{version-number}.
Parameters
==========
dataset: the dataset name to validate
'''
pass
def split_dataset_string(self, dataset):
''' split a dataset string into owner_slug, dataset_slug,
and optional version_number
Parameters
==========
dataset: the dataset name to split
'''
pass
def validate_model_string(self, model):
''' determine if a model string is valid, meaning it is in the format
of {owner}/{model-slug}.
Parameters
==========
model: the model name to validate
'''
pass
def split_model_string(self, model):
''' split a model string into owner_slug, model_slug
Parameters
==========
model: the model name to split
'''
pass
def validate_model_instance_string(self, model_instance):
''' determine if a model instance string is valid, meaning it is in the format
of {owner}/{model-slug}/{framework}/{instance-slug}.
Parameters
==========
model_instance: the model instance name to validate
'''
pass
def split_model_instance_string(self, model_instance):
''' split a model instance string into owner_slug, model_slug,
framework, instance_slug
Parameters
==========
model_instance: the model instance name to validate
'''
pass
def validate_model_instance_version_string(self, model_instance_version):
''' determine if a model instance version string is valid, meaning it is in the format
of {owner}/{model-slug}/{framework}/{instance-slug}/{version-number}.
Parameters
==========
model_instance_version: the model instance version name to validate
'''
pass
def validate_kernel_string(self, kernel):
''' determine if a kernel string is valid, meaning it is in the format
of {username}/{kernel-slug}.
Parameters
==========
kernel: the kernel name to validate
'''
pass
def validate_model_string(self, model):
''' determine if a model string is valid, meaning it is in the format
of {username}/{model-slug}/{framework}/{variation-slug}/{version-number}.
Parameters
==========
model: the model name to validate
'''
pass
def validate_resources(self, folder, resources):
''' validate resources is a wrapper to validate the existence of files
and that there are no duplicates for a folder and set of resources.
Parameters
==========
folder: the folder to validate
resources: one or more resources to validate within the folder
'''
pass
def validate_files_exist(self, folder, resources):
''' ensure that one or more resource files exist in a folder
Parameters
==========
folder: the folder to validate
resources: one or more resources to validate within the folder
'''
pass
def validate_no_duplicate_paths(self, resources):
''' ensure that the user has not provided duplicate paths in
a list of resources.
Parameters
==========
resources: one or more resources to validate not duplicated
'''
pass
def convert_to_dataset_file_metadata(self, file_data, path):
''' convert a set of file_data to a metadata file at path
Parameters
==========
file_data: a dictionary of file data to write to file
path: the path to write the metadata to
'''
pass
def validate_date(self, date):
pass
def sanitize_markdown(self, markdown):
pass
def confirmation(self):
pass
| 150 | 111 | 28 | 2 | 20 | 6 | 4 | 0.32 | 0 | 100 | 74 | 0 | 144 | 0 | 146 | 146 | 4,396 | 483 | 2,982 | 986 | 2,591 | 949 | 2,133 | 664 | 1,985 | 25 | 0 | 5 | 608 |
141,045 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/api/kaggle_api_extended.py
|
src.kaggle.api.kaggle_api_extended.ResumableFileUpload
|
class ResumableFileUpload(object):
  """Tracks and persists the state of one resumable blob upload.

  State is saved as a small JSON file in the context's temp directory so an
  interrupted upload can be resumed by a later invocation.
  Reference: https://cloud.google.com/storage/docs/resumable-uploads
  """

  # A resumable upload must be completed within a week of being initiated.
  RESUMABLE_UPLOAD_EXPIRY_SECONDS = 6 * 24 * 3600

  def __init__(self, path, start_blob_upload_request, context):
    """
    Parameters
    ==========
    path: local path of the file being uploaded
    start_blob_upload_request: the ApiStartBlobUploadRequest for this file
    context: the enclosing ResumableUploadContext
    """
    self.path = path
    self.start_blob_upload_request = start_blob_upload_request
    self.context = context
    self.timestamp = int(time.time())
    self.start_blob_upload_response = None
    self.can_resume = False
    self.upload_complete = False
    if self.context.no_resume:
      # Resume bookkeeping disabled; no state file path is needed.
      return
    self._upload_info_file_path = self.context.get_upload_info_file_path(path)

  def get_token(self):
    """Return the blob upload token once the upload completed, else None."""
    if self.upload_complete:
      return self.start_blob_upload_response.token
    return None

  def load(self):
    """Load previously saved upload state (no-op when resume is disabled)."""
    if self.context.no_resume:
      return
    self._load_previous_if_any()

  def _load_previous_if_any(self):
    """Read the state file, if any, and adopt it when still valid."""
    if not os.path.exists(self._upload_info_file_path):
      return False
    try:
      with io.open(self._upload_info_file_path, 'r') as f:
        previous = ResumableFileUpload.from_dict(json.load(f), self.context)
        if self._is_previous_valid(previous):
          self.start_blob_upload_response = previous.start_blob_upload_response
          self.timestamp = previous.timestamp
          self.can_resume = True
    except Exception as e:
      # Best-effort: a corrupt/unreadable state file just means no resume.
      print('Error while trying to load upload info:', e)

  def _is_previous_valid(self, previous):
    """A saved record is valid if it matches this file/request and has not
    expired (see RESUMABLE_UPLOAD_EXPIRY_SECONDS)."""
    return previous.path == self.path and \
        previous.start_blob_upload_request == self.start_blob_upload_request and \
        previous.timestamp > time.time() - ResumableFileUpload.RESUMABLE_UPLOAD_EXPIRY_SECONDS

  def upload_initiated(self, start_blob_upload_response):
    """Record the server's response and persist state for future resumes."""
    if self.context.no_resume:
      return
    self.start_blob_upload_response = start_blob_upload_response
    self._save()  # single serialization path shared with upload_completed()

  def upload_completed(self):
    """Mark the upload finished and persist the final state."""
    if self.context.no_resume:
      return
    self.upload_complete = True
    self._save()

  def _save(self):
    # Persist the current state as JSON next to other upload-info files.
    with io.open(self._upload_info_file_path, 'w') as f:
      json.dump(self.to_dict(), f, indent=True)

  def cleanup(self):
    """Delete the state file after a successful session (best effort)."""
    if self.context.no_resume:
      return
    try:
      os.remove(self._upload_info_file_path)
    except OSError:
      pass

  def to_dict(self):
    """Serialize the resumable-upload state to a JSON-compatible dict."""
    return {
        'path':
            self.path,
        'start_blob_upload_request':
            self.start_blob_upload_request.to_dict(),
        'timestamp':
            self.timestamp,
        'start_blob_upload_response':
            self.start_blob_upload_response.to_dict()
            if self.start_blob_upload_response is not None else None,
        'upload_complete':
            self.upload_complete,
    }

  @staticmethod
  def from_dict(other, context):
    """Reconstruct a ResumableFileUpload from a to_dict() dictionary.

    Parameters
    ==========
    other: dict previously produced by to_dict()
    context: the ResumableUploadContext to attach the new object to
    """
    # Fixed: a throwaway ApiStartBlobUploadRequest was previously built via
    # from_dict() and never used; the kwargs construction below is the one
    # that matters.
    new = ResumableFileUpload(
        other['path'],
        ApiStartBlobUploadRequest(**other['start_blob_upload_request']),
        context)
    new.timestamp = other.get('timestamp')
    start_blob_upload_response = other.get('start_blob_upload_response')
    if start_blob_upload_response is not None:
      new.start_blob_upload_response = ApiStartBlobUploadResponse(
          **start_blob_upload_response)
    new.upload_complete = other.get('upload_complete') or False
    return new

  def to_str(self):
    return str(self.to_dict())

  def __repr__(self):
    return self.to_str()
|
# NOTE(review): auto-generated stub skeleton (signatures only, every body is
# `pass`) mirroring the concrete ResumableFileUpload implementation above.
# It documents the class shape and is not meant to be executed.
class ResumableFileUpload(object):
  def __init__(self, path, start_blob_upload_request, context):
    pass
  def get_token(self):
    pass
  def load(self):
    pass
  def _load_previous_if_any(self):
    pass
  def _is_previous_valid(self, previous):
    pass
  def upload_initiated(self, start_blob_upload_response):
    pass
  def upload_completed(self):
    pass
  def _save(self):
    pass
  def cleanup(self):
    pass
  def to_dict(self):
    pass
  def from_dict(other, context):
    pass
  def to_str(self):
    pass
  def __repr__(self):
    pass
| 14 | 0 | 7 | 0 | 7 | 0 | 2 | 0.02 | 1 | 6 | 2 | 0 | 13 | 8 | 13 | 13 | 109 | 17 | 90 | 31 | 76 | 2 | 72 | 27 | 58 | 4 | 1 | 3 | 25 |
141,046 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/api/kaggle_api_extended.py
|
src.kaggle.api.kaggle_api_extended.ResumableUploadContext
|
class ResumableUploadContext(object):
  """Context manager that tracks ResumableFileUpload objects for one upload
  session and cleans up their state files on successful exit."""

  def __init__(self, no_resume=False):
    """
    Parameters
    ==========
    no_resume: when True, disable all resume bookkeeping (no temp dir,
        no state files, no cleanup)
    """
    self.no_resume = no_resume
    self._temp_dir = os.path.join(tempfile.gettempdir(), '.kaggle/uploads')
    self._file_uploads = []

  def __enter__(self):
    if not self.no_resume:
      self._create_temp_dir()
    # Bug fix: this previously returned None when no_resume was set, so
    # `with ResumableUploadContext(no_resume=True) as ctx:` bound ctx to
    # None and any use of ctx inside the block crashed.
    return self

  def __exit__(self, exc_type, exc_value, exc_traceback):
    if self.no_resume:
      return
    if exc_type is not None:
      # Don't delete the upload file info when there is an error
      # to give it a chance to retry/resume on the next invocation.
      return
    for file_upload in self._file_uploads:
      file_upload.cleanup()

  def get_upload_info_file_path(self, path):
    """Map a local file path to its JSON state-file path in the temp dir.
    Path separators and ':' are flattened to '_' to build a safe name."""
    return os.path.join(
        self._temp_dir,
        '%s.json' % path.replace(os.path.sep, '_').replace(':', '_'))

  def new_resumable_file_upload(self, path, start_blob_upload_request):
    """Create, register and load a ResumableFileUpload for *path*."""
    file_upload = ResumableFileUpload(path, start_blob_upload_request, self)
    self._file_uploads.append(file_upload)
    file_upload.load()
    return file_upload

  def _create_temp_dir(self):
    # exist_ok avoids the check-then-create race the old try/except covered.
    os.makedirs(self._temp_dir, exist_ok=True)
|
# NOTE(review): auto-generated stub skeleton (signatures only, every body is
# `pass`) mirroring the concrete ResumableUploadContext implementation above.
# It documents the class shape and is not meant to be executed.
class ResumableUploadContext(object):
  def __init__(self, no_resume=False):
    pass
  def __enter__(self):
    pass
  def __exit__(self, exc_type, exc_value, exc_traceback):
    pass
  def get_upload_info_file_path(self, path):
    pass
  def new_resumable_file_upload(self, path, start_blob_upload_request):
    pass
  def _create_temp_dir(self):
    pass
| 7 | 0 | 5 | 0 | 5 | 0 | 2 | 0.06 | 1 | 2 | 1 | 0 | 6 | 3 | 6 | 6 | 39 | 6 | 31 | 12 | 24 | 2 | 29 | 12 | 22 | 4 | 1 | 1 | 11 |
141,047 |
Kaggle/kaggle-api
|
Kaggle_kaggle-api/src/kaggle/api/kaggle_api_extended.py
|
src.kaggle.api.kaggle_api_extended.TqdmBufferedReader
|
class TqdmBufferedReader(io.BufferedReader):
  """io.BufferedReader subclass that reports every byte read to a
  progress bar, so downloads/uploads can show live progress."""

  def __init__(self, raw, progress_bar):
    """Wrap *raw* (a raw binary stream) and attach *progress_bar*.

    Parameters
    ==========
    raw: bytes data to pass to the buffered reader
    progress_bar: a progress bar to initialize the reader
    """
    super().__init__(raw)
    self.progress_bar = progress_bar

  def read(self, *args, **kwargs):
    """Read from the underlying buffer, forwarding all arguments, and
    advance the progress bar by the number of bytes returned."""
    chunk = super().read(*args, **kwargs)
    self.increment(len(chunk))
    return chunk

  def increment(self, length):
    """Advance the progress bar by *length* bytes.

    Parameters
    ==========
    length: bytes to increment the reader by
    """
    self.progress_bar.update(length)
|
# NOTE(review): auto-generated stub skeleton (signatures and docstrings only,
# every body is `pass`) mirroring the concrete TqdmBufferedReader
# implementation above. It is not meant to be executed.
class TqdmBufferedReader(io.BufferedReader):
  def __init__(self, raw, progress_bar):
    ''' helper class to implement an io.BufferedReader
    Parameters
    ==========
    raw: bytes data to pass to the buffered reader
    progress_bar: a progress bar to initialize the reader
    '''
    pass
  def read(self, *args, **kwargs):
    ''' read the buffer, passing named and non named arguments to the
    io.BufferedReader function.
    '''
    pass
  def increment(self, length):
    ''' increment the reader by some length
    Parameters
    ==========
    length: bytes to increment the reader by
    '''
    pass
| 4 | 3 | 8 | 0 | 3 | 5 | 1 | 1.4 | 0 | 0 | 0 | 0 | 3 | 1 | 3 | 3 | 28 | 4 | 10 | 6 | 6 | 14 | 10 | 6 | 6 | 1 | 0 | 0 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.