---
dataset_info:
  features:
  - name: day
    dtype: string
  - name: num_downloads
    dtype: int64
  splits:
  - name: accelerate
    num_bytes: 26334
    num_examples: 1197
  - name: datasets
    num_bytes: 26334
    num_examples: 1197
  - name: diffusers
    num_bytes: 15708
    num_examples: 714
  - name: evaluate
    num_bytes: 18304
    num_examples: 832
  - name: gradio
    num_bytes: 29700
    num_examples: 1350
  - name: huggingface_hub
    num_bytes: 27214
    num_examples: 1237
  - name: optimum
    num_bytes: 21318
    num_examples: 969
  - name: peft
    num_bytes: 10560
    num_examples: 480
  - name: pytorch_image_models
    num_bytes: 29700
    num_examples: 1350
  - name: safetensors
    num_bytes: 11770
    num_examples: 535
  - name: tokenizers
    num_bytes: 29700
    num_examples: 1350
  - name: transformers
    num_bytes: 30382
    num_examples: 1381
  - name: sentence_transformers
    num_bytes: 4686
    num_examples: 213
  download_size: 163911
  dataset_size: 281710
configs:
- config_name: default
  data_files:
  - split: accelerate
    path: data/accelerate-*
  - split: datasets
    path: data/datasets-*
  - split: diffusers
    path: data/diffusers-*
  - split: evaluate
    path: data/evaluate-*
  - split: gradio
    path: data/gradio-*
  - split: huggingface_hub
    path: data/huggingface_hub-*
  - split: optimum
    path: data/optimum-*
  - split: peft
    path: data/peft-*
  - split: pytorch_image_models
    path: data/pytorch_image_models-*
  - split: safetensors
    path: data/safetensors-*
  - split: tokenizers
    path: data/tokenizers-*
  - split: transformers
    path: data/transformers-*
  - split: sentence_transformers
    path: data/sentence_transformers-*
---
# Dataset Card for "pip"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
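
The YAML metadata above defines one split per Hugging Face library, each containing rows with a `day` (string) and a `num_downloads` (int64) count. Below is a minimal sketch of loading this dataset with the `datasets` library; the repo id is a placeholder, since the card does not state the dataset's actual Hub path:

```python
from datasets import load_dataset

# Placeholder repo id: substitute the dataset's real <namespace>/pip path on the Hub.
downloads = load_dataset("your-namespace/pip", split="transformers")

# Each row pairs a day with that day's pip download count:
# {'day': <string>, 'num_downloads': <int64>}
print(downloads[0])

# Loading without a split returns a DatasetDict keyed by library name
# (accelerate, datasets, diffusers, ..., sentence_transformers).
all_splits = load_dataset("your-namespace/pip")
print(list(all_splits.keys()))
```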