content
stringlengths 1
103k
⌀ | path
stringlengths 8
216
| filename
stringlengths 2
179
| language
stringclasses 15
values | size_bytes
int64 2
189k
| quality_score
float64 0.5
0.95
| complexity
float64 0
1
| documentation_ratio
float64 0
1
| repository
stringclasses 5
values | stars
int64 0
1k
| created_date
stringdate 2023-07-10 19:21:08
2025-07-09 19:11:45
| license
stringclasses 4
values | is_test
bool 2
classes | file_hash
stringlengths 32
32
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
\n\n
|
.venv\Lib\site-packages\datasets\utils\__pycache__\_dill.cpython-313.pyc
|
_dill.cpython-313.pyc
|
Other
| 17,723 | 0.95 | 0.014706 | 0 |
awesome-app
| 446 |
2024-08-27T12:54:13.209530
|
BSD-3-Clause
| false |
07879e8499b203aca9a2c24f703b2690
|
\n\n
|
.venv\Lib\site-packages\datasets\utils\__pycache__\_filelock.cpython-313.pyc
|
_filelock.cpython-313.pyc
|
Other
| 2,840 | 0.8 | 0.047619 | 0 |
react-lib
| 211 |
2025-05-12T21:25:47.648184
|
Apache-2.0
| false |
6cb952bb75cab736376262b46b756db5
|
\n\n
|
.venv\Lib\site-packages\datasets\utils\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 609 | 0.7 | 0 | 0 |
awesome-app
| 219 |
2025-06-02T01:55:19.835677
|
Apache-2.0
| false |
d5aac1d57d481798384a92cfbac26767
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\arrow_reader.cpython-313.pyc
|
arrow_reader.cpython-313.pyc
|
Other
| 27,928 | 0.8 | 0.04321 | 0.013889 |
vue-tools
| 736 |
2025-05-01T06:12:15.319672
|
MIT
| false |
216496a6f991992d93311fe3cc031e59
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\arrow_writer.cpython-313.pyc
|
arrow_writer.cpython-313.pyc
|
Other
| 35,701 | 0.95 | 0.050505 | 0 |
awesome-app
| 619 |
2023-12-26T01:22:24.338787
|
GPL-3.0
| false |
4743634b8d78c8d23f19c823f4d68fcc
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\builder.cpython-313.pyc
|
builder.cpython-313.pyc
|
Other
| 96,461 | 0.75 | 0.086674 | 0.016471 |
react-lib
| 258 |
2023-12-10T18:21:55.443210
|
MIT
| false |
e844da98ef7f807fd83d46f33efd9001
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\combine.cpython-313.pyc
|
combine.cpython-313.pyc
|
Other
| 10,488 | 0.95 | 0.044199 | 0 |
node-utils
| 640 |
2023-09-02T09:36:12.835476
|
GPL-3.0
| false |
4673f6d0bb5314d7367bcec1ed89ad98
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\config.cpython-313.pyc
|
config.cpython-313.pyc
|
Other
| 12,015 | 0.95 | 0 | 0.030769 |
node-utils
| 670 |
2024-12-03T09:23:24.447749
|
GPL-3.0
| false |
5caa9a5bdea7a57b510ef013fe8cce23
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\data_files.cpython-313.pyc
|
data_files.cpython-313.pyc
|
Other
| 37,093 | 0.95 | 0.034137 | 0.004535 |
python-kit
| 848 |
2024-08-30T12:58:25.583347
|
BSD-3-Clause
| false |
c84ba72a69ecf16112a805a220411864
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\distributed.cpython-313.pyc
|
distributed.cpython-313.pyc
|
Other
| 1,772 | 0.8 | 0.088235 | 0 |
vue-tools
| 704 |
2025-05-08T22:26:42.830789
|
Apache-2.0
| false |
2e8d96b5fe026ada1ae7007cc32067c2
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\exceptions.cpython-313.pyc
|
exceptions.cpython-313.pyc
|
Other
| 7,113 | 0.95 | 0.076923 | 0.021739 |
node-utils
| 312 |
2025-03-04T19:46:08.356184
|
MIT
| false |
34cea866c07f27228450141bf1373367
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\fingerprint.cpython-313.pyc
|
fingerprint.cpython-313.pyc
|
Other
| 21,844 | 0.95 | 0.056604 | 0.015152 |
vue-tools
| 116 |
2024-05-17T17:10:51.718285
|
Apache-2.0
| false |
8d03b002c65f5c2dbbd67ef53fdf8f25
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\hub.cpython-313.pyc
|
hub.cpython-313.pyc
|
Other
| 9,777 | 0.95 | 0.059829 | 0 |
python-kit
| 570 |
2025-01-26T11:53:16.435369
|
Apache-2.0
| false |
11955b1637084fd7bcc4f7d85e14c535
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\info.cpython-313.pyc
|
info.cpython-313.pyc
|
Other
| 24,685 | 0.95 | 0.057143 | 0.010582 |
vue-tools
| 742 |
2025-03-04T09:37:51.815040
|
GPL-3.0
| false |
0c18e98a2464b0ea379107e08cbf18df
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\inspect.cpython-313.pyc
|
inspect.cpython-313.pyc
|
Other
| 16,638 | 0.95 | 0.155102 | 0.022831 |
react-lib
| 887 |
2023-11-14T03:24:05.732112
|
Apache-2.0
| false |
dd626aa9942bf4e3ad38a5197940f95d
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\keyhash.cpython-313.pyc
|
keyhash.cpython-313.pyc
|
Other
| 4,861 | 0.95 | 0.122807 | 0 |
awesome-app
| 259 |
2023-12-19T20:26:26.163391
|
GPL-3.0
| false |
9e476b983936d24e401b48cd05f670a8
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\load.cpython-313.pyc
|
load.cpython-313.pyc
|
Other
| 99,741 | 0.75 | 0.060526 | 0.025871 |
react-lib
| 314 |
2023-09-27T15:52:06.505210
|
BSD-3-Clause
| false |
bc42e79070b321992541b606f86072c1
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\naming.cpython-313.pyc
|
naming.cpython-313.pyc
|
Other
| 3,998 | 0.8 | 0.02439 | 0 |
vue-tools
| 38 |
2025-01-08T03:30:31.940466
|
BSD-3-Clause
| false |
6763d77418f44bc40d02c226126e54ea
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\search.cpython-313.pyc
|
search.cpython-313.pyc
|
Other
| 41,300 | 0.95 | 0.064632 | 0.001949 |
awesome-app
| 609 |
2025-05-22T13:07:31.544560
|
GPL-3.0
| false |
70ff0f273dd34953dfe098d1a2142dae
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\splits.cpython-313.pyc
|
splits.cpython-313.pyc
|
Other
| 29,083 | 0.95 | 0.043103 | 0.029605 |
python-kit
| 643 |
2023-08-06T07:17:54.596804
|
Apache-2.0
| false |
52584477cf2746cd69fbed974bb2118f
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\streaming.cpython-313.pyc
|
streaming.cpython-313.pyc
|
Other
| 7,824 | 0.95 | 0.048387 | 0 |
python-kit
| 421 |
2025-03-19T08:20:29.057550
|
MIT
| false |
1fc83d6e6703b323a42cc695d8dbf5b8
|
\n\n
|
.venv\Lib\site-packages\datasets\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 1,447 | 0.8 | 0 | 0 |
python-kit
| 292 |
2025-02-05T21:21:08.616781
|
MIT
| false |
56a74a46c43e0beff027634a4a4fcb7e
|
# This is the list of HuggingFace Datasets authors for copyright purposes.\n#\n# This does not necessarily list everyone who has contributed code, since in\n# some cases, their employer may be the copyright holder. To see the full list\n# of contributors, see the revision history in source control.\n\nGoogle Inc.\nHuggingFace Inc.\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\AUTHORS
|
AUTHORS
|
Other
| 327 | 0.8 | 0.125 | 0.714286 |
node-utils
| 325 |
2024-03-18T07:28:29.628244
|
GPL-3.0
| false |
503d28cfe2cc41a34a41830deb00937d
|
[console_scripts]\ndatasets-cli = datasets.commands.datasets_cli:main\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\entry_points.txt
|
entry_points.txt
|
Other
| 69 | 0.5 | 0 | 0 |
node-utils
| 694 |
2024-08-16T12:18:10.720441
|
GPL-3.0
| false |
9ecb4d00d9fdb436710b5feb741264d9
|
pip\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\INSTALLER
|
INSTALLER
|
Other
| 4 | 0.5 | 0 | 0 |
node-utils
| 512 |
2024-12-22T12:18:34.747607
|
Apache-2.0
| false |
365c9bfeb7d89244f2ce01c1de44cb85
|
\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "[]"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\LICENSE
|
LICENSE
|
Other
| 11,358 | 0.95 | 0.118812 | 0 |
vue-tools
| 975 |
2023-10-20T08:52:23.387541
|
BSD-3-Clause
| false |
3b83ef96387f14655fc854ddc3c6bd57
|
Metadata-Version: 2.2\nName: datasets\nVersion: 3.6.0\nSummary: HuggingFace community-driven open-source library of datasets\nHome-page: https://github.com/huggingface/datasets\nDownload-URL: https://github.com/huggingface/datasets/tags\nAuthor: HuggingFace Inc.\nAuthor-email: thomas@huggingface.co\nLicense: Apache 2.0\nKeywords: datasets machine learning datasets\nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Intended Audience :: Developers\nClassifier: Intended Audience :: Education\nClassifier: Intended Audience :: Science/Research\nClassifier: License :: OSI Approved :: Apache Software License\nClassifier: Operating System :: OS Independent\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3.9\nClassifier: Programming Language :: Python :: 3.10\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Topic :: Scientific/Engineering :: Artificial Intelligence\nRequires-Python: >=3.9.0\nDescription-Content-Type: text/markdown\nLicense-File: LICENSE\nLicense-File: AUTHORS\nRequires-Dist: filelock\nRequires-Dist: numpy>=1.17\nRequires-Dist: pyarrow>=15.0.0\nRequires-Dist: dill<0.3.9,>=0.3.0\nRequires-Dist: pandas\nRequires-Dist: requests>=2.32.2\nRequires-Dist: tqdm>=4.66.3\nRequires-Dist: xxhash\nRequires-Dist: multiprocess<0.70.17\nRequires-Dist: fsspec[http]<=2025.3.0,>=2023.1.0\nRequires-Dist: huggingface-hub>=0.24.0\nRequires-Dist: packaging\nRequires-Dist: pyyaml>=5.1\nProvides-Extra: audio\nRequires-Dist: soundfile>=0.12.1; extra == "audio"\nRequires-Dist: librosa; extra == "audio"\nRequires-Dist: soxr>=0.4.0; extra == "audio"\nProvides-Extra: vision\nRequires-Dist: Pillow>=9.4.0; extra == "vision"\nProvides-Extra: tensorflow\nRequires-Dist: tensorflow>=2.6.0; extra == "tensorflow"\nProvides-Extra: tensorflow-gpu\nRequires-Dist: tensorflow>=2.6.0; extra == "tensorflow-gpu"\nProvides-Extra: torch\nRequires-Dist: torch; extra == "torch"\nProvides-Extra: jax\nRequires-Dist: 
jax>=0.3.14; extra == "jax"\nRequires-Dist: jaxlib>=0.3.14; extra == "jax"\nProvides-Extra: s3\nRequires-Dist: s3fs; extra == "s3"\nProvides-Extra: streaming\nProvides-Extra: dev\nRequires-Dist: absl-py; extra == "dev"\nRequires-Dist: decorator; extra == "dev"\nRequires-Dist: joblib<1.3.0; extra == "dev"\nRequires-Dist: joblibspark; extra == "dev"\nRequires-Dist: pytest; extra == "dev"\nRequires-Dist: pytest-datadir; extra == "dev"\nRequires-Dist: pytest-xdist; extra == "dev"\nRequires-Dist: aiohttp; extra == "dev"\nRequires-Dist: elasticsearch<8.0.0,>=7.17.12; extra == "dev"\nRequires-Dist: faiss-cpu>=1.8.0.post1; extra == "dev"\nRequires-Dist: jax>=0.3.14; sys_platform != "win32" and extra == "dev"\nRequires-Dist: jaxlib>=0.3.14; sys_platform != "win32" and extra == "dev"\nRequires-Dist: lz4; extra == "dev"\nRequires-Dist: moto[server]; extra == "dev"\nRequires-Dist: pyspark>=3.4; extra == "dev"\nRequires-Dist: py7zr; extra == "dev"\nRequires-Dist: rarfile>=4.0; extra == "dev"\nRequires-Dist: sqlalchemy; extra == "dev"\nRequires-Dist: s3fs>=2021.11.1; extra == "dev"\nRequires-Dist: protobuf<4.0.0; extra == "dev"\nRequires-Dist: tensorflow>=2.6.0; python_version < "3.10" and extra == "dev"\nRequires-Dist: tensorflow>=2.16.0; python_version >= "3.10" and extra == "dev"\nRequires-Dist: tiktoken; extra == "dev"\nRequires-Dist: torch>=2.0.0; extra == "dev"\nRequires-Dist: torchdata; extra == "dev"\nRequires-Dist: soundfile>=0.12.1; extra == "dev"\nRequires-Dist: transformers>=4.42.0; extra == "dev"\nRequires-Dist: zstandard; extra == "dev"\nRequires-Dist: polars[timezone]>=0.20.0; extra == "dev"\nRequires-Dist: torchvision; extra == "dev"\nRequires-Dist: pyav; extra == "dev"\nRequires-Dist: Pillow>=9.4.0; extra == "dev"\nRequires-Dist: soundfile>=0.12.1; extra == "dev"\nRequires-Dist: librosa; extra == "dev"\nRequires-Dist: soxr>=0.4.0; extra == "dev"\nRequires-Dist: ruff>=0.3.0; extra == "dev"\nRequires-Dist: s3fs; extra == "dev"\nRequires-Dist: transformers; extra 
== "dev"\nRequires-Dist: torch; extra == "dev"\nRequires-Dist: tensorflow>=2.6.0; extra == "dev"\nProvides-Extra: tests\nRequires-Dist: absl-py; extra == "tests"\nRequires-Dist: decorator; extra == "tests"\nRequires-Dist: joblib<1.3.0; extra == "tests"\nRequires-Dist: joblibspark; extra == "tests"\nRequires-Dist: pytest; extra == "tests"\nRequires-Dist: pytest-datadir; extra == "tests"\nRequires-Dist: pytest-xdist; extra == "tests"\nRequires-Dist: aiohttp; extra == "tests"\nRequires-Dist: elasticsearch<8.0.0,>=7.17.12; extra == "tests"\nRequires-Dist: faiss-cpu>=1.8.0.post1; extra == "tests"\nRequires-Dist: jax>=0.3.14; sys_platform != "win32" and extra == "tests"\nRequires-Dist: jaxlib>=0.3.14; sys_platform != "win32" and extra == "tests"\nRequires-Dist: lz4; extra == "tests"\nRequires-Dist: moto[server]; extra == "tests"\nRequires-Dist: pyspark>=3.4; extra == "tests"\nRequires-Dist: py7zr; extra == "tests"\nRequires-Dist: rarfile>=4.0; extra == "tests"\nRequires-Dist: sqlalchemy; extra == "tests"\nRequires-Dist: s3fs>=2021.11.1; extra == "tests"\nRequires-Dist: protobuf<4.0.0; extra == "tests"\nRequires-Dist: tensorflow>=2.6.0; python_version < "3.10" and extra == "tests"\nRequires-Dist: tensorflow>=2.16.0; python_version >= "3.10" and extra == "tests"\nRequires-Dist: tiktoken; extra == "tests"\nRequires-Dist: torch>=2.0.0; extra == "tests"\nRequires-Dist: torchdata; extra == "tests"\nRequires-Dist: soundfile>=0.12.1; extra == "tests"\nRequires-Dist: transformers>=4.42.0; extra == "tests"\nRequires-Dist: zstandard; extra == "tests"\nRequires-Dist: polars[timezone]>=0.20.0; extra == "tests"\nRequires-Dist: torchvision; extra == "tests"\nRequires-Dist: pyav; extra == "tests"\nRequires-Dist: Pillow>=9.4.0; extra == "tests"\nRequires-Dist: soundfile>=0.12.1; extra == "tests"\nRequires-Dist: librosa; extra == "tests"\nRequires-Dist: soxr>=0.4.0; extra == "tests"\nProvides-Extra: tests-numpy2\nRequires-Dist: absl-py; extra == "tests-numpy2"\nRequires-Dist: decorator; 
extra == "tests-numpy2"\nRequires-Dist: joblib<1.3.0; extra == "tests-numpy2"\nRequires-Dist: joblibspark; extra == "tests-numpy2"\nRequires-Dist: pytest; extra == "tests-numpy2"\nRequires-Dist: pytest-datadir; extra == "tests-numpy2"\nRequires-Dist: pytest-xdist; extra == "tests-numpy2"\nRequires-Dist: aiohttp; extra == "tests-numpy2"\nRequires-Dist: elasticsearch<8.0.0,>=7.17.12; extra == "tests-numpy2"\nRequires-Dist: jax>=0.3.14; sys_platform != "win32" and extra == "tests-numpy2"\nRequires-Dist: jaxlib>=0.3.14; sys_platform != "win32" and extra == "tests-numpy2"\nRequires-Dist: lz4; extra == "tests-numpy2"\nRequires-Dist: moto[server]; extra == "tests-numpy2"\nRequires-Dist: pyspark>=3.4; extra == "tests-numpy2"\nRequires-Dist: py7zr; extra == "tests-numpy2"\nRequires-Dist: rarfile>=4.0; extra == "tests-numpy2"\nRequires-Dist: sqlalchemy; extra == "tests-numpy2"\nRequires-Dist: s3fs>=2021.11.1; extra == "tests-numpy2"\nRequires-Dist: protobuf<4.0.0; extra == "tests-numpy2"\nRequires-Dist: tiktoken; extra == "tests-numpy2"\nRequires-Dist: torch>=2.0.0; extra == "tests-numpy2"\nRequires-Dist: torchdata; extra == "tests-numpy2"\nRequires-Dist: soundfile>=0.12.1; extra == "tests-numpy2"\nRequires-Dist: transformers>=4.42.0; extra == "tests-numpy2"\nRequires-Dist: zstandard; extra == "tests-numpy2"\nRequires-Dist: polars[timezone]>=0.20.0; extra == "tests-numpy2"\nRequires-Dist: torchvision; extra == "tests-numpy2"\nRequires-Dist: pyav; extra == "tests-numpy2"\nRequires-Dist: Pillow>=9.4.0; extra == "tests-numpy2"\nRequires-Dist: soundfile>=0.12.1; extra == "tests-numpy2"\nRequires-Dist: soxr>=0.4.0; extra == "tests-numpy2"\nProvides-Extra: quality\nRequires-Dist: ruff>=0.3.0; extra == "quality"\nProvides-Extra: benchmarks\nRequires-Dist: tensorflow==2.12.0; extra == "benchmarks"\nRequires-Dist: torch==2.0.1; extra == "benchmarks"\nRequires-Dist: transformers==4.30.1; extra == "benchmarks"\nProvides-Extra: docs\nRequires-Dist: s3fs; extra == "docs"\nRequires-Dist: 
transformers; extra == "docs"\nRequires-Dist: torch; extra == "docs"\nRequires-Dist: tensorflow>=2.6.0; extra == "docs"\nProvides-Extra: pdfs\nRequires-Dist: pdfplumber>=0.11.4; extra == "pdfs"\nDynamic: author\nDynamic: author-email\nDynamic: classifier\nDynamic: description\nDynamic: description-content-type\nDynamic: download-url\nDynamic: home-page\nDynamic: keywords\nDynamic: license\nDynamic: provides-extra\nDynamic: requires-dist\nDynamic: requires-python\nDynamic: summary\n\n<p align="center">\n <picture>\n <source media="(prefers-color-scheme: dark)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-dark.svg">\n <source media="(prefers-color-scheme: light)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-light.svg">\n <img alt="Hugging Face Datasets Library" src="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-light.svg" width="352" height="59" style="max-width: 100%;">\n </picture>\n <br/>\n <br/>\n</p>\n\n<p align="center">\n <a href="https://github.com/huggingface/datasets/actions/workflows/ci.yml?query=branch%3Amain"><img alt="Build" src="https://github.com/huggingface/datasets/actions/workflows/ci.yml/badge.svg?branch=main"></a>\n <a href="https://github.com/huggingface/datasets/blob/main/LICENSE"><img alt="GitHub" src="https://img.shields.io/github/license/huggingface/datasets.svg?color=blue"></a>\n <a href="https://huggingface.co/docs/datasets/index.html"><img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/datasets/index.html.svg?down_color=red&down_message=offline&up_message=online"></a>\n <a href="https://github.com/huggingface/datasets/releases"><img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/datasets.svg"></a>\n <a href="https://huggingface.co/datasets/"><img alt="Number of datasets" 
src="https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/datasets&color=brightgreen"></a>\n <a href="CODE_OF_CONDUCT.md"><img alt="Contributor Covenant" src="https://img.shields.io/badge/Contributor%20Covenant-2.0-4baaaa.svg"></a>\n <a href="https://zenodo.org/badge/latestdoi/250213286"><img src="https://zenodo.org/badge/250213286.svg" alt="DOI"></a>\n</p>\n\n🤗 Datasets is a lightweight library providing **two** main features:\n\n- **one-line dataloaders for many public datasets**: one-liners to download and pre-process any of the  major public datasets (image datasets, audio datasets, text datasets in 467 languages and dialects, etc.) provided on the [HuggingFace Datasets Hub](https://huggingface.co/datasets). With a simple command like `squad_dataset = load_dataset("rajpurkar/squad")`, get any of these datasets ready to use in a dataloader for training/evaluating a ML model (Numpy/Pandas/PyTorch/TensorFlow/JAX),\n- **efficient data pre-processing**: simple, fast and reproducible data pre-processing for the public datasets as well as your own local datasets in CSV, JSON, text, PNG, JPEG, WAV, MP3, Parquet, etc. 
With simple commands like `processed_dataset = dataset.map(process_example)`, efficiently prepare the dataset for inspection and ML model evaluation and training.\n\n[🎓 **Documentation**](https://huggingface.co/docs/datasets/) [🔎 **Find a dataset in the Hub**](https://huggingface.co/datasets) [🌟 **Share a dataset on the Hub**](https://huggingface.co/docs/datasets/share)\n\n<h3 align="center">\n <a href="https://hf.co/course"><img src="https://raw.githubusercontent.com/huggingface/datasets/main/docs/source/imgs/course_banner.png"></a>\n</h3>\n\n🤗 Datasets is designed to let the community easily add and share new datasets.\n\n🤗 Datasets has many additional interesting features:\n\n- Thrive on large datasets: 🤗 Datasets naturally frees the user from RAM memory limitation, all datasets are memory-mapped using an efficient zero-serialization cost backend (Apache Arrow).\n- Smart caching: never wait for your data to process several times.\n- Lightweight and fast with a transparent and pythonic API (multi-processing/caching/memory-mapping).\n- Built-in interoperability with NumPy, PyTorch, TensorFlow 2, JAX, Pandas, Polars and more.\n- Native support for audio, image and video data.\n- Enable streaming mode to save disk space and start iterating over the dataset immediately.\n\n🤗 Datasets originated from a fork of the awesome [TensorFlow Datasets](https://github.com/tensorflow/datasets) and the HuggingFace team want to deeply thank the TensorFlow Datasets team for building this amazing library.\n\n# Installation\n\n## With pip\n\n🤗 Datasets can be installed from PyPi and has to be installed in a virtual environment (venv or conda for instance)\n\n```bash\npip install datasets\n```\n\n## With conda\n\n🤗 Datasets can be installed using conda as follows:\n\n```bash\nconda install -c huggingface -c conda-forge datasets\n```\n\nFollow the installation pages of TensorFlow and PyTorch to see how to install them with conda.\n\nFor more details on installation, check the 
installation page in the documentation: https://huggingface.co/docs/datasets/installation\n\n## Installation to use with Machine Learning & Data frameworks frameworks\n\nIf you plan to use 🤗 Datasets with PyTorch (2.0+), TensorFlow (2.6+) or JAX (3.14+) you should also install PyTorch, TensorFlow or JAX.\n🤗 Datasets is also well integrated with data frameworks like PyArrow, Pandas, Polars and Spark, which should be installed separately.\n\nFor more details on using the library with these frameworks, check the quick start page in the documentation: https://huggingface.co/docs/datasets/quickstart\n\n# Usage\n\n🤗 Datasets is made to be very simple to use - the API is centered around a single function, `datasets.load_dataset(dataset_name, **kwargs)`, that instantiates a dataset.\n\nThis library can be used for text/image/audio/etc. datasets. Here is an example to load a text dataset:\n\nHere is a quick example:\n\n```python\nfrom datasets import load_dataset\n\n# Print all the available datasets\nfrom huggingface_hub import list_datasets\nprint([dataset.id for dataset in list_datasets()])\n\n# Load a dataset and print the first example in the training set\nsquad_dataset = load_dataset('rajpurkar/squad')\nprint(squad_dataset['train'][0])\n\n# Process the dataset - add a column with the length of the context texts\ndataset_with_length = squad_dataset.map(lambda x: {"length": len(x["context"])})\n\n# Process the dataset - tokenize the context texts (using a tokenizer from the 🤗 Transformers library)\nfrom transformers import AutoTokenizer\ntokenizer = AutoTokenizer.from_pretrained('bert-base-cased')\n\ntokenized_dataset = squad_dataset.map(lambda x: tokenizer(x['context']), batched=True)\n```\n\nIf your dataset is bigger than your disk or if you don't want to wait to download the data, you can use streaming:\n\n```python\n# If you want to use the dataset immediately and efficiently stream the data as you iterate over the dataset\nimage_dataset = 
load_dataset('timm/imagenet-1k-wds', streaming=True)\nfor example in image_dataset["train"]:\n break\n```\n\nFor more details on using the library, check the quick start page in the documentation: https://huggingface.co/docs/datasets/quickstart and the specific pages on:\n\n- Loading a dataset: https://huggingface.co/docs/datasets/loading\n- What's in a Dataset: https://huggingface.co/docs/datasets/access\n- Processing data with 🤗 Datasets: https://huggingface.co/docs/datasets/process\n - Processing audio data: https://huggingface.co/docs/datasets/audio_process\n - Processing image data: https://huggingface.co/docs/datasets/image_process\n - Processing text data: https://huggingface.co/docs/datasets/nlp_process\n- Streaming a dataset: https://huggingface.co/docs/datasets/stream\n- etc.\n\n# Add a new dataset to the Hub\n\nWe have a very detailed step-by-step guide to add a new dataset to the  datasets already provided on the [HuggingFace Datasets Hub](https://huggingface.co/datasets).\n\nYou can find:\n- [how to upload a dataset to the Hub using your web browser or Python](https://huggingface.co/docs/datasets/upload_dataset) and also\n- [how to upload it using Git](https://huggingface.co/docs/datasets/share).\n\n# Disclaimers\n\nYou can use 🤗 Datasets to load datasets based on Python code defined by the dataset authors to parse certain data formats or structures. For security reasons, this feature is disabled by default and requires passing `trust_remote_code=True`. In this case we also ask users that want to load such datasets to:\n- check the dataset scripts they're going to run beforehand and\n- pin the `revision` of the repositories they use.\n\nIf you're a dataset owner and wish to update any part of it (description, citation, license, etc.), or do not want your dataset to be included in the Hugging Face Hub, please get in touch by opening a discussion or a pull request in the Community tab of the dataset page. 
Thanks for your contribution to the ML community!\n\n## BibTeX\n\nIf you want to cite our 🤗 Datasets library, you can use our [paper](https://arxiv.org/abs/2109.02846):\n\n```bibtex\n@inproceedings{lhoest-etal-2021-datasets,\n title = "Datasets: A Community Library for Natural Language Processing",\n author = "Lhoest, Quentin and\n Villanova del Moral, Albert and\n Jernite, Yacine and\n Thakur, Abhishek and\n von Platen, Patrick and\n Patil, Suraj and\n Chaumond, Julien and\n Drame, Mariama and\n Plu, Julien and\n Tunstall, Lewis and\n Davison, Joe and\n {\v{S}}a{\v{s}}ko, Mario and\n Chhablani, Gunjan and\n Malik, Bhavitvya and\n Brandeis, Simon and\n Le Scao, Teven and\n Sanh, Victor and\n Xu, Canwen and\n Patry, Nicolas and\n McMillan-Major, Angelina and\n Schmid, Philipp and\n Gugger, Sylvain and\n Delangue, Cl{\'e}ment and\n Matussi{\`e}re, Th{\'e}o and\n Debut, Lysandre and\n Bekman, Stas and\n Cistac, Pierric and\n Goehringer, Thibault and\n Mustar, Victor and\n Lagunas, Fran{\c{c}}ois and\n Rush, Alexander and\n Wolf, Thomas",\n booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: System Demonstrations",\n month = nov,\n year = "2021",\n address = "Online and Punta Cana, Dominican Republic",\n publisher = "Association for Computational Linguistics",\n url = "https://aclanthology.org/2021.emnlp-demo.21",\n pages = "175--184",\n abstract = "The scale, variety, and quantity of publicly-available NLP datasets has grown rapidly as researchers propose new tasks, larger models, and novel benchmarks. Datasets is a community library for contemporary NLP designed to support this ecosystem. Datasets aims to standardize end-user interfaces, versioning, and documentation, while providing a lightweight front-end that behaves similarly for small datasets as for internet-scale corpora. The design of the library incorporates a distributed, community-driven approach to adding datasets and documenting usage. 
After a year of development, the library now includes more than 650 unique datasets, has more than 250 contributors, and has helped support a variety of novel cross-dataset research projects and shared tasks. The library is available at https://github.com/huggingface/datasets.",\n eprint={2109.02846},\n archivePrefix={arXiv},\n primaryClass={cs.CL},\n}\n```\n\nIf you need to cite a specific version of our 🤗 Datasets library for reproducibility, you can use the corresponding version Zenodo DOI from this [list](https://zenodo.org/search?q=conceptrecid:%224817768%22&sort=-version&all_versions=True).\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\METADATA
|
METADATA
|
Other
| 19,747 | 0.95 | 0.054688 | 0.038462 |
python-kit
| 927 |
2024-11-21T12:09:48.387142
|
Apache-2.0
| false |
3850b5a6dcf34df7c91485dfa8bb7caa
|
../../Scripts/datasets-cli.exe,sha256=83uBnrGOdLfTh7UjT3ST28ENcAPFDseah3vmKGpkIg8,108431\ndatasets-3.6.0.dist-info/AUTHORS,sha256=L0FBY23tCNHLmvsOKAbumHn8WZZIK98sH53JYxhAchU,327\ndatasets-3.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\ndatasets-3.6.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358\ndatasets-3.6.0.dist-info/METADATA,sha256=rNyefEqsG6-V8EhpqdNl9Nyt1aVOinIwePAuQUoIk3Q,19747\ndatasets-3.6.0.dist-info/RECORD,,\ndatasets-3.6.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets-3.6.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91\ndatasets-3.6.0.dist-info/entry_points.txt,sha256=iM-h4A7OQCrZqr3L2mwiyMtPeFj8w4HAHzmI45y3tg0,69\ndatasets-3.6.0.dist-info/top_level.txt,sha256=9A857YvCQm_Dg3UjeKkWPz9sDBos0t3zN2pf5krTemQ,9\ndatasets/__init__.py,sha256=2r9PYx352IPBoMHGM3yF1UyV4DpE-ZPKXr3HRbI7_RI,1606\ndatasets/__pycache__/__init__.cpython-313.pyc,,\ndatasets/__pycache__/arrow_dataset.cpython-313.pyc,,\ndatasets/__pycache__/arrow_reader.cpython-313.pyc,,\ndatasets/__pycache__/arrow_writer.cpython-313.pyc,,\ndatasets/__pycache__/builder.cpython-313.pyc,,\ndatasets/__pycache__/combine.cpython-313.pyc,,\ndatasets/__pycache__/config.cpython-313.pyc,,\ndatasets/__pycache__/data_files.cpython-313.pyc,,\ndatasets/__pycache__/dataset_dict.cpython-313.pyc,,\ndatasets/__pycache__/distributed.cpython-313.pyc,,\ndatasets/__pycache__/exceptions.cpython-313.pyc,,\ndatasets/__pycache__/fingerprint.cpython-313.pyc,,\ndatasets/__pycache__/hub.cpython-313.pyc,,\ndatasets/__pycache__/info.cpython-313.pyc,,\ndatasets/__pycache__/inspect.cpython-313.pyc,,\ndatasets/__pycache__/iterable_dataset.cpython-313.pyc,,\ndatasets/__pycache__/keyhash.cpython-313.pyc,,\ndatasets/__pycache__/load.cpython-313.pyc,,\ndatasets/__pycache__/naming.cpython-313.pyc,,\ndatasets/__pycache__/search.cpython-313.pyc,,\ndatasets/__pycache__/splits.cpython-313.pyc,,\ndatasets/__pycache__/streaming.c
python-313.pyc,,\ndatasets/__pycache__/table.cpython-313.pyc,,\ndatasets/arrow_dataset.py,sha256=04xoW_fARh2Nsgz7Rk78r7vhHwBO14tPWKzhfCu6xgo,301860\ndatasets/arrow_reader.py,sha256=byEDpH_SwzjbnVqW0pXjBaE1NoXFE4Cmwoz7zTB_IvA,25131\ndatasets/arrow_writer.py,sha256=rwshYb2ClpaCc7xdFhHGaw__p9FkZHupGgQNzCdFBwY,30645\ndatasets/builder.py,sha256=XlMZRiXRkghHgjWZNDdmPeHGi9rv3EFeG0crfOjgGG0,90575\ndatasets/combine.py,sha256=iylOVTWReGk_9x1HYFEMkRvB8kLlQq6E0zEHdawp3ds,10892\ndatasets/commands/__init__.py,sha256=rujbQtxJbwHhF9WQqp2DD9tfVTghDMJdl0v6H551Pcs,312\ndatasets/commands/__pycache__/__init__.cpython-313.pyc,,\ndatasets/commands/__pycache__/convert.cpython-313.pyc,,\ndatasets/commands/__pycache__/convert_to_parquet.cpython-313.pyc,,\ndatasets/commands/__pycache__/datasets_cli.cpython-313.pyc,,\ndatasets/commands/__pycache__/delete_from_hub.cpython-313.pyc,,\ndatasets/commands/__pycache__/env.cpython-313.pyc,,\ndatasets/commands/__pycache__/test.cpython-313.pyc,,\ndatasets/commands/convert.py,sha256=yK0fjlM47kzo0W8LTdnEIfcMjDm9UOg2a3FZPA9dvmw,7878\ndatasets/commands/convert_to_parquet.py,sha256=cCCug82MPSUiA_TUlJLFUhqGdaKNOL2NVpKQNtTvaCQ,1593\ndatasets/commands/datasets_cli.py,sha256=KHQa0rn3w4DQ_mM-1BaYvsrG6todZFyoBxM3nWjUBZA,1422\ndatasets/commands/delete_from_hub.py,sha256=o0wdolb1r1Jnl6F0KdqKn3u0l8VR2od6KzbRoqrSNPM,1396\ndatasets/commands/env.py,sha256=8qg-hpXSXXsHvtYFvJkn5rn9IncqPsjjx3nR8no4a2I,1239\ndatasets/commands/test.py,sha256=RglIF7uamhDN5AZEgvF-Ur2d_BZFABr2-88IwqcXJ3o,9115\ndatasets/config.py,sha256=_po0FbqBqy8wSNrsLcjWAmOD8tzEdIYdTMpqkP4VtkI,10306\ndatasets/data_files.py,sha256=-rpEc0Z4bln1qdcvKdPG7MQctZNrPkEy6M3fyMH52uM,31571\ndatasets/dataset_dict.py,sha256=KW5Zfnl3dfXHJxSQYuF0qJ5rvGHhJWacOFqMYmWQDrY,110071\ndatasets/distributed.py,sha256=pulXFluRCmo69KeDqblPz32avS6LCHTGycS77XgI2mY,1562\ndatasets/download/__init__.py,sha256=lbFOtITDaR7PHrhzJ8VfRnpaOT6NYozSxUcLv_GVfTg,281\ndatasets/download/__pycache__/__init__.cpython-313.pyc,,\ndatasets/download/__pycach
e__/download_config.cpython-313.pyc,,\ndatasets/download/__pycache__/download_manager.cpython-313.pyc,,\ndatasets/download/__pycache__/streaming_download_manager.cpython-313.pyc,,\ndatasets/download/download_config.py,sha256=t5qA5qgy2Q1QJiDnpS8CqxO0XxNQ0ftAvOji99-l-Sk,3796\ndatasets/download/download_manager.py,sha256=44VSuSzIMJoZ-bDa3uF494jio5JmZFMeGAPzuXYRA7Q,12762\ndatasets/download/streaming_download_manager.py,sha256=qvcoVsXnAGNi2lzKRktck_DJrIx1fQ7xedm881s0IQw,7537\ndatasets/exceptions.py,sha256=B93GwElhEvlhHPU9GBSY8if27jhRwu875-gL6B2CL6c,4185\ndatasets/features/__init__.py,sha256=h3i4VatkedCKKMwaOMn5zykUnF4-MCU9hLzuX8AC58M,529\ndatasets/features/__pycache__/__init__.cpython-313.pyc,,\ndatasets/features/__pycache__/audio.cpython-313.pyc,,\ndatasets/features/__pycache__/features.cpython-313.pyc,,\ndatasets/features/__pycache__/image.cpython-313.pyc,,\ndatasets/features/__pycache__/pdf.cpython-313.pyc,,\ndatasets/features/__pycache__/translation.cpython-313.pyc,,\ndatasets/features/__pycache__/video.cpython-313.pyc,,\ndatasets/features/audio.py,sha256=kzIESK4J7NLEWH4EBPs0Dq1WCAOfnfsCEQX6LuZdOxo,12209\ndatasets/features/features.py,sha256=OR58kFbhs_4rezZMczK_Uifm19V2J3treKH4dLNR9no,95134\ndatasets/features/image.py,sha256=gSK3L3FahbRK-HLuC0VPtWjR-8Zl61Ax3DiYWT5H5ng,15556\ndatasets/features/pdf.py,sha256=5Q6MQsfHY21CIsUALPLGrkwtLoN5f61wDIofRarxqps,9291\ndatasets/features/translation.py,sha256=OO5ZPkKSTGpO8VHifHA8ft3Z-X4XZ9PJzk-w32pCxiE,4448\ndatasets/features/video.py,sha256=ZyVhJt77GH2XUx3m1-g7V3_1bwm-PPpHUm8MElA5kLo,11026\ndatasets/filesystems/__init__.py,sha256=jBDUQosQqEFIXUDLZwRWaTgNomwL6Fq2qiYPvvxuae0,1523\ndatasets/filesystems/__pycache__/__init__.cpython-313.pyc,,\ndatasets/filesystems/__pycache__/compression.cpython-313.pyc,,\ndatasets/filesystems/compression.py,sha256=2NnuTGzqmH5wk_Vmp9nhuQCAAZ6bzBpCErvrHVOLR4c,4488\ndatasets/fingerprint.py,sha256=9nIrIMTcsDdvMvhH56Ml_Zv0uXXR1dFvrolZkWxE-Ik,20333\ndatasets/formatting/__init__.py,sha256=-pM10fSzw4MVj_L3NFW
Ev2sUyBh4mbnvCkfXgfS6WII,5412\ndatasets/formatting/__pycache__/__init__.cpython-313.pyc,,\ndatasets/formatting/__pycache__/formatting.cpython-313.pyc,,\ndatasets/formatting/__pycache__/jax_formatter.cpython-313.pyc,,\ndatasets/formatting/__pycache__/np_formatter.cpython-313.pyc,,\ndatasets/formatting/__pycache__/polars_formatter.cpython-313.pyc,,\ndatasets/formatting/__pycache__/tf_formatter.cpython-313.pyc,,\ndatasets/formatting/__pycache__/torch_formatter.cpython-313.pyc,,\ndatasets/formatting/formatting.py,sha256=DiFh5gPgAD0gKwtZX3ORs9NbGeghsujUYYsqX4hLroc,26466\ndatasets/formatting/jax_formatter.py,sha256=mdOu5MLz854eWCfHRX9AkcVKhG5AwndXrGiydO7M0Cw,7135\ndatasets/formatting/np_formatter.py,sha256=tZy_TsVfylZdyXg0qHF0gveej_7m-wn0zNkXem0ua_s,4826\ndatasets/formatting/polars_formatter.py,sha256=oTm4l30SgGha-Oku42C0dA91Y8f2oifF9aWvi3QITDk,4744\ndatasets/formatting/tf_formatter.py,sha256=PI4SywSz4buSEKfpTxMLKPYxxCplxtLcJbofNcecFfs,4959\ndatasets/formatting/torch_formatter.py,sha256=nkXtPZLZ-D2jRh0gEEDkv3HydIlD_qB_dKPH8GhWe7g,5034\ndatasets/hub.py,sha256=TFBvGkTXceEmaz0FDlP-mDmQLYLLCZ9T6GT0J8Nn3Gw,9380\ndatasets/info.py,sha256=zeNDp3dva8_hnXbDIDw4bkpLyJr2GLDBW23WEjgoZKg,19689\ndatasets/inspect.py,sha256=x51_9bZ_-rVT2SYxLLKuOQoFGgvo0Y444lCbfkJpjbk,17143\ndatasets/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/io/__pycache__/__init__.cpython-313.pyc,,\ndatasets/io/__pycache__/abc.cpython-313.pyc,,\ndatasets/io/__pycache__/csv.cpython-313.pyc,,\ndatasets/io/__pycache__/generator.cpython-313.pyc,,\ndatasets/io/__pycache__/json.cpython-313.pyc,,\ndatasets/io/__pycache__/parquet.cpython-313.pyc,,\ndatasets/io/__pycache__/spark.cpython-313.pyc,,\ndatasets/io/__pycache__/sql.cpython-313.pyc,,\ndatasets/io/__pycache__/text.cpython-313.pyc,,\ndatasets/io/abc.py,sha256=LwDMXYs6YkhZuz1JiMK4PDIqgNjv7I8xH3UMUELW2ys,1672\ndatasets/io/csv.py,sha256=v4zaWehHb9U3njbdhy7wQnb8qO_c_58XOUC9JgBBVwI,5265\ndatasets/io/generator.py,sha256=sP_5GNozcxXIgDsPVMW_riqC
ZdInZ0_iFzcY_X1F-Mo,1909\ndatasets/io/json.py,sha256=vQZT9vhTbKX5Nyob4zQZR1NXWCft7bT5_6_8DD4XZyo,6697\ndatasets/io/parquet.py,sha256=IxotIfpNHXvJgFzsbT3-CjB1_FfvKpYhNNU1Akxe9bs,4354\ndatasets/io/spark.py,sha256=VUIODLHgIbiK0CI0UvthQ_gUO0MQDtHUozvw7Dfs8FI,1797\ndatasets/io/sql.py,sha256=4Zjw7peVEhhzoDtz2VTCFPqt2Tpy4zMB7T7ajb2GVTY,4234\ndatasets/io/text.py,sha256=bebEzXBSGC40_Gy94j9ZTJ7Hg0IfrV_4pnIUEhQZVig,1975\ndatasets/iterable_dataset.py,sha256=fF_bBxMHfAXiCoXNtN3UI6IcxBYNqh3hf88_NJp5mGk,159017\ndatasets/keyhash.py,sha256=4bqtuEHHlof2BBJIydN2s6N7--wJg54DXgsgzbtbNzA,3896\ndatasets/load.py,sha256=ViQ8cvFnfN-HCh3fxS9LHUfXl2Th4legOgYdKhysdKU,99829\ndatasets/naming.py,sha256=aqQqYG4QR8YoxJJMAUyVv_oQyudm4WAApsEHvcozpNg,3001\ndatasets/packaged_modules/__init__.py,sha256=mzJ6XaAZEzcCAIYNyUEBrf52xOwfs9qFljUNbJbhkw8,5212\ndatasets/packaged_modules/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/arrow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/arrow/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/arrow/__pycache__/arrow.cpython-313.pyc,,\ndatasets/packaged_modules/arrow/arrow.py,sha256=lkadNXfBbJMQNDw-tK4B4Y1KJR5G-J6aAn9I9jHiLWY,3494\ndatasets/packaged_modules/audiofolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/audiofolder/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/audiofolder/__pycache__/audiofolder.cpython-313.pyc,,\ndatasets/packaged_modules/audiofolder/audiofolder.py,sha256=fKJ03TQ0fAVEDJHBh7olw7iMLrlgq5TNEQyYZfUrgms,1468\ndatasets/packaged_modules/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/cache/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/cache/__pycache__/cache.cpython-313.pyc,,\ndatasets/packaged_modules/cache/cache.py,sha256=sjQDBHJUeLU1U9PUK179BHfn8dHNA2RoudCWeIAv8p8,8196\ndatasets/packaged_modules/csv/__init__.py,sha256=47DEQpj8HBSa-
_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/csv/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/csv/__pycache__/csv.cpython-313.pyc,,\ndatasets/packaged_modules/csv/csv.py,sha256=4LShCsr9o4YY0C-n4V37L01u2_2qithYrswSp1WMsRU,8568\ndatasets/packaged_modules/folder_based_builder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/folder_based_builder/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/folder_based_builder/__pycache__/folder_based_builder.cpython-313.pyc,,\ndatasets/packaged_modules/folder_based_builder/folder_based_builder.py,sha256=5sSSbuKHcRqCww0p0RKxLvUk_8v57sO0LPavA0hIpY4,21074\ndatasets/packaged_modules/generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/generator/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/generator/__pycache__/generator.cpython-313.pyc,,\ndatasets/packaged_modules/generator/generator.py,sha256=Oke-26QOyDRkGfmIARqSXDqOJW0sIDjboYCwWSHsbdQ,1002\ndatasets/packaged_modules/imagefolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/imagefolder/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/imagefolder/__pycache__/imagefolder.cpython-313.pyc,,\ndatasets/packaged_modules/imagefolder/imagefolder.py,sha256=UpMVe8TUyayzHsVSfKN5wiXcc94QdamMvxauI4oFdw4,1956\ndatasets/packaged_modules/json/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/json/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/json/__pycache__/json.cpython-313.pyc,,\ndatasets/packaged_modules/json/json.py,sha256=ipf8GieLlsGt5x1rJKr4ViJWg9oTHNp85OKYyPSW2R0,8698\ndatasets/packaged_modules/pandas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/pandas/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/pandas/__pycache__/pandas.cpython-313.pyc,,\nd
atasets/packaged_modules/pandas/pandas.py,sha256=eR0B5iGOHZ1owzezYmlvx5U_rWblmlpCt_PdC5Ax59E,2547\ndatasets/packaged_modules/parquet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/parquet/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/parquet/__pycache__/parquet.cpython-313.pyc,,\ndatasets/packaged_modules/parquet/parquet.py,sha256=4P1SU_5Pqxp-nH2Jm_T8YDMof7YU-x6cUklFOl19wpc,5099\ndatasets/packaged_modules/pdffolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/pdffolder/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/pdffolder/__pycache__/pdffolder.cpython-313.pyc,,\ndatasets/packaged_modules/pdffolder/pdffolder.py,sha256=bPYBh9-XOr2C-gg_Fl8h-UKhsVQ7VXjBL2FfW8abiGU,565\ndatasets/packaged_modules/spark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/spark/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/spark/__pycache__/spark.cpython-313.pyc,,\ndatasets/packaged_modules/spark/spark.py,sha256=UKu4mRB3k0EFb-Ij83eXpzr7VjCYn_TohQconF8Npag,14689\ndatasets/packaged_modules/sql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/sql/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/sql/__pycache__/sql.cpython-313.pyc,,\ndatasets/packaged_modules/sql/sql.py,sha256=0WWm-Xfputk2_QRCVrbKDbZAqZNHxOGdUwfX__4F5E0,4495\ndatasets/packaged_modules/text/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/text/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/text/__pycache__/text.cpython-313.pyc,,\ndatasets/packaged_modules/text/text.py,sha256=VOJVHkmy4Vm53nspW7QboCkPxd1S0M0uEzun5v8rzUE,5516\ndatasets/packaged_modules/videofolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/videofolder/__pycache__/__init__.cpython-313.pyc,,\ndatasets/package
d_modules/videofolder/__pycache__/videofolder.cpython-313.pyc,,\ndatasets/packaged_modules/videofolder/videofolder.py,sha256=HLTMldDZ3WfK8OAbI2wssBuNCP6ucRBpNLpCoJVDL10,807\ndatasets/packaged_modules/webdataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/webdataset/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/webdataset/__pycache__/_tenbin.cpython-313.pyc,,\ndatasets/packaged_modules/webdataset/__pycache__/webdataset.cpython-313.pyc,,\ndatasets/packaged_modules/webdataset/_tenbin.py,sha256=oovYsgR2R3eXSn1xSCLG3oTly1szKDP4UOiRp4ORdIk,8533\ndatasets/packaged_modules/webdataset/webdataset.py,sha256=nqZQeeYiFM2nc7zEGrUmBcn7I8xBiXJLby2dG9hSOKo,10599\ndatasets/packaged_modules/xml/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/packaged_modules/xml/__pycache__/__init__.cpython-313.pyc,,\ndatasets/packaged_modules/xml/__pycache__/xml.cpython-313.pyc,,\ndatasets/packaged_modules/xml/xml.py,sha256=av0HcLQnKl5d1yM0jfBqVhw9EbzqmO_RsHDfa5pkvx4,2822\ndatasets/parallel/__init__.py,sha256=wiRFK4x67ez2vvmjwM2Sb9R1yFdf38laSarU9y0Bido,76\ndatasets/parallel/__pycache__/__init__.cpython-313.pyc,,\ndatasets/parallel/__pycache__/parallel.cpython-313.pyc,,\ndatasets/parallel/parallel.py,sha256=E-oOQ6zwKrkLFPwZ-3EOcr_aANJDhE-d6QTq7Mp7WvA,4738\ndatasets/search.py,sha256=ezL8gWjdcvHqwGHoB2T1jnjrbiCES_7ElmBLRL1DLwY,35600\ndatasets/splits.py,sha256=zZO9vPnbfzfxXQG8LSAQkajXV7TGB2kEwOWrQxPFQbI,23430\ndatasets/streaming.py,sha256=lOn__P1Tp2Z8jEbjwrdRPs-abNhUqOTY18fXaXQyPtA,6534\ndatasets/table.py,sha256=DKnuZvgYzDjk70bAf0g1F6gwpLIdmPGqwlKkvcsOgfA,95878\ndatasets/utils/__init__.py,sha256=PuZtB9YTbRyvdwubnsx-JGdHuMA7p0I0Rmh0E_uxYF0,999\ndatasets/utils/__pycache__/__init__.cpython-313.pyc,,\ndatasets/utils/__pycache__/_dataset_viewer.cpython-313.pyc,,\ndatasets/utils/__pycache__/_dill.cpython-313.pyc,,\ndatasets/utils/__pycache__/_filelock.cpython-313.pyc,,\ndatasets/utils/__pycache__/deprecation_utils.cp
ython-313.pyc,,\ndatasets/utils/__pycache__/doc_utils.cpython-313.pyc,,\ndatasets/utils/__pycache__/experimental.cpython-313.pyc,,\ndatasets/utils/__pycache__/extract.cpython-313.pyc,,\ndatasets/utils/__pycache__/file_utils.cpython-313.pyc,,\ndatasets/utils/__pycache__/filelock.cpython-313.pyc,,\ndatasets/utils/__pycache__/hub.cpython-313.pyc,,\ndatasets/utils/__pycache__/info_utils.cpython-313.pyc,,\ndatasets/utils/__pycache__/logging.cpython-313.pyc,,\ndatasets/utils/__pycache__/metadata.cpython-313.pyc,,\ndatasets/utils/__pycache__/patching.cpython-313.pyc,,\ndatasets/utils/__pycache__/py_utils.cpython-313.pyc,,\ndatasets/utils/__pycache__/sharding.cpython-313.pyc,,\ndatasets/utils/__pycache__/stratify.cpython-313.pyc,,\ndatasets/utils/__pycache__/tf_utils.cpython-313.pyc,,\ndatasets/utils/__pycache__/tqdm.cpython-313.pyc,,\ndatasets/utils/__pycache__/track.cpython-313.pyc,,\ndatasets/utils/__pycache__/typing.cpython-313.pyc,,\ndatasets/utils/__pycache__/version.cpython-313.pyc,,\ndatasets/utils/_dataset_viewer.py,sha256=SrE1N18S5yCoCx0rAhwaHNDVS9uhxjspA84iNT4TFRw,4397\ndatasets/utils/_dill.py,sha256=0QphnYT5cKHJEn17Cs_i1XFYazIfJZUr5mm8ehee_bw,17136\ndatasets/utils/_filelock.py,sha256=iXW3bxsIr5JWNemhKtF_-q_0ysajkUTItzMm8LY9LBY,2355\ndatasets/utils/deprecation_utils.py,sha256=hTHwlzRs92NfNVudH71LMpW70sjbsP5amebrIgi3A-U,3452\ndatasets/utils/doc_utils.py,sha256=HoSm0TFaQaCYGfDgNhpBJ4Xc2WQZuOD6dTxLd9D87fs,407\ndatasets/utils/experimental.py,sha256=JgOjaEY3RWZ--3u0-ry82gLCDUpudfBfl-hWZ46SyS4,1097\ndatasets/utils/extract.py,sha256=kKMAujtg5FOK91MBXyWl6FFHZStEPn8WkOE7Jmo2Iq4,13021\ndatasets/utils/file_utils.py,sha256=im6LXdZy1hf9KsHK3RIHnSI_9V7cVuru5UF4BnN6W3M,54287\ndatasets/utils/filelock.py,sha256=H6C5dQGFCzVKyeDRRY8fZ4YGTEvvNd-MTjpL_sWYb5k,352\ndatasets/utils/hub.py,sha256=sD9VpJENA3M9_rWFGavUaVV_GsrOBLEKCZjcqtRdJ_s,438\ndatasets/utils/info_utils.py,sha256=gAzubjnQbE0YTzB3hf3Cipmx5wCBtOje3fPwjYdzVBE,4330\ndatasets/utils/logging.py,sha256=tk3Oz51sC6fqBAhoU7tRzKJZO8S
pz41260W93npnQaA,5382\ndatasets/utils/metadata.py,sha256=Hrmn8xUoEzwpJKG3Y6tfJt5t7nW1OCxNjfLTlEaxsrI,9367\ndatasets/utils/patching.py,sha256=iTeb7XG4faLJKNylq55EcZyCndUXU_XBDvOOkuDz_sc,4955\ndatasets/utils/py_utils.py,sha256=Ql6QN-Lq7nRB_XTCxS_yBHU0ZmzWz9YqWo32rUuLPwU,28088\ndatasets/utils/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\ndatasets/utils/resources/__pycache__/__init__.cpython-313.pyc,,\ndatasets/utils/resources/creators.json,sha256=XtIpMZefgBOdTevRrQTkFiufbgCbp_iyxseyphYQkn0,257\ndatasets/utils/resources/languages.json,sha256=Z0rQNPsfje8zMi8KdvvwxF4APwwqcskJFUvhNiLAgPM,199138\ndatasets/utils/resources/multilingualities.json,sha256=02Uc8RtRzfl13l98Y_alZm5HuMYwPzL78B0S5a1X-8c,205\ndatasets/utils/resources/readme_structure.yaml,sha256=hNf9msoBZw5jfakQrDb0Af8T325TXdcaHsAO2MUcZvY,3877\ndatasets/utils/resources/size_categories.json,sha256=_5nAP7z8R6t7_GfER81QudFO6Y1tqYu4AWrr4Aot8S8,171\ndatasets/utils/sharding.py,sha256=VBQ4bRJQijMNDQTgFb1_ddlQ28wAcA0aQp4e-1jFIAk,4215\ndatasets/utils/stratify.py,sha256=-MVaLmijYhGyKDpnZS9A8SiHekaIyVm84HVyIIQOmfg,4085\ndatasets/utils/tf_utils.py,sha256=T3OysLGbkO7y-J-o9OVGyn9l-l-A3ruj-24JM_UULm8,24448\ndatasets/utils/tqdm.py,sha256=44F0g2fBpJwShh1l88PP7Z8kBihFWA_Yee4sjiQSxes,4303\ndatasets/utils/track.py,sha256=M81CGLn3MyJzHm98CQkbF3_1DG7evQsw-V52_Bp2paI,1838\ndatasets/utils/typing.py,sha256=G11ytWmwjqVia2IdziRDIWvQ4mLJee-sKzgJfHqU16E,205\ndatasets/utils/version.py,sha256=Z82cHpjTbQVJyWgnwSU8DsW2G0y-sSbSoOVeQrAds9k,3281\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\RECORD
|
RECORD
|
Other
| 19,514 | 0.7 | 0 | 0 |
python-kit
| 640 |
2024-05-05T09:40:43.399108
|
BSD-3-Clause
| false |
5472c48d7951f3e90e4d2c1252234d85
|
datasets\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\top_level.txt
|
top_level.txt
|
Other
| 9 | 0.5 | 0 | 0 |
react-lib
| 751 |
2024-04-24T04:42:19.773747
|
MIT
| false |
f3d9a6732a4128ce75c7ff5c881b6209
|
Wheel-Version: 1.0\nGenerator: setuptools (75.8.0)\nRoot-Is-Purelib: true\nTag: py3-none-any\n\n
|
.venv\Lib\site-packages\datasets-3.6.0.dist-info\WHEEL
|
WHEEL
|
Other
| 91 | 0.5 | 0 | 0 |
react-lib
| 744 |
2025-06-23T12:21:16.476392
|
GPL-3.0
| false |
68e6e969da03b3fdebefe35680a5c606
|
# -*- coding: utf-8 -*-\nimport datetime\nimport calendar\n\nimport operator\nfrom math import copysign\n\nfrom six import integer_types\nfrom warnings import warn\n\nfrom ._common import weekday\n\nMO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))\n\n__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]\n\n\nclass relativedelta(object):\n """\n The relativedelta type is designed to be applied to an existing datetime and\n can replace specific components of that datetime, or represents an interval\n of time.\n\n It is based on the specification of the excellent work done by M.-A. Lemburg\n in his\n `mx.DateTime <https://www.egenix.com/products/python/mxBase/mxDateTime/>`_ extension.\n However, notice that this type does *NOT* implement the same algorithm as\n his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.\n\n There are two different ways to build a relativedelta instance. The\n first one is passing it two date/datetime classes::\n\n relativedelta(datetime1, datetime2)\n\n The second one is passing it any number of the following keyword arguments::\n\n relativedelta(arg1=x,arg2=y,arg3=z...)\n\n year, month, day, hour, minute, second, microsecond:\n Absolute information (argument is singular); adding or subtracting a\n relativedelta with absolute information does not perform an arithmetic\n operation, but rather REPLACES the corresponding value in the\n original datetime with the value(s) in relativedelta.\n\n years, months, weeks, days, hours, minutes, seconds, microseconds:\n Relative information, may be negative (argument is plural); adding\n or subtracting a relativedelta with relative information performs\n the corresponding arithmetic operation on the original datetime value\n with the information in the relativedelta.\n\n weekday:\n One of the weekday instances (MO, TU, etc) available in the\n relativedelta module. 
These instances may receive a parameter N,\n specifying the Nth weekday, which could be positive or negative\n (like MO(+1) or MO(-2)). Not specifying it is the same as specifying\n +1. You can also use an integer, where 0=MO. This argument is always\n relative e.g. if the calculated date is already Monday, using MO(1)\n or MO(-1) won't change the day. To effectively make it absolute, use\n it in combination with the day argument (e.g. day=1, MO(1) for first\n Monday of the month).\n\n leapdays:\n Will add given days to the date found, if year is a leap\n year, and the date found is post 28 of february.\n\n yearday, nlyearday:\n Set the yearday or the non-leap year day (jump leap days).\n These are converted to day/month/leapdays information.\n\n There are relative and absolute forms of the keyword\n arguments. The plural is relative, and the singular is\n absolute. For each argument in the order below, the absolute form\n is applied first (by setting each attribute to that value) and\n then the relative form (by adding the value to the attribute).\n\n The order of attributes considered when this relativedelta is\n added to a datetime is:\n\n 1. Year\n 2. Month\n 3. Day\n 4. Hours\n 5. Minutes\n 6. Seconds\n 7. 
Microseconds\n\n Finally, weekday is applied, using the rule described above.\n\n For example\n\n >>> from datetime import datetime\n >>> from dateutil.relativedelta import relativedelta, MO\n >>> dt = datetime(2018, 4, 9, 13, 37, 0)\n >>> delta = relativedelta(hours=25, day=1, weekday=MO(1))\n >>> dt + delta\n datetime.datetime(2018, 4, 2, 14, 37)\n\n First, the day is set to 1 (the first of the month), then 25 hours\n are added, to get to the 2nd day and 14th hour, finally the\n weekday is applied, but since the 2nd is already a Monday there is\n no effect.\n\n """\n\n def __init__(self, dt1=None, dt2=None,\n years=0, months=0, days=0, leapdays=0, weeks=0,\n hours=0, minutes=0, seconds=0, microseconds=0,\n year=None, month=None, day=None, weekday=None,\n yearday=None, nlyearday=None,\n hour=None, minute=None, second=None, microsecond=None):\n\n if dt1 and dt2:\n # datetime is a subclass of date. So both must be date\n if not (isinstance(dt1, datetime.date) and\n isinstance(dt2, datetime.date)):\n raise TypeError("relativedelta only diffs datetime/date")\n\n # We allow two dates, or two datetimes, so we coerce them to be\n # of the same type\n if (isinstance(dt1, datetime.datetime) !=\n isinstance(dt2, datetime.datetime)):\n if not isinstance(dt1, datetime.datetime):\n dt1 = datetime.datetime.fromordinal(dt1.toordinal())\n elif not isinstance(dt2, datetime.datetime):\n dt2 = datetime.datetime.fromordinal(dt2.toordinal())\n\n self.years = 0\n self.months = 0\n self.days = 0\n self.leapdays = 0\n self.hours = 0\n self.minutes = 0\n self.seconds = 0\n self.microseconds = 0\n self.year = None\n self.month = None\n self.day = None\n self.weekday = None\n self.hour = None\n self.minute = None\n self.second = None\n self.microsecond = None\n self._has_time = 0\n\n # Get year / month delta between the two\n months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month)\n self._set_months(months)\n\n # Remove the year/month delta so the timedelta is just well-defined\n # 
time units (seconds, days and microseconds)\n dtm = self.__radd__(dt2)\n\n # If we've overshot our target, make an adjustment\n if dt1 < dt2:\n compare = operator.gt\n increment = 1\n else:\n compare = operator.lt\n increment = -1\n\n while compare(dt1, dtm):\n months += increment\n self._set_months(months)\n dtm = self.__radd__(dt2)\n\n # Get the timedelta between the "months-adjusted" date and dt1\n delta = dt1 - dtm\n self.seconds = delta.seconds + delta.days * 86400\n self.microseconds = delta.microseconds\n else:\n # Check for non-integer values in integer-only quantities\n if any(x is not None and x != int(x) for x in (years, months)):\n raise ValueError("Non-integer years and months are "\n "ambiguous and not currently supported.")\n\n # Relative information\n self.years = int(years)\n self.months = int(months)\n self.days = days + weeks * 7\n self.leapdays = leapdays\n self.hours = hours\n self.minutes = minutes\n self.seconds = seconds\n self.microseconds = microseconds\n\n # Absolute information\n self.year = year\n self.month = month\n self.day = day\n self.hour = hour\n self.minute = minute\n self.second = second\n self.microsecond = microsecond\n\n if any(x is not None and int(x) != x\n for x in (year, month, day, hour,\n minute, second, microsecond)):\n # For now we'll deprecate floats - later it'll be an error.\n warn("Non-integer value passed as absolute information. 
" +\n "This is not a well-defined condition and will raise " +\n "errors in future versions.", DeprecationWarning)\n\n if isinstance(weekday, integer_types):\n self.weekday = weekdays[weekday]\n else:\n self.weekday = weekday\n\n yday = 0\n if nlyearday:\n yday = nlyearday\n elif yearday:\n yday = yearday\n if yearday > 59:\n self.leapdays = -1\n if yday:\n ydayidx = [31, 59, 90, 120, 151, 181, 212,\n 243, 273, 304, 334, 366]\n for idx, ydays in enumerate(ydayidx):\n if yday <= ydays:\n self.month = idx+1\n if idx == 0:\n self.day = yday\n else:\n self.day = yday-ydayidx[idx-1]\n break\n else:\n raise ValueError("invalid year day (%d)" % yday)\n\n self._fix()\n\n def _fix(self):\n if abs(self.microseconds) > 999999:\n s = _sign(self.microseconds)\n div, mod = divmod(self.microseconds * s, 1000000)\n self.microseconds = mod * s\n self.seconds += div * s\n if abs(self.seconds) > 59:\n s = _sign(self.seconds)\n div, mod = divmod(self.seconds * s, 60)\n self.seconds = mod * s\n self.minutes += div * s\n if abs(self.minutes) > 59:\n s = _sign(self.minutes)\n div, mod = divmod(self.minutes * s, 60)\n self.minutes = mod * s\n self.hours += div * s\n if abs(self.hours) > 23:\n s = _sign(self.hours)\n div, mod = divmod(self.hours * s, 24)\n self.hours = mod * s\n self.days += div * s\n if abs(self.months) > 11:\n s = _sign(self.months)\n div, mod = divmod(self.months * s, 12)\n self.months = mod * s\n self.years += div * s\n if (self.hours or self.minutes or self.seconds or self.microseconds\n or self.hour is not None or self.minute is not None or\n self.second is not None or self.microsecond is not None):\n self._has_time = 1\n else:\n self._has_time = 0\n\n @property\n def weeks(self):\n return int(self.days / 7.0)\n\n @weeks.setter\n def weeks(self, value):\n self.days = self.days - (self.weeks * 7) + value * 7\n\n def _set_months(self, months):\n self.months = months\n if abs(self.months) > 11:\n s = _sign(self.months)\n div, mod = divmod(self.months * s, 12)\n 
self.months = mod * s\n self.years = div * s\n else:\n self.years = 0\n\n def normalized(self):\n """\n Return a version of this object represented entirely using integer\n values for the relative attributes.\n\n >>> relativedelta(days=1.5, hours=2).normalized()\n relativedelta(days=+1, hours=+14)\n\n :return:\n Returns a :class:`dateutil.relativedelta.relativedelta` object.\n """\n # Cascade remainders down (rounding each to roughly nearest microsecond)\n days = int(self.days)\n\n hours_f = round(self.hours + 24 * (self.days - days), 11)\n hours = int(hours_f)\n\n minutes_f = round(self.minutes + 60 * (hours_f - hours), 10)\n minutes = int(minutes_f)\n\n seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8)\n seconds = int(seconds_f)\n\n microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds))\n\n # Constructor carries overflow back up with call to _fix()\n return self.__class__(years=self.years, months=self.months,\n days=days, hours=hours, minutes=minutes,\n seconds=seconds, microseconds=microseconds,\n leapdays=self.leapdays, year=self.year,\n month=self.month, day=self.day,\n weekday=self.weekday, hour=self.hour,\n minute=self.minute, second=self.second,\n microsecond=self.microsecond)\n\n def __add__(self, other):\n if isinstance(other, relativedelta):\n return self.__class__(years=other.years + self.years,\n months=other.months + self.months,\n days=other.days + self.days,\n hours=other.hours + self.hours,\n minutes=other.minutes + self.minutes,\n seconds=other.seconds + self.seconds,\n microseconds=(other.microseconds +\n self.microseconds),\n leapdays=other.leapdays or self.leapdays,\n year=(other.year if other.year is not None\n else self.year),\n month=(other.month if other.month is not None\n else self.month),\n day=(other.day if other.day is not None\n else self.day),\n weekday=(other.weekday if other.weekday is not None\n else self.weekday),\n hour=(other.hour if other.hour is not None\n else self.hour),\n minute=(other.minute 
if other.minute is not None\n else self.minute),\n second=(other.second if other.second is not None\n else self.second),\n microsecond=(other.microsecond if other.microsecond\n is not None else\n self.microsecond))\n if isinstance(other, datetime.timedelta):\n return self.__class__(years=self.years,\n months=self.months,\n days=self.days + other.days,\n hours=self.hours,\n minutes=self.minutes,\n seconds=self.seconds + other.seconds,\n microseconds=self.microseconds + other.microseconds,\n leapdays=self.leapdays,\n year=self.year,\n month=self.month,\n day=self.day,\n weekday=self.weekday,\n hour=self.hour,\n minute=self.minute,\n second=self.second,\n microsecond=self.microsecond)\n if not isinstance(other, datetime.date):\n return NotImplemented\n elif self._has_time and not isinstance(other, datetime.datetime):\n other = datetime.datetime.fromordinal(other.toordinal())\n year = (self.year or other.year)+self.years\n month = self.month or other.month\n if self.months:\n assert 1 <= abs(self.months) <= 12\n month += self.months\n if month > 12:\n year += 1\n month -= 12\n elif month < 1:\n year -= 1\n month += 12\n day = min(calendar.monthrange(year, month)[1],\n self.day or other.day)\n repl = {"year": year, "month": month, "day": day}\n for attr in ["hour", "minute", "second", "microsecond"]:\n value = getattr(self, attr)\n if value is not None:\n repl[attr] = value\n days = self.days\n if self.leapdays and month > 2 and calendar.isleap(year):\n days += self.leapdays\n ret = (other.replace(**repl)\n + datetime.timedelta(days=days,\n hours=self.hours,\n minutes=self.minutes,\n seconds=self.seconds,\n microseconds=self.microseconds))\n if self.weekday:\n weekday, nth = self.weekday.weekday, self.weekday.n or 1\n jumpdays = (abs(nth) - 1) * 7\n if nth > 0:\n jumpdays += (7 - ret.weekday() + weekday) % 7\n else:\n jumpdays += (ret.weekday() - weekday) % 7\n jumpdays *= -1\n ret += datetime.timedelta(days=jumpdays)\n return ret\n\n def __radd__(self, other):\n return 
self.__add__(other)\n\n def __rsub__(self, other):\n return self.__neg__().__radd__(other)\n\n def __sub__(self, other):\n if not isinstance(other, relativedelta):\n return NotImplemented # In case the other object defines __rsub__\n return self.__class__(years=self.years - other.years,\n months=self.months - other.months,\n days=self.days - other.days,\n hours=self.hours - other.hours,\n minutes=self.minutes - other.minutes,\n seconds=self.seconds - other.seconds,\n microseconds=self.microseconds - other.microseconds,\n leapdays=self.leapdays or other.leapdays,\n year=(self.year if self.year is not None\n else other.year),\n month=(self.month if self.month is not None else\n other.month),\n day=(self.day if self.day is not None else\n other.day),\n weekday=(self.weekday if self.weekday is not None else\n other.weekday),\n hour=(self.hour if self.hour is not None else\n other.hour),\n minute=(self.minute if self.minute is not None else\n other.minute),\n second=(self.second if self.second is not None else\n other.second),\n microsecond=(self.microsecond if self.microsecond\n is not None else\n other.microsecond))\n\n def __abs__(self):\n return self.__class__(years=abs(self.years),\n months=abs(self.months),\n days=abs(self.days),\n hours=abs(self.hours),\n minutes=abs(self.minutes),\n seconds=abs(self.seconds),\n microseconds=abs(self.microseconds),\n leapdays=self.leapdays,\n year=self.year,\n month=self.month,\n day=self.day,\n weekday=self.weekday,\n hour=self.hour,\n minute=self.minute,\n second=self.second,\n microsecond=self.microsecond)\n\n def __neg__(self):\n return self.__class__(years=-self.years,\n months=-self.months,\n days=-self.days,\n hours=-self.hours,\n minutes=-self.minutes,\n seconds=-self.seconds,\n microseconds=-self.microseconds,\n leapdays=self.leapdays,\n year=self.year,\n month=self.month,\n day=self.day,\n weekday=self.weekday,\n hour=self.hour,\n minute=self.minute,\n second=self.second,\n microsecond=self.microsecond)\n\n def 
__bool__(self):\n return not (not self.years and\n not self.months and\n not self.days and\n not self.hours and\n not self.minutes and\n not self.seconds and\n not self.microseconds and\n not self.leapdays and\n self.year is None and\n self.month is None and\n self.day is None and\n self.weekday is None and\n self.hour is None and\n self.minute is None and\n self.second is None and\n self.microsecond is None)\n # Compatibility with Python 2.x\n __nonzero__ = __bool__\n\n def __mul__(self, other):\n try:\n f = float(other)\n except TypeError:\n return NotImplemented\n\n return self.__class__(years=int(self.years * f),\n months=int(self.months * f),\n days=int(self.days * f),\n hours=int(self.hours * f),\n minutes=int(self.minutes * f),\n seconds=int(self.seconds * f),\n microseconds=int(self.microseconds * f),\n leapdays=self.leapdays,\n year=self.year,\n month=self.month,\n day=self.day,\n weekday=self.weekday,\n hour=self.hour,\n minute=self.minute,\n second=self.second,\n microsecond=self.microsecond)\n\n __rmul__ = __mul__\n\n def __eq__(self, other):\n if not isinstance(other, relativedelta):\n return NotImplemented\n if self.weekday or other.weekday:\n if not self.weekday or not other.weekday:\n return False\n if self.weekday.weekday != other.weekday.weekday:\n return False\n n1, n2 = self.weekday.n, other.weekday.n\n if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):\n return False\n return (self.years == other.years and\n self.months == other.months and\n self.days == other.days and\n self.hours == other.hours and\n self.minutes == other.minutes and\n self.seconds == other.seconds and\n self.microseconds == other.microseconds and\n self.leapdays == other.leapdays and\n self.year == other.year and\n self.month == other.month and\n self.day == other.day and\n self.hour == other.hour and\n self.minute == other.minute and\n self.second == other.second and\n self.microsecond == other.microsecond)\n\n def __hash__(self):\n return hash((\n 
self.weekday,\n self.years,\n self.months,\n self.days,\n self.hours,\n self.minutes,\n self.seconds,\n self.microseconds,\n self.leapdays,\n self.year,\n self.month,\n self.day,\n self.hour,\n self.minute,\n self.second,\n self.microsecond,\n ))\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def __div__(self, other):\n try:\n reciprocal = 1 / float(other)\n except TypeError:\n return NotImplemented\n\n return self.__mul__(reciprocal)\n\n __truediv__ = __div__\n\n def __repr__(self):\n l = []\n for attr in ["years", "months", "days", "leapdays",\n "hours", "minutes", "seconds", "microseconds"]:\n value = getattr(self, attr)\n if value:\n l.append("{attr}={value:+g}".format(attr=attr, value=value))\n for attr in ["year", "month", "day", "weekday",\n "hour", "minute", "second", "microsecond"]:\n value = getattr(self, attr)\n if value is not None:\n l.append("{attr}={value}".format(attr=attr, value=repr(value)))\n return "{classname}({attrs})".format(classname=self.__class__.__name__,\n attrs=", ".join(l))\n\n\ndef _sign(x):\n return int(copysign(1, x))\n\n# vim:ts=4:sw=4:et\n
|
.venv\Lib\site-packages\dateutil\relativedelta.py
|
relativedelta.py
|
Python
| 24,903 | 0.95 | 0.15025 | 0.032258 |
node-utils
| 505 |
2024-03-28T17:55:28.819329
|
Apache-2.0
| false |
03d42bcded74d1a5ff1c845930e8ff75
|
# tzwin has moved to dateutil.tz.win\nfrom .tz.win import *\n
|
.venv\Lib\site-packages\dateutil\tzwin.py
|
tzwin.py
|
Python
| 59 | 0.75 | 0 | 0.5 |
vue-tools
| 207 |
2025-04-08T13:15:11.122020
|
GPL-3.0
| false |
be2ec72a367bdcb6cb7998caa76d6efd
|
# -*- coding: utf-8 -*-\n"""\nThis module offers general convenience and utility functions for dealing with\ndatetimes.\n\n.. versionadded:: 2.7.0\n"""\nfrom __future__ import unicode_literals\n\nfrom datetime import datetime, time\n\n\ndef today(tzinfo=None):\n """\n Returns a :py:class:`datetime` representing the current day at midnight\n\n :param tzinfo:\n The time zone to attach (also used to determine the current day).\n\n :return:\n A :py:class:`datetime.datetime` object representing the current day\n at midnight.\n """\n\n dt = datetime.now(tzinfo)\n return datetime.combine(dt.date(), time(0, tzinfo=tzinfo))\n\n\ndef default_tzinfo(dt, tzinfo):\n """\n Sets the ``tzinfo`` parameter on naive datetimes only\n\n This is useful for example when you are provided a datetime that may have\n either an implicit or explicit time zone, such as when parsing a time zone\n string.\n\n .. doctest::\n\n >>> from dateutil.tz import tzoffset\n >>> from dateutil.parser import parse\n >>> from dateutil.utils import default_tzinfo\n >>> dflt_tz = tzoffset("EST", -18000)\n >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz))\n 2014-01-01 12:30:00+00:00\n >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz))\n 2014-01-01 12:30:00-05:00\n\n :param dt:\n The datetime on which to replace the time zone\n\n :param tzinfo:\n The :py:class:`datetime.tzinfo` subclass instance to assign to\n ``dt`` if (and only if) it is naive.\n\n :return:\n Returns an aware :py:class:`datetime.datetime`.\n """\n if dt.tzinfo is not None:\n return dt\n else:\n return dt.replace(tzinfo=tzinfo)\n\n\ndef within_delta(dt1, dt2, delta):\n """\n Useful for comparing two datetimes that may have a negligible difference\n to be considered equal.\n """\n delta = abs(delta)\n difference = dt1 - dt2\n return -delta <= difference <= delta\n
|
.venv\Lib\site-packages\dateutil\utils.py
|
utils.py
|
Python
| 1,965 | 0.95 | 0.183099 | 0.018519 |
node-utils
| 614 |
2024-08-28T19:07:38.846409
|
GPL-3.0
| false |
230fa0bc1ab758b0777dfc48c4bbf71d
|
# file generated by setuptools_scm\n# don't change, don't track in version control\n__version__ = version = '2.9.0.post0'\n__version_tuple__ = version_tuple = (2, 9, 0)\n
|
.venv\Lib\site-packages\dateutil\_version.py
|
_version.py
|
Python
| 166 | 0.8 | 0 | 0.5 |
react-lib
| 825 |
2024-01-19T21:18:43.664976
|
Apache-2.0
| false |
af88eaaa186788011cafe28224151845
|
# -*- coding: utf-8 -*-\nimport sys\n\ntry:\n from ._version import version as __version__\nexcept ImportError:\n __version__ = 'unknown'\n\n__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',\n 'utils', 'zoneinfo']\n\ndef __getattr__(name):\n import importlib\n\n if name in __all__:\n return importlib.import_module("." + name, __name__)\n raise AttributeError(\n "module {!r} has not attribute {!r}".format(__name__, name)\n )\n\n\ndef __dir__():\n # __dir__ should include all the lazy-importable modules as well.\n return [x for x in globals() if x not in sys.modules] + __all__\n
|
.venv\Lib\site-packages\dateutil\__init__.py
|
__init__.py
|
Python
| 620 | 0.95 | 0.25 | 0.111111 |
python-kit
| 407 |
2024-03-31T13:16:41.175620
|
Apache-2.0
| false |
413df2a03410e4da901afb2e8c29e266
|
# -*- coding: utf-8 -*-\n"""\nThis module offers a parser for ISO-8601 strings\n\nIt is intended to support all valid date, time and datetime formats per the\nISO-8601 specification.\n\n..versionadded:: 2.7.0\n"""\nfrom datetime import datetime, timedelta, time, date\nimport calendar\nfrom dateutil import tz\n\nfrom functools import wraps\n\nimport re\nimport six\n\n__all__ = ["isoparse", "isoparser"]\n\n\ndef _takes_ascii(f):\n @wraps(f)\n def func(self, str_in, *args, **kwargs):\n # If it's a stream, read the whole thing\n str_in = getattr(str_in, 'read', lambda: str_in)()\n\n # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII\n if isinstance(str_in, six.text_type):\n # ASCII is the same in UTF-8\n try:\n str_in = str_in.encode('ascii')\n except UnicodeEncodeError as e:\n msg = 'ISO-8601 strings should contain only ASCII characters'\n six.raise_from(ValueError(msg), e)\n\n return f(self, str_in, *args, **kwargs)\n\n return func\n\n\nclass isoparser(object):\n def __init__(self, sep=None):\n """\n :param sep:\n A single character that separates date and time portions. If\n ``None``, the parser will accept any single character.\n For strict ISO-8601 adherence, pass ``'T'``.\n """\n if sep is not None:\n if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'):\n raise ValueError('Separator must be a single, non-numeric ' +\n 'ASCII character')\n\n sep = sep.encode('ascii')\n\n self._sep = sep\n\n @_takes_ascii\n def isoparse(self, dt_str):\n """\n Parse an ISO-8601 datetime string into a :class:`datetime.datetime`.\n\n An ISO-8601 datetime string consists of a date portion, followed\n optionally by a time portion - the date and time portions are separated\n by a single character separator, which is ``T`` in the official\n standard. 
Incomplete date formats (such as ``YYYY-MM``) may *not* be\n combined with a time portion.\n\n Supported date formats are:\n\n Common:\n\n - ``YYYY``\n - ``YYYY-MM``\n - ``YYYY-MM-DD`` or ``YYYYMMDD``\n\n Uncommon:\n\n - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)\n - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day\n\n The ISO week and day numbering follows the same logic as\n :func:`datetime.date.isocalendar`.\n\n Supported time formats are:\n\n - ``hh``\n - ``hh:mm`` or ``hhmm``\n - ``hh:mm:ss`` or ``hhmmss``\n - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)\n\n Midnight is a special case for `hh`, as the standard supports both\n 00:00 and 24:00 as a representation. The decimal separator can be\n either a dot or a comma.\n\n\n .. caution::\n\n Support for fractional components other than seconds is part of the\n ISO-8601 standard, but is not currently implemented in this parser.\n\n Supported time zone offset formats are:\n\n - `Z` (UTC)\n - `±HH:MM`\n - `±HHMM`\n - `±HH`\n\n Offsets will be represented as :class:`dateutil.tz.tzoffset` objects,\n with the exception of UTC, which will be represented as\n :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such\n as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.\n\n :param dt_str:\n A string or stream containing only an ISO-8601 datetime string\n\n :return:\n Returns a :class:`datetime.datetime` representing the string.\n Unspecified components default to their lowest value.\n\n .. warning::\n\n As of version 2.7.0, the strictness of the parser should not be\n considered a stable part of the contract. Any valid ISO-8601 string\n that parses correctly with the default settings will continue to\n parse correctly in future versions, but invalid strings that\n currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not\n guaranteed to continue failing in future versions if they encode\n a valid date.\n\n .. 
versionadded:: 2.7.0\n """\n components, pos = self._parse_isodate(dt_str)\n\n if len(dt_str) > pos:\n if self._sep is None or dt_str[pos:pos + 1] == self._sep:\n components += self._parse_isotime(dt_str[pos + 1:])\n else:\n raise ValueError('String contains unknown ISO components')\n\n if len(components) > 3 and components[3] == 24:\n components[3] = 0\n return datetime(*components) + timedelta(days=1)\n\n return datetime(*components)\n\n @_takes_ascii\n def parse_isodate(self, datestr):\n """\n Parse the date portion of an ISO string.\n\n :param datestr:\n The string portion of an ISO string, without a separator\n\n :return:\n Returns a :class:`datetime.date` object\n """\n components, pos = self._parse_isodate(datestr)\n if pos < len(datestr):\n raise ValueError('String contains unknown ISO ' +\n 'components: {!r}'.format(datestr.decode('ascii')))\n return date(*components)\n\n @_takes_ascii\n def parse_isotime(self, timestr):\n """\n Parse the time portion of an ISO string.\n\n :param timestr:\n The time portion of an ISO string, without a separator\n\n :return:\n Returns a :class:`datetime.time` object\n """\n components = self._parse_isotime(timestr)\n if components[0] == 24:\n components[0] = 0\n return time(*components)\n\n @_takes_ascii\n def parse_tzstr(self, tzstr, zero_as_utc=True):\n """\n Parse a valid ISO time zone string.\n\n See :func:`isoparser.isoparse` for details on supported formats.\n\n :param tzstr:\n A string representing an ISO time zone offset\n\n :param zero_as_utc:\n Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones\n\n :return:\n Returns :class:`dateutil.tz.tzoffset` for offsets and\n :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is\n specified) offsets equivalent to UTC.\n """\n return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc)\n\n # Constants\n _DATE_SEP = b'-'\n _TIME_SEP = b':'\n _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)')\n\n def _parse_isodate(self, dt_str):\n try:\n return 
self._parse_isodate_common(dt_str)\n except ValueError:\n return self._parse_isodate_uncommon(dt_str)\n\n def _parse_isodate_common(self, dt_str):\n len_str = len(dt_str)\n components = [1, 1, 1]\n\n if len_str < 4:\n raise ValueError('ISO string too short')\n\n # Year\n components[0] = int(dt_str[0:4])\n pos = 4\n if pos >= len_str:\n return components, pos\n\n has_sep = dt_str[pos:pos + 1] == self._DATE_SEP\n if has_sep:\n pos += 1\n\n # Month\n if len_str - pos < 2:\n raise ValueError('Invalid common month')\n\n components[1] = int(dt_str[pos:pos + 2])\n pos += 2\n\n if pos >= len_str:\n if has_sep:\n return components, pos\n else:\n raise ValueError('Invalid ISO format')\n\n if has_sep:\n if dt_str[pos:pos + 1] != self._DATE_SEP:\n raise ValueError('Invalid separator in ISO string')\n pos += 1\n\n # Day\n if len_str - pos < 2:\n raise ValueError('Invalid common day')\n components[2] = int(dt_str[pos:pos + 2])\n return components, pos + 2\n\n def _parse_isodate_uncommon(self, dt_str):\n if len(dt_str) < 4:\n raise ValueError('ISO string too short')\n\n # All ISO formats start with the year\n year = int(dt_str[0:4])\n\n has_sep = dt_str[4:5] == self._DATE_SEP\n\n pos = 4 + has_sep # Skip '-' if it's there\n if dt_str[pos:pos + 1] == b'W':\n # YYYY-?Www-?D?\n pos += 1\n weekno = int(dt_str[pos:pos + 2])\n pos += 2\n\n dayno = 1\n if len(dt_str) > pos:\n if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep:\n raise ValueError('Inconsistent use of dash separator')\n\n pos += has_sep\n\n dayno = int(dt_str[pos:pos + 1])\n pos += 1\n\n base_date = self._calculate_weekdate(year, weekno, dayno)\n else:\n # YYYYDDD or YYYY-DDD\n if len(dt_str) - pos < 3:\n raise ValueError('Invalid ordinal day')\n\n ordinal_day = int(dt_str[pos:pos + 3])\n pos += 3\n\n if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)):\n raise ValueError('Invalid ordinal day' +\n ' {} for year {}'.format(ordinal_day, year))\n\n base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 
1)\n\n components = [base_date.year, base_date.month, base_date.day]\n return components, pos\n\n def _calculate_weekdate(self, year, week, day):\n """\n Calculate the day of corresponding to the ISO year-week-day calendar.\n\n This function is effectively the inverse of\n :func:`datetime.date.isocalendar`.\n\n :param year:\n The year in the ISO calendar\n\n :param week:\n The week in the ISO calendar - range is [1, 53]\n\n :param day:\n The day in the ISO calendar - range is [1 (MON), 7 (SUN)]\n\n :return:\n Returns a :class:`datetime.date`\n """\n if not 0 < week < 54:\n raise ValueError('Invalid week: {}'.format(week))\n\n if not 0 < day < 8: # Range is 1-7\n raise ValueError('Invalid weekday: {}'.format(day))\n\n # Get week 1 for the specific year:\n jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it\n week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1)\n\n # Now add the specific number of weeks and days to get what we want\n week_offset = (week - 1) * 7 + (day - 1)\n return week_1 + timedelta(days=week_offset)\n\n def _parse_isotime(self, timestr):\n len_str = len(timestr)\n components = [0, 0, 0, 0, None]\n pos = 0\n comp = -1\n\n if len_str < 2:\n raise ValueError('ISO time too short')\n\n has_sep = False\n\n while pos < len_str and comp < 5:\n comp += 1\n\n if timestr[pos:pos + 1] in b'-+Zz':\n # Detect time zone boundary\n components[-1] = self._parse_tzstr(timestr[pos:])\n pos = len_str\n break\n\n if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP:\n has_sep = True\n pos += 1\n elif comp == 2 and has_sep:\n if timestr[pos:pos+1] != self._TIME_SEP:\n raise ValueError('Inconsistent use of colon separator')\n pos += 1\n\n if comp < 3:\n # Hour, minute, second\n components[comp] = int(timestr[pos:pos + 2])\n pos += 2\n\n if comp == 3:\n # Fraction of a second\n frac = self._FRACTION_REGEX.match(timestr[pos:])\n if not frac:\n continue\n\n us_str = frac.group(1)[:6] # Truncate to microseconds\n components[comp] = int(us_str) * 10**(6 - 
len(us_str))\n pos += len(frac.group())\n\n if pos < len_str:\n raise ValueError('Unused components in ISO string')\n\n if components[0] == 24:\n # Standard supports 00:00 and 24:00 as representations of midnight\n if any(component != 0 for component in components[1:4]):\n raise ValueError('Hour may only be 24 at 24:00:00.000')\n\n return components\n\n def _parse_tzstr(self, tzstr, zero_as_utc=True):\n if tzstr == b'Z' or tzstr == b'z':\n return tz.UTC\n\n if len(tzstr) not in {3, 5, 6}:\n raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')\n\n if tzstr[0:1] == b'-':\n mult = -1\n elif tzstr[0:1] == b'+':\n mult = 1\n else:\n raise ValueError('Time zone offset requires sign')\n\n hours = int(tzstr[1:3])\n if len(tzstr) == 3:\n minutes = 0\n else:\n minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):])\n\n if zero_as_utc and hours == 0 and minutes == 0:\n return tz.UTC\n else:\n if minutes > 59:\n raise ValueError('Invalid minutes in time zone offset')\n\n if hours > 23:\n raise ValueError('Invalid hours in time zone offset')\n\n return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)\n\n\nDEFAULT_ISOPARSER = isoparser()\nisoparse = DEFAULT_ISOPARSER.isoparse\n
|
.venv\Lib\site-packages\dateutil\parser\isoparser.py
|
isoparser.py
|
Python
| 13,233 | 0.95 | 0.204327 | 0.054313 |
react-lib
| 684 |
2024-09-15T11:18:33.999021
|
GPL-3.0
| false |
6f273bd3093f14de0e1915b9a635f19e
|
# -*- coding: utf-8 -*-\nfrom ._parser import parse, parser, parserinfo, ParserError\nfrom ._parser import DEFAULTPARSER, DEFAULTTZPARSER\nfrom ._parser import UnknownTimezoneWarning\n\nfrom ._parser import __doc__\n\nfrom .isoparser import isoparser, isoparse\n\n__all__ = ['parse', 'parser', 'parserinfo',\n 'isoparse', 'isoparser',\n 'ParserError',\n 'UnknownTimezoneWarning']\n\n\n###\n# Deprecate portions of the private interface so that downstream code that\n# is improperly relying on it is given *some* notice.\n\n\ndef __deprecated_private_func(f):\n from functools import wraps\n import warnings\n\n msg = ('{name} is a private function and may break without warning, '\n 'it will be moved and or renamed in future versions.')\n msg = msg.format(name=f.__name__)\n\n @wraps(f)\n def deprecated_func(*args, **kwargs):\n warnings.warn(msg, DeprecationWarning)\n return f(*args, **kwargs)\n\n return deprecated_func\n\ndef __deprecate_private_class(c):\n import warnings\n\n msg = ('{name} is a private class and may break without warning, '\n 'it will be moved and or renamed in future versions.')\n msg = msg.format(name=c.__name__)\n\n class private_class(c):\n __doc__ = c.__doc__\n\n def __init__(self, *args, **kwargs):\n warnings.warn(msg, DeprecationWarning)\n super(private_class, self).__init__(*args, **kwargs)\n\n private_class.__name__ = c.__name__\n\n return private_class\n\n\nfrom ._parser import _timelex, _resultbase\nfrom ._parser import _tzparser, _parsetz\n\n_timelex = __deprecate_private_class(_timelex)\n_tzparser = __deprecate_private_class(_tzparser)\n_resultbase = __deprecate_private_class(_resultbase)\n_parsetz = __deprecated_private_func(_parsetz)\n
|
.venv\Lib\site-packages\dateutil\parser\__init__.py
|
__init__.py
|
Python
| 1,766 | 0.95 | 0.114754 | 0.095238 |
awesome-app
| 388 |
2025-04-25T05:03:27.393580
|
Apache-2.0
| false |
3adfb3191307a4f45562140e0aae9b9b
|
\n\n
|
.venv\Lib\site-packages\dateutil\parser\__pycache__\isoparser.cpython-313.pyc
|
isoparser.cpython-313.pyc
|
Other
| 14,941 | 0.95 | 0.095652 | 0.005181 |
awesome-app
| 1 |
2024-11-24T22:21:19.997680
|
BSD-3-Clause
| false |
595aefde71a1a122c503ed69ba0f8478
|
\n\n
|
.venv\Lib\site-packages\dateutil\parser\__pycache__\_parser.cpython-313.pyc
|
_parser.cpython-313.pyc
|
Other
| 61,975 | 0.75 | 0.075145 | 0.003236 |
react-lib
| 46 |
2024-05-22T15:26:17.312016
|
Apache-2.0
| false |
3976f3b9a9bc893678c846797c4d5929
|
\n\n
|
.venv\Lib\site-packages\dateutil\parser\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 2,709 | 0.95 | 0.076923 | 0.090909 |
python-kit
| 948 |
2025-01-08T02:50:40.068290
|
GPL-3.0
| false |
82f06dd37289ae5ecbb9bc812016d2f0
|
from six import PY2\n\nfrom functools import wraps\n\nfrom datetime import datetime, timedelta, tzinfo\n\n\nZERO = timedelta(0)\n\n__all__ = ['tzname_in_python2', 'enfold']\n\n\ndef tzname_in_python2(namefunc):\n """Change unicode output into bytestrings in Python 2\n\n tzname() API changed in Python 3. It used to return bytes, but was changed\n to unicode strings\n """\n if PY2:\n @wraps(namefunc)\n def adjust_encoding(*args, **kwargs):\n name = namefunc(*args, **kwargs)\n if name is not None:\n name = name.encode()\n\n return name\n\n return adjust_encoding\n else:\n return namefunc\n\n\n# The following is adapted from Alexander Belopolsky's tz library\n# https://github.com/abalkin/tz\nif hasattr(datetime, 'fold'):\n # This is the pre-python 3.6 fold situation\n def enfold(dt, fold=1):\n """\n Provides a unified interface for assigning the ``fold`` attribute to\n datetimes both before and after the implementation of PEP-495.\n\n :param fold:\n The value for the ``fold`` attribute in the returned datetime. This\n should be either 0 or 1.\n\n :return:\n Returns an object for which ``getattr(dt, 'fold', 0)`` returns\n ``fold`` for all versions of Python. In versions prior to\n Python 3.6, this is a ``_DatetimeWithFold`` object, which is a\n subclass of :py:class:`datetime.datetime` with the ``fold``\n attribute added, if ``fold`` is 1.\n\n .. versionadded:: 2.6.0\n """\n return dt.replace(fold=fold)\n\nelse:\n class _DatetimeWithFold(datetime):\n """\n This is a class designed to provide a PEP 495-compliant interface for\n Python versions before 3.6. It is used only for dates in a fold, so\n the ``fold`` attribute is fixed at ``1``.\n\n .. versionadded:: 2.6.0\n """\n __slots__ = ()\n\n def replace(self, *args, **kwargs):\n """\n Return a datetime with the same attributes, except for those\n attributes given new values by whichever keyword arguments are\n specified. 
Note that tzinfo=None can be specified to create a naive\n datetime from an aware datetime with no conversion of date and time\n data.\n\n This is reimplemented in ``_DatetimeWithFold`` because pypy3 will\n return a ``datetime.datetime`` even if ``fold`` is unchanged.\n """\n argnames = (\n 'year', 'month', 'day', 'hour', 'minute', 'second',\n 'microsecond', 'tzinfo'\n )\n\n for arg, argname in zip(args, argnames):\n if argname in kwargs:\n raise TypeError('Duplicate argument: {}'.format(argname))\n\n kwargs[argname] = arg\n\n for argname in argnames:\n if argname not in kwargs:\n kwargs[argname] = getattr(self, argname)\n\n dt_class = self.__class__ if kwargs.get('fold', 1) else datetime\n\n return dt_class(**kwargs)\n\n @property\n def fold(self):\n return 1\n\n def enfold(dt, fold=1):\n """\n Provides a unified interface for assigning the ``fold`` attribute to\n datetimes both before and after the implementation of PEP-495.\n\n :param fold:\n The value for the ``fold`` attribute in the returned datetime. This\n should be either 0 or 1.\n\n :return:\n Returns an object for which ``getattr(dt, 'fold', 0)`` returns\n ``fold`` for all versions of Python. In versions prior to\n Python 3.6, this is a ``_DatetimeWithFold`` object, which is a\n subclass of :py:class:`datetime.datetime` with the ``fold``\n attribute added, if ``fold`` is 1.\n\n .. 
versionadded:: 2.6.0\n """\n if getattr(dt, 'fold', 0) == fold:\n return dt\n\n args = dt.timetuple()[:6]\n args += (dt.microsecond, dt.tzinfo)\n\n if fold:\n return _DatetimeWithFold(*args)\n else:\n return datetime(*args)\n\n\ndef _validate_fromutc_inputs(f):\n """\n The CPython version of ``fromutc`` checks that the input is a ``datetime``\n object and that ``self`` is attached as its ``tzinfo``.\n """\n @wraps(f)\n def fromutc(self, dt):\n if not isinstance(dt, datetime):\n raise TypeError("fromutc() requires a datetime argument")\n if dt.tzinfo is not self:\n raise ValueError("dt.tzinfo is not self")\n\n return f(self, dt)\n\n return fromutc\n\n\nclass _tzinfo(tzinfo):\n """\n Base class for all ``dateutil`` ``tzinfo`` objects.\n """\n\n def is_ambiguous(self, dt):\n """\n Whether or not the "wall time" of a given datetime is ambiguous in this\n zone.\n\n :param dt:\n A :py:class:`datetime.datetime`, naive or time zone aware.\n\n\n :return:\n Returns ``True`` if ambiguous, ``False`` otherwise.\n\n .. versionadded:: 2.6.0\n """\n\n dt = dt.replace(tzinfo=self)\n\n wall_0 = enfold(dt, fold=0)\n wall_1 = enfold(dt, fold=1)\n\n same_offset = wall_0.utcoffset() == wall_1.utcoffset()\n same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None)\n\n return same_dt and not same_offset\n\n def _fold_status(self, dt_utc, dt_wall):\n """\n Determine the fold status of a "wall" datetime, given a representation\n of the same datetime as a (naive) UTC datetime. This is calculated based\n on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all\n datetimes, and that this offset is the actual number of hours separating\n ``dt_utc`` and ``dt_wall``.\n\n :param dt_utc:\n Representation of the datetime as UTC\n\n :param dt_wall:\n Representation of the datetime as "wall time". 
This parameter must\n either have a `fold` attribute or have a fold-naive\n :class:`datetime.tzinfo` attached, otherwise the calculation may\n fail.\n """\n if self.is_ambiguous(dt_wall):\n delta_wall = dt_wall - dt_utc\n _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst()))\n else:\n _fold = 0\n\n return _fold\n\n def _fold(self, dt):\n return getattr(dt, 'fold', 0)\n\n def _fromutc(self, dt):\n """\n Given a timezone-aware datetime in a given timezone, calculates a\n timezone-aware datetime in a new timezone.\n\n Since this is the one time that we *know* we have an unambiguous\n datetime object, we take this opportunity to determine whether the\n datetime is ambiguous and in a "fold" state (e.g. if it's the first\n occurrence, chronologically, of the ambiguous datetime).\n\n :param dt:\n A timezone-aware :class:`datetime.datetime` object.\n """\n\n # Re-implement the algorithm from Python's datetime.py\n dtoff = dt.utcoffset()\n if dtoff is None:\n raise ValueError("fromutc() requires a non-None utcoffset() "\n "result")\n\n # The original datetime.py code assumes that `dst()` defaults to\n # zero during ambiguous times. 
PEP 495 inverts this presumption, so\n # for pre-PEP 495 versions of python, we need to tweak the algorithm.\n dtdst = dt.dst()\n if dtdst is None:\n raise ValueError("fromutc() requires a non-None dst() result")\n delta = dtoff - dtdst\n\n dt += delta\n # Set fold=1 so we can default to being in the fold for\n # ambiguous dates.\n dtdst = enfold(dt, fold=1).dst()\n if dtdst is None:\n raise ValueError("fromutc(): dt.dst gave inconsistent "\n "results; cannot convert")\n return dt + dtdst\n\n @_validate_fromutc_inputs\n def fromutc(self, dt):\n """\n Given a timezone-aware datetime in a given timezone, calculates a\n timezone-aware datetime in a new timezone.\n\n Since this is the one time that we *know* we have an unambiguous\n datetime object, we take this opportunity to determine whether the\n datetime is ambiguous and in a "fold" state (e.g. if it's the first\n occurrence, chronologically, of the ambiguous datetime).\n\n :param dt:\n A timezone-aware :class:`datetime.datetime` object.\n """\n dt_wall = self._fromutc(dt)\n\n # Calculate the fold status given the two datetimes.\n _fold = self._fold_status(dt, dt_wall)\n\n # Set the default fold value for ambiguous dates\n return enfold(dt_wall, fold=_fold)\n\n\nclass tzrangebase(_tzinfo):\n """\n This is an abstract base class for time zones represented by an annual\n transition into and out of DST. 
Child classes should implement the following\n methods:\n\n * ``__init__(self, *args, **kwargs)``\n * ``transitions(self, year)`` - this is expected to return a tuple of\n datetimes representing the DST on and off transitions in standard\n time.\n\n A fully initialized ``tzrangebase`` subclass should also provide the\n following attributes:\n * ``hasdst``: Boolean whether or not the zone uses DST.\n * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects\n representing the respective UTC offsets.\n * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short\n abbreviations in DST and STD, respectively.\n * ``_hasdst``: Whether or not the zone has DST.\n\n .. versionadded:: 2.6.0\n """\n def __init__(self):\n raise NotImplementedError('tzrangebase is an abstract base class')\n\n def utcoffset(self, dt):\n isdst = self._isdst(dt)\n\n if isdst is None:\n return None\n elif isdst:\n return self._dst_offset\n else:\n return self._std_offset\n\n def dst(self, dt):\n isdst = self._isdst(dt)\n\n if isdst is None:\n return None\n elif isdst:\n return self._dst_base_offset\n else:\n return ZERO\n\n @tzname_in_python2\n def tzname(self, dt):\n if self._isdst(dt):\n return self._dst_abbr\n else:\n return self._std_abbr\n\n def fromutc(self, dt):\n """ Given a datetime in UTC, return local time """\n if not isinstance(dt, datetime):\n raise TypeError("fromutc() requires a datetime argument")\n\n if dt.tzinfo is not self:\n raise ValueError("dt.tzinfo is not self")\n\n # Get transitions - if there are none, fixed offset\n transitions = self.transitions(dt.year)\n if transitions is None:\n return dt + self.utcoffset(dt)\n\n # Get the transition times in UTC\n dston, dstoff = transitions\n\n dston -= self._std_offset\n dstoff -= self._std_offset\n\n utc_transitions = (dston, dstoff)\n dt_utc = dt.replace(tzinfo=None)\n\n isdst = self._naive_isdst(dt_utc, utc_transitions)\n\n if isdst:\n dt_wall = dt + self._dst_offset\n else:\n dt_wall = dt + 
self._std_offset\n\n _fold = int(not isdst and self.is_ambiguous(dt_wall))\n\n return enfold(dt_wall, fold=_fold)\n\n def is_ambiguous(self, dt):\n """\n Whether or not the "wall time" of a given datetime is ambiguous in this\n zone.\n\n :param dt:\n A :py:class:`datetime.datetime`, naive or time zone aware.\n\n\n :return:\n Returns ``True`` if ambiguous, ``False`` otherwise.\n\n .. versionadded:: 2.6.0\n """\n if not self.hasdst:\n return False\n\n start, end = self.transitions(dt.year)\n\n dt = dt.replace(tzinfo=None)\n return (end <= dt < end + self._dst_base_offset)\n\n def _isdst(self, dt):\n if not self.hasdst:\n return False\n elif dt is None:\n return None\n\n transitions = self.transitions(dt.year)\n\n if transitions is None:\n return False\n\n dt = dt.replace(tzinfo=None)\n\n isdst = self._naive_isdst(dt, transitions)\n\n # Handle ambiguous dates\n if not isdst and self.is_ambiguous(dt):\n return not self._fold(dt)\n else:\n return isdst\n\n def _naive_isdst(self, dt, transitions):\n dston, dstoff = transitions\n\n dt = dt.replace(tzinfo=None)\n\n if dston < dstoff:\n isdst = dston <= dt < dstoff\n else:\n isdst = not dstoff <= dt < dston\n\n return isdst\n\n @property\n def _dst_base_offset(self):\n return self._dst_offset - self._std_offset\n\n __hash__ = None\n\n def __ne__(self, other):\n return not (self == other)\n\n def __repr__(self):\n return "%s(...)" % self.__class__.__name__\n\n __reduce__ = object.__reduce__\n
|
.venv\Lib\site-packages\dateutil\tz\_common.py
|
_common.py
|
Python
| 12,977 | 0.95 | 0.21957 | 0.063492 |
vue-tools
| 117 |
2023-07-17T11:52:32.838694
|
MIT
| false |
42a7fbd41803e102d2a59cce11fc9b24
|
# -*- coding: utf-8 -*-\nfrom .tz import *\nfrom .tz import __doc__\n\n__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",\n "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz",\n "enfold", "datetime_ambiguous", "datetime_exists",\n "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"]\n\n\nclass DeprecatedTzFormatWarning(Warning):\n """Warning raised when time zones are parsed from deprecated formats."""\n
|
.venv\Lib\site-packages\dateutil\tz\__init__.py
|
__init__.py
|
Python
| 444 | 0.95 | 0.083333 | 0.111111 |
vue-tools
| 740 |
2023-12-28T06:32:49.026305
|
MIT
| false |
5d98f9f10ac126250ba6a405949475ad
|
\n\n
|
.venv\Lib\site-packages\dateutil\tz\__pycache__\tz.cpython-313.pyc
|
tz.cpython-313.pyc
|
Other
| 66,112 | 0.75 | 0.083716 | 0.006784 |
awesome-app
| 594 |
2024-06-23T03:57:47.861522
|
Apache-2.0
| false |
a68902830f077bdc32f533d6f7ac9215
|
\n\n
|
.venv\Lib\site-packages\dateutil\tz\__pycache__\win.cpython-313.pyc
|
win.cpython-313.pyc
|
Other
| 17,374 | 0.95 | 0.068966 | 0.049261 |
awesome-app
| 283 |
2024-04-20T08:48:19.069112
|
MIT
| false |
de695f8c41d7bc4305021522c66241a5
|
\n\n
|
.venv\Lib\site-packages\dateutil\tz\__pycache__\_common.cpython-313.pyc
|
_common.cpython-313.pyc
|
Other
| 13,775 | 0.95 | 0.134503 | 0.039216 |
python-kit
| 653 |
2025-02-22T12:58:59.533042
|
GPL-3.0
| false |
a4defacf9e7d25e4ceee6bec8e523fc2
|
\n\n
|
.venv\Lib\site-packages\dateutil\tz\__pycache__\_factories.cpython-313.pyc
|
_factories.cpython-313.pyc
|
Other
| 4,742 | 0.8 | 0 | 0 |
react-lib
| 627 |
2024-01-13T22:06:34.753444
|
Apache-2.0
| false |
ef507e43bb47337bedfdad3440d6a93b
|
\n\n
|
.venv\Lib\site-packages\dateutil\tz\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 766 | 0.7 | 0 | 0 |
python-kit
| 745 |
2025-02-13T20:29:58.324129
|
Apache-2.0
| false |
e34aa80a7e459f9016751a46988541f3
|
edateutil-zoneinfo.tar
|
.venv\Lib\site-packages\dateutil\zoneinfo\dateutil-zoneinfo.tar.gz
|
dateutil-zoneinfo.tar.gz
|
Other
| 156,400 | 0.6 | 0 | 0.017617 |
awesome-app
| 535 |
2024-10-20T13:06:40.543865
|
MIT
| false |
68979e353750d68d08c06aa27fb06192
|
import logging\nimport os\nimport tempfile\nimport shutil\nimport json\nfrom subprocess import check_call, check_output\nfrom tarfile import TarFile\n\nfrom dateutil.zoneinfo import METADATA_FN, ZONEFILENAME\n\n\ndef rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):\n """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*\n\n filename is the timezone tarball from ``ftp.iana.org/tz``.\n\n """\n tmpdir = tempfile.mkdtemp()\n zonedir = os.path.join(tmpdir, "zoneinfo")\n moduledir = os.path.dirname(__file__)\n try:\n with TarFile.open(filename) as tf:\n for name in zonegroups:\n tf.extract(name, tmpdir)\n filepaths = [os.path.join(tmpdir, n) for n in zonegroups]\n\n _run_zic(zonedir, filepaths)\n\n # write metadata file\n with open(os.path.join(zonedir, METADATA_FN), 'w') as f:\n json.dump(metadata, f, indent=4, sort_keys=True)\n target = os.path.join(moduledir, ZONEFILENAME)\n with TarFile.open(target, "w:%s" % format) as tf:\n for entry in os.listdir(zonedir):\n entrypath = os.path.join(zonedir, entry)\n tf.add(entrypath, entry)\n finally:\n shutil.rmtree(tmpdir)\n\n\ndef _run_zic(zonedir, filepaths):\n """Calls the ``zic`` compiler in a compatible way to get a "fat" binary.\n\n Recent versions of ``zic`` default to ``-b slim``, while older versions\n don't even have the ``-b`` option (but default to "fat" binaries). 
The\n current version of dateutil does not support Version 2+ TZif files, which\n causes problems when used in conjunction with "slim" binaries, so this\n function is used to ensure that we always get a "fat" binary.\n """\n\n try:\n help_text = check_output(["zic", "--help"])\n except OSError as e:\n _print_on_nosuchfile(e)\n raise\n\n if b"-b " in help_text:\n bloat_args = ["-b", "fat"]\n else:\n bloat_args = []\n\n check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths)\n\n\ndef _print_on_nosuchfile(e):\n """Print helpful troubleshooting message\n\n e is an exception raised by subprocess.check_call()\n\n """\n if e.errno == 2:\n logging.error(\n "Could not find zic. Perhaps you need to install "\n "libc-bin or some other package that provides it, "\n "or it's not in your PATH?")\n
|
.venv\Lib\site-packages\dateutil\zoneinfo\rebuild.py
|
rebuild.py
|
Python
| 2,392 | 0.95 | 0.16 | 0.017241 |
python-kit
| 428 |
2025-01-08T23:25:56.018209
|
BSD-3-Clause
| false |
b49595d423f637c0f06a586a462d498a
|
# -*- coding: utf-8 -*-\nimport warnings\nimport json\n\nfrom tarfile import TarFile\nfrom pkgutil import get_data\nfrom io import BytesIO\n\nfrom dateutil.tz import tzfile as _tzfile\n\n__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]\n\nZONEFILENAME = "dateutil-zoneinfo.tar.gz"\nMETADATA_FN = 'METADATA'\n\n\nclass tzfile(_tzfile):\n def __reduce__(self):\n return (gettz, (self._filename,))\n\n\ndef getzoneinfofile_stream():\n try:\n return BytesIO(get_data(__name__, ZONEFILENAME))\n except IOError as e: # TODO switch to FileNotFoundError?\n warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))\n return None\n\n\nclass ZoneInfoFile(object):\n def __init__(self, zonefile_stream=None):\n if zonefile_stream is not None:\n with TarFile.open(fileobj=zonefile_stream) as tf:\n self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name)\n for zf in tf.getmembers()\n if zf.isfile() and zf.name != METADATA_FN}\n # deal with links: They'll point to their parent object. Less\n # waste of memory\n links = {zl.name: self.zones[zl.linkname]\n for zl in tf.getmembers() if\n zl.islnk() or zl.issym()}\n self.zones.update(links)\n try:\n metadata_json = tf.extractfile(tf.getmember(METADATA_FN))\n metadata_str = metadata_json.read().decode('UTF-8')\n self.metadata = json.loads(metadata_str)\n except KeyError:\n # no metadata in tar file\n self.metadata = None\n else:\n self.zones = {}\n self.metadata = None\n\n def get(self, name, default=None):\n """\n Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method\n for retrieving zones from the zone dictionary.\n\n :param name:\n The name of the zone to retrieve. (Generally IANA zone names)\n\n :param default:\n The value to return in the event of a missing key.\n\n .. versionadded:: 2.6.0\n\n """\n return self.zones.get(name, default)\n\n\n# The current API has gettz as a module function, although in fact it taps into\n# a stateful class. 
So as a workaround for now, without changing the API, we\n# will create a new "global" class instance the first time a user requests a\n# timezone. Ugly, but adheres to the api.\n#\n# TODO: Remove after deprecation period.\n_CLASS_ZONE_INSTANCE = []\n\n\ndef get_zonefile_instance(new_instance=False):\n """\n This is a convenience function which provides a :class:`ZoneInfoFile`\n instance using the data provided by the ``dateutil`` package. By default, it\n caches a single instance of the ZoneInfoFile object and returns that.\n\n :param new_instance:\n If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and\n used as the cached instance for the next call. Otherwise, new instances\n are created only as necessary.\n\n :return:\n Returns a :class:`ZoneInfoFile` object.\n\n .. versionadded:: 2.6\n """\n if new_instance:\n zif = None\n else:\n zif = getattr(get_zonefile_instance, '_cached_instance', None)\n\n if zif is None:\n zif = ZoneInfoFile(getzoneinfofile_stream())\n\n get_zonefile_instance._cached_instance = zif\n\n return zif\n\n\ndef gettz(name):\n """\n This retrieves a time zone from the local zoneinfo tarball that is packaged\n with dateutil.\n\n :param name:\n An IANA-style time zone name, as found in the zoneinfo file.\n\n :return:\n Returns a :class:`dateutil.tz.tzfile` time zone object.\n\n .. warning::\n It is generally inadvisable to use this function, and it is only\n provided for API compatibility with earlier versions. This is *not*\n equivalent to ``dateutil.tz.gettz()``, which selects an appropriate\n time zone based on the inputs, favoring system zoneinfo. This is ONLY\n for accessing the dateutil-specific zoneinfo (which may be out of\n date compared to the system zoneinfo).\n\n .. 
deprecated:: 2.6\n If you need to use a specific zoneinfofile over the system zoneinfo,\n instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call\n :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead.\n\n Use :func:`get_zonefile_instance` to retrieve an instance of the\n dateutil-provided zoneinfo.\n """\n warnings.warn("zoneinfo.gettz() will be removed in future versions, "\n "to use the dateutil-provided zoneinfo files, instantiate a "\n "ZoneInfoFile object and use ZoneInfoFile.zones.get() "\n "instead. See the documentation for details.",\n DeprecationWarning)\n\n if len(_CLASS_ZONE_INSTANCE) == 0:\n _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))\n return _CLASS_ZONE_INSTANCE[0].zones.get(name)\n\n\ndef gettz_db_metadata():\n """ Get the zonefile metadata\n\n See `zonefile_metadata`_\n\n :returns:\n A dictionary with the database metadata\n\n .. deprecated:: 2.6\n See deprecation warning in :func:`zoneinfo.gettz`. To get metadata,\n query the attribute ``zoneinfo.ZoneInfoFile.metadata``.\n """\n warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future "\n "versions, to use the dateutil-provided zoneinfo files, "\n "ZoneInfoFile object and query the 'metadata' attribute "\n "instead. See the documentation for details.",\n DeprecationWarning)\n\n if len(_CLASS_ZONE_INSTANCE) == 0:\n _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))\n return _CLASS_ZONE_INSTANCE[0].metadata\n
|
.venv\Lib\site-packages\dateutil\zoneinfo\__init__.py
|
__init__.py
|
Python
| 5,889 | 0.95 | 0.233533 | 0.078125 |
awesome-app
| 819 |
2025-04-08T04:28:07.265811
|
MIT
| false |
ed896f480f2ee5d27fd0bf45c98546e6
|
\n\n
|
.venv\Lib\site-packages\dateutil\zoneinfo\__pycache__\rebuild.cpython-313.pyc
|
rebuild.cpython-313.pyc
|
Other
| 3,925 | 0.95 | 0.028986 | 0.035714 |
awesome-app
| 36 |
2023-12-16T10:51:08.643922
|
GPL-3.0
| false |
767485a7a39000364c321a03d4f56446
|
\n\n
|
.venv\Lib\site-packages\dateutil\zoneinfo\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 7,424 | 0.95 | 0.142857 | 0 |
awesome-app
| 102 |
2025-02-03T14:41:53.772400
|
MIT
| false |
3b19a4df44148dc598cce2f85655a6fe
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\easter.cpython-313.pyc
|
easter.cpython-313.pyc
|
Other
| 2,711 | 0.8 | 0.012346 | 0.042857 |
react-lib
| 282 |
2023-12-29T12:18:22.871639
|
BSD-3-Clause
| false |
a6fb5663cec657fb24309de6fc8e9825
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\relativedelta.cpython-313.pyc
|
relativedelta.cpython-313.pyc
|
Other
| 28,888 | 0.95 | 0.019841 | 0.004739 |
vue-tools
| 875 |
2025-03-14T14:28:38.511033
|
MIT
| false |
399a5aba27b9ad5d0682a064f48e77fe
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\rrule.cpython-313.pyc
|
rrule.cpython-313.pyc
|
Other
| 69,647 | 0.75 | 0.039422 | 0.002841 |
react-lib
| 642 |
2025-02-22T05:25:09.079638
|
BSD-3-Clause
| false |
942cc5af31c9a1791ebbaa37abd6dce0
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\tzwin.cpython-313.pyc
|
tzwin.cpython-313.pyc
|
Other
| 208 | 0.7 | 0 | 0 |
node-utils
| 890 |
2023-07-25T14:42:54.784190
|
MIT
| false |
f6c12d2a859c96ec4dc0c45f19c2fbfa
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\utils.cpython-313.pyc
|
utils.cpython-313.pyc
|
Other
| 2,442 | 0.95 | 0.155172 | 0 |
node-utils
| 325 |
2023-08-21T16:14:26.628268
|
GPL-3.0
| false |
e98dae74f3d206872c9b31a45b474fd7
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\_common.cpython-313.pyc
|
_common.cpython-313.pyc
|
Other
| 1,989 | 0.8 | 0 | 0.037037 |
node-utils
| 924 |
2024-09-03T04:26:07.997515
|
Apache-2.0
| false |
7a25f04925fc4a5fbc3d64ec5aaf3389
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\_version.cpython-313.pyc
|
_version.cpython-313.pyc
|
Other
| 305 | 0.7 | 0 | 0 |
react-lib
| 58 |
2023-10-03T07:21:11.215739
|
BSD-3-Clause
| false |
6b1751435c8a73f6fb8ae713383cd700
|
\n\n
|
.venv\Lib\site-packages\dateutil\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 1,147 | 0.95 | 0 | 0 |
node-utils
| 390 |
2025-03-01T11:10:01.637419
|
Apache-2.0
| false |
a9ab2326981e3f82ff6b840ab6daa61b
|
from datetime import date\nfrom typing import Literal\n\nEASTER_JULIAN: Literal[1]\nEASTER_ORTHODOX: Literal[2]\nEASTER_WESTERN: Literal[3]\n\ndef easter(year: int, method: Literal[1, 2, 3] = 3) -> date: ...\n
|
.venv\Lib\site-packages\dateutil-stubs\easter.pyi
|
easter.pyi
|
Other
| 201 | 0.85 | 0.125 | 0 |
python-kit
| 295 |
2024-08-26T00:56:39.579591
|
MIT
| false |
aeb38fd30888edade333d7573698c06f
|
version = "2.9.*"\nupstream_repository = "https://github.com/dateutil/dateutil"\npartial_stub = true\n\n[tool.stubtest]\nignore_missing_stub = true\n
|
.venv\Lib\site-packages\dateutil-stubs\METADATA.toml
|
METADATA.toml
|
Other
| 143 | 0.8 | 0 | 0 |
node-utils
| 208 |
2024-03-28T18:49:28.474006
|
GPL-3.0
| false |
006bd34b03dc6949dba8bfdcf13750bd
|
partial\n
|
.venv\Lib\site-packages\dateutil-stubs\py.typed
|
py.typed
|
Other
| 8 | 0.5 | 0 | 0 |
python-kit
| 942 |
2025-04-30T06:09:22.841178
|
Apache-2.0
| false |
6f0cb8ce082a1d25dcfe12801403f58b
|
from datetime import date, timedelta\nfrom typing import SupportsFloat, TypeVar, overload\nfrom typing_extensions import Self, TypeAlias\n\n# See #9817 for why we reexport this here\nfrom ._common import weekday as weekday\n\n_DateT = TypeVar("_DateT", bound=date)\n# Work around attribute and type having the same name.\n_Weekday: TypeAlias = weekday\n\nMO: weekday\nTU: weekday\nWE: weekday\nTH: weekday\nFR: weekday\nSA: weekday\nSU: weekday\n\nclass relativedelta:\n years: int\n months: int\n days: int\n leapdays: int\n hours: int\n minutes: int\n seconds: int\n microseconds: int\n year: int | None\n month: int | None\n weekday: _Weekday | None\n day: int | None\n hour: int | None\n minute: int | None\n second: int | None\n microsecond: int | None\n def __init__(\n self,\n dt1: date | None = None,\n dt2: date | None = None,\n years: int = 0,\n months: int = 0,\n days: int = 0,\n leapdays: int = 0,\n weeks: int = 0,\n hours: int = 0,\n minutes: int = 0,\n seconds: int = 0,\n microseconds: int = 0,\n year: int | None = None,\n month: int | None = None,\n day: int | None = None,\n weekday: int | _Weekday | None = None,\n yearday: int | None = None,\n nlyearday: int | None = None,\n hour: int | None = None,\n minute: int | None = None,\n second: int | None = None,\n microsecond: int | None = None,\n ) -> None: ...\n @property\n def weeks(self) -> int: ...\n @weeks.setter\n def weeks(self, value: int) -> None: ...\n def normalized(self) -> Self: ...\n @overload\n def __add__(self, other: timedelta | relativedelta) -> Self: ...\n @overload\n def __add__(self, other: _DateT) -> _DateT: ...\n @overload\n def __radd__(self, other: timedelta | relativedelta) -> Self: ...\n @overload\n def __radd__(self, other: _DateT) -> _DateT: ...\n @overload\n def __rsub__(self, other: timedelta | relativedelta) -> Self: ...\n @overload\n def __rsub__(self, other: _DateT) -> _DateT: ...\n def __sub__(self, other: relativedelta) -> Self: ...\n def __neg__(self) -> Self: ...\n def 
__bool__(self) -> bool: ...\n def __nonzero__(self) -> bool: ...\n def __mul__(self, other: SupportsFloat) -> Self: ...\n def __rmul__(self, other: SupportsFloat) -> Self: ...\n def __eq__(self, other: object) -> bool: ...\n def __ne__(self, other: object) -> bool: ...\n def __div__(self, other: SupportsFloat) -> Self: ...\n def __truediv__(self, other: SupportsFloat) -> Self: ...\n def __abs__(self) -> Self: ...\n def __hash__(self) -> int: ...\n
|
.venv\Lib\site-packages\dateutil-stubs\relativedelta.pyi
|
relativedelta.pyi
|
Other
| 2,647 | 0.95 | 0.269663 | 0.023529 |
react-lib
| 934 |
2024-04-03T23:50:38.680358
|
MIT
| false |
266b4764e71c1e9caf66489b2dbc21a0
|
import datetime\nfrom _typeshed import Incomplete\nfrom collections.abc import Iterable, Iterator, Sequence\nfrom typing_extensions import TypeAlias\n\nfrom ._common import weekday as weekdaybase\n\nYEARLY: int\nMONTHLY: int\nWEEKLY: int\nDAILY: int\nHOURLY: int\nMINUTELY: int\nSECONDLY: int\n\nclass weekday(weekdaybase): ...\n\nweekdays: tuple[weekday, weekday, weekday, weekday, weekday, weekday, weekday]\nMO: weekday\nTU: weekday\nWE: weekday\nTH: weekday\nFR: weekday\nSA: weekday\nSU: weekday\n\nclass rrulebase:\n def __init__(self, cache: bool = False) -> None: ...\n def __iter__(self) -> Iterator[datetime.datetime]: ...\n def __getitem__(self, item): ...\n def __contains__(self, item): ...\n def count(self): ...\n def before(self, dt, inc: bool = False): ...\n def after(self, dt, inc: bool = False): ...\n def xafter(self, dt, count=None, inc: bool = False): ...\n def between(self, after, before, inc: bool = False, count: int = 1): ...\n\nclass rrule(rrulebase):\n def __init__(\n self,\n freq,\n dtstart: datetime.date | None = None,\n interval: int = 1,\n wkst: weekday | int | None = None,\n count: int | None = None,\n until: datetime.date | int | None = None,\n bysetpos: int | Iterable[int] | None = None,\n bymonth: int | Iterable[int] | None = None,\n bymonthday: int | Iterable[int] | None = None,\n byyearday: int | Iterable[int] | None = None,\n byeaster: int | Iterable[int] | None = None,\n byweekno: int | Iterable[int] | None = None,\n byweekday: int | weekday | Iterable[int] | Iterable[weekday] | None = None,\n byhour: int | Iterable[int] | None = None,\n byminute: int | Iterable[int] | None = None,\n bysecond: int | Iterable[int] | None = None,\n cache: bool = False,\n ) -> None: ...\n def replace(self, **kwargs): ...\n\n_RRule: TypeAlias = rrule\n\nclass _iterinfo:\n rrule: _RRule\n def __init__(self, rrule: _RRule) -> None: ...\n yearlen: int | None\n nextyearlen: int | None\n yearordinal: int | None\n yearweekday: int | None\n mmask: Sequence[int] | 
None\n mdaymask: Sequence[int] | None\n nmdaymask: Sequence[int] | None\n wdaymask: Sequence[int] | None\n mrange: Sequence[int] | None\n wnomask: Sequence[int] | None\n nwdaymask: Sequence[int] | None\n eastermask: Sequence[int] | None\n lastyear: int | None\n lastmonth: int | None\n def rebuild(self, year, month): ...\n def ydayset(self, year, month, day): ...\n def mdayset(self, year, month, day): ...\n def wdayset(self, year, month, day): ...\n def ddayset(self, year, month, day): ...\n def htimeset(self, hour, minute, second): ...\n def mtimeset(self, hour, minute, second): ...\n def stimeset(self, hour, minute, second): ...\n\nclass rruleset(rrulebase):\n class _genitem:\n dt: Incomplete\n genlist: list[Incomplete]\n gen: Incomplete\n def __init__(self, genlist, gen) -> None: ...\n def __next__(self) -> None: ...\n next = __next__\n def __lt__(self, other) -> bool: ...\n def __gt__(self, other) -> bool: ...\n def __eq__(self, other) -> bool: ...\n def __ne__(self, other) -> bool: ...\n\n def __init__(self, cache: bool = False) -> None: ...\n def rrule(self, rrule: _RRule): ...\n def rdate(self, rdate): ...\n def exrule(self, exrule): ...\n def exdate(self, exdate): ...\n\nclass _rrulestr:\n def __call__(self, s, **kwargs) -> rrule | rruleset: ...\n\nrrulestr: _rrulestr\n
|
.venv\Lib\site-packages\dateutil-stubs\rrule.pyi
|
rrule.pyi
|
Other
| 3,509 | 0.85 | 0.351351 | 0 |
vue-tools
| 842 |
2024-03-03T14:12:20.224180
|
GPL-3.0
| false |
59c9ab39cf5c87ea6acd05b6085937c9
|
from datetime import datetime, timedelta, tzinfo\n\ndef default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ...\ndef today(tzinfo: tzinfo | None = None) -> datetime: ...\ndef within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ...\n
|
.venv\Lib\site-packages\dateutil-stubs\utils.pyi
|
utils.pyi
|
Other
| 251 | 0.85 | 0.6 | 0 |
react-lib
| 785 |
2024-05-29T05:51:01.108060
|
BSD-3-Clause
| false |
f38383633ba1508f0c73fbb32207f4a6
|
from typing_extensions import Self\n\nclass weekday:\n def __init__(self, weekday: int, n: int | None = None) -> None: ...\n def __call__(self, n: int) -> Self: ...\n def __eq__(self, other: object) -> bool: ...\n def __hash__(self) -> int: ...\n weekday: int\n n: int\n
|
.venv\Lib\site-packages\dateutil-stubs\_common.pyi
|
_common.pyi
|
Other
| 279 | 0.85 | 0.555556 | 0 |
awesome-app
| 50 |
2024-03-02T22:22:49.627296
|
Apache-2.0
| false |
c9cc3164bee520e6c491ed21c5d99f51
|
from _typeshed import SupportsRead\nfrom datetime import date, datetime, time, tzinfo\nfrom typing_extensions import TypeAlias\n\n_Readable: TypeAlias = SupportsRead[str | bytes]\n_TakesAscii: TypeAlias = str | bytes | _Readable\n\nclass isoparser:\n def __init__(self, sep: str | bytes | None = None): ...\n def isoparse(self, dt_str: _TakesAscii) -> datetime: ...\n def parse_isodate(self, datestr: _TakesAscii) -> date: ...\n def parse_isotime(self, timestr: _TakesAscii) -> time: ...\n def parse_tzstr(self, tzstr: _TakesAscii, zero_as_utc: bool = True) -> tzinfo: ...\n\ndef isoparse(dt_str: _TakesAscii) -> datetime: ...\n
|
.venv\Lib\site-packages\dateutil-stubs\parser\isoparser.pyi
|
isoparser.pyi
|
Other
| 628 | 0.85 | 0.466667 | 0 |
python-kit
| 694 |
2024-09-30T05:04:39.109973
|
Apache-2.0
| false |
34d91be50213f9d9cbb25e3100f5d949
|
from collections.abc import Callable, Mapping\nfrom datetime import datetime, tzinfo\nfrom typing import IO, Any\nfrom typing_extensions import TypeAlias\n\nfrom .isoparser import isoparse as isoparse, isoparser as isoparser\n\n_FileOrStr: TypeAlias = bytes | str | IO[str] | IO[Any]\n_TzData: TypeAlias = tzinfo | int | str | None\n_TzInfo: TypeAlias = Mapping[str, _TzData] | Callable[[str, int], _TzData]\n\nclass parserinfo:\n JUMP: list[str]\n WEEKDAYS: list[tuple[str, ...]]\n MONTHS: list[tuple[str, ...]]\n HMS: list[tuple[str, str, str]]\n AMPM: list[tuple[str, str]]\n UTCZONE: list[str]\n PERTAIN: list[str]\n TZOFFSET: dict[str, int]\n def __init__(self, dayfirst: bool = False, yearfirst: bool = False) -> None: ...\n def jump(self, name: str) -> bool: ...\n def weekday(self, name: str) -> int | None: ...\n def month(self, name: str) -> int | None: ...\n def hms(self, name: str) -> int | None: ...\n def ampm(self, name: str) -> int | None: ...\n def pertain(self, name: str) -> bool: ...\n def utczone(self, name: str) -> bool: ...\n def tzoffset(self, name: str) -> int | None: ...\n def convertyear(self, year: int) -> int: ...\n def validate(self, res: datetime) -> bool: ...\n\nclass parser:\n def __init__(self, info: parserinfo | None = None) -> None: ...\n def parse(\n self,\n timestr: _FileOrStr,\n default: datetime | None = None,\n ignoretz: bool = False,\n tzinfos: _TzInfo | None = None,\n *,\n dayfirst: bool | None = ...,\n yearfirst: bool | None = ...,\n fuzzy: bool = ...,\n fuzzy_with_tokens: bool = ...,\n ) -> datetime: ...\n\nDEFAULTPARSER: parser\n\ndef parse(\n timestr: _FileOrStr,\n parserinfo: parserinfo | None = None,\n *,\n dayfirst: bool | None = ...,\n yearfirst: bool | None = ...,\n ignoretz: bool = ...,\n fuzzy: bool = ...,\n fuzzy_with_tokens: bool = ...,\n default: datetime | None = ...,\n tzinfos: _TzInfo | None = ...,\n) -> datetime: ...\n\nclass _tzparser: ...\n\nDEFAULTTZPARSER: _tzparser\n\nclass ParserError(ValueError): ...\nclass 
UnknownTimezoneWarning(RuntimeWarning): ...\n
|
.venv\Lib\site-packages\dateutil-stubs\parser\__init__.pyi
|
__init__.pyi
|
Other
| 2,138 | 0.85 | 0.279412 | 0.033898 |
react-lib
| 415 |
2025-04-16T18:10:01.749678
|
Apache-2.0
| false |
d53ec8f0e6bf4e081d932b57eabe8a8b
|
import datetime\nfrom typing import ClassVar, Literal, Protocol, TypeVar\n\nfrom ..relativedelta import relativedelta\nfrom ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase\n\n_DT = TypeVar("_DT", bound=datetime.datetime)\n\nZERO: datetime.timedelta\nEPOCH: datetime.datetime\nEPOCHORDINAL: int\n\nclass tzutc(datetime.tzinfo):\n def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def tzname(self, dt: datetime.datetime | None) -> str: ...\n def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...\n def fromutc(self, dt: _DT) -> _DT: ...\n def __eq__(self, other): ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __ne__(self, other): ...\n __reduce__ = object.__reduce__\n\nclass tzoffset(datetime.tzinfo):\n def __init__(self, name, offset) -> None: ...\n def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...\n def tzname(self, dt: datetime.datetime | None) -> str: ...\n def fromutc(self, dt: _DT) -> _DT: ...\n def __eq__(self, other): ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __ne__(self, other): ...\n __reduce__ = object.__reduce__\n @classmethod\n def instance(cls, name, offset) -> tzoffset: ...\n\nclass tzlocal(_tzinfo):\n def __init__(self) -> None: ...\n def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def tzname(self, dt: datetime.datetime | None) -> str: ...\n def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...\n def __eq__(self, other): ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __ne__(self, other): ...\n 
__reduce__ = object.__reduce__\n\nclass _ttinfo:\n def __init__(self) -> None: ...\n def __eq__(self, other): ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __ne__(self, other): ...\n\nclass _TZFileReader(Protocol):\n # optional attribute:\n # name: str\n def read(self, size: int, /) -> bytes: ...\n def seek(self, target: int, whence: Literal[1], /) -> object: ...\n\nclass tzfile(_tzinfo):\n def __init__(self, fileobj: str | _TZFileReader, filename: str | None = None) -> None: ...\n def is_ambiguous(self, dt: datetime.datetime | None, idx: int | None = None) -> bool: ...\n def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...\n def tzname(self, dt: datetime.datetime | None) -> str: ...\n def __eq__(self, other): ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __ne__(self, other): ...\n def __reduce__(self): ...\n def __reduce_ex__(self, protocol): ...\n\nclass tzrange(tzrangebase):\n hasdst: bool\n def __init__(\n self,\n stdabbr: str,\n stdoffset: int | datetime.timedelta | None = None,\n dstabbr: str | None = None,\n dstoffset: int | datetime.timedelta | None = None,\n start: relativedelta | None = None,\n end: relativedelta | None = None,\n ) -> None: ...\n def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ...\n def __eq__(self, other): ...\n\nclass tzstr(tzrange):\n hasdst: bool\n def __init__(self, s: str, posix_offset: bool = False) -> None: ...\n @classmethod\n def instance(cls, name, offset) -> tzoffset: ...\n\nclass _ICalReader(Protocol):\n # optional attribute:\n # name: str\n def read(self) -> str: ...\n\nclass tzical:\n def __init__(self, fileobj: str | _ICalReader) -> None: ...\n def keys(self): ...\n def get(self, tzid=None): ...\n\nTZFILES: list[str]\nTZPATHS: list[str]\n\ndef datetime_exists(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...\ndef 
datetime_ambiguous(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...\ndef resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ...\n\nclass _GetTZ:\n def __call__(self, name: str | None = ...) -> datetime.tzinfo | None: ...\n def nocache(self, name: str | None) -> datetime.tzinfo | None: ...\n\ngettz: _GetTZ\n
|
.venv\Lib\site-packages\dateutil-stubs\tz\tz.pyi
|
tz.pyi
|
Other
| 4,458 | 0.95 | 0.54386 | 0.041237 |
node-utils
| 101 |
2023-08-01T11:22:35.983351
|
BSD-3-Clause
| false |
61d11a52d8f46d17902e447f1c766502
|
import abc\nfrom datetime import datetime, timedelta, tzinfo\nfrom typing import ClassVar\n\ndef tzname_in_python2(namefunc): ...\ndef enfold(dt: datetime, fold: int = 1): ...\n\nclass _DatetimeWithFold(datetime):\n @property\n def fold(self): ...\n\n# Doesn't actually have ABCMeta as the metaclass at runtime,\n# but mypy complains if we don't have it in the stub.\n# See discussion in #8908\nclass _tzinfo(tzinfo, metaclass=abc.ABCMeta):\n def is_ambiguous(self, dt: datetime) -> bool: ...\n def fromutc(self, dt: datetime) -> datetime: ...\n\nclass tzrangebase(_tzinfo):\n def __init__(self) -> None: ...\n def utcoffset(self, dt: datetime | None) -> timedelta | None: ...\n def dst(self, dt: datetime | None) -> timedelta | None: ...\n def tzname(self, dt: datetime | None) -> str: ...\n def fromutc(self, dt: datetime) -> datetime: ...\n def is_ambiguous(self, dt: datetime) -> bool: ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __ne__(self, other): ...\n __reduce__ = object.__reduce__\n
|
.venv\Lib\site-packages\dateutil-stubs\tz\_common.pyi
|
_common.pyi
|
Other
| 1,025 | 0.95 | 0.571429 | 0.125 |
awesome-app
| 135 |
2023-08-19T05:53:49.248541
|
BSD-3-Clause
| false |
a02eae0b3ef21fec5259eb40936273b7
|
from .tz import (\n datetime_ambiguous as datetime_ambiguous,\n datetime_exists as datetime_exists,\n gettz as gettz,\n resolve_imaginary as resolve_imaginary,\n tzfile as tzfile,\n tzical as tzical,\n tzlocal as tzlocal,\n tzoffset as tzoffset,\n tzrange as tzrange,\n tzstr as tzstr,\n tzutc as tzutc,\n)\n\nUTC: tzutc\n
|
.venv\Lib\site-packages\dateutil-stubs\tz\__init__.pyi
|
__init__.pyi
|
Other
| 340 | 0.85 | 0 | 0 |
react-lib
| 499 |
2024-06-11T13:25:14.380975
|
MIT
| false |
32f747b7e6ae75439be5c5e4d3aad39c
|
from _typeshed import StrOrBytesPath\nfrom collections.abc import Sequence\nfrom tarfile import TarInfo\n\ndef rebuild(\n filename: StrOrBytesPath, tag=None, format: str = "gz", zonegroups: Sequence[str | TarInfo] = [], metadata=None\n) -> None: ...\n
|
.venv\Lib\site-packages\dateutil-stubs\zoneinfo\rebuild.pyi
|
rebuild.pyi
|
Other
| 247 | 0.85 | 0.142857 | 0 |
node-utils
| 936 |
2023-11-02T17:28:36.860949
|
Apache-2.0
| false |
0e72729a7a325c8a57c01cd9297bf7f5
|
from _typeshed import Incomplete\nfrom typing import IO\nfrom typing_extensions import TypeAlias\n\n__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]\n\n_MetadataType: TypeAlias = dict[str, Incomplete]\n\nclass ZoneInfoFile:\n zones: dict[Incomplete, Incomplete]\n metadata: _MetadataType | None\n def __init__(self, zonefile_stream: IO[bytes] | None = None) -> None: ...\n def get(self, name, default=None): ...\n\ndef get_zonefile_instance(new_instance: bool = False) -> ZoneInfoFile: ...\ndef gettz(name): ...\ndef gettz_db_metadata() -> _MetadataType: ...\n
|
.venv\Lib\site-packages\dateutil-stubs\zoneinfo\__init__.pyi
|
__init__.pyi
|
Other
| 572 | 0.85 | 0.352941 | 0 |
vue-tools
| 305 |
2023-07-27T12:42:42.226578
|
Apache-2.0
| false |
df78267ca60b24dbdac2b4421901c601
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import annotations\n\nimport functools\nimport typing\n\nfrom debugpy import _version\n\n\n# Expose debugpy.server API from subpackage, but do not actually import it unless\n# and until a member is invoked - we don't want the server package loaded in the\n# adapter, the tests, or setup.py.\n\n# Docstrings for public API members must be formatted according to PEP 8 - no more\n# than 72 characters per line! - and must be readable when retrieved via help().\n\n\nEndpoint = typing.Tuple[str, int]\n\n\ndef _api(cancelable=False):\n def apply(f):\n @functools.wraps(f)\n def wrapper(*args, **kwargs):\n from debugpy.server import api\n\n wrapped = getattr(api, f.__name__)\n return wrapped(*args, **kwargs)\n\n if cancelable:\n\n def cancel(*args, **kwargs):\n from debugpy.server import api\n\n wrapped = getattr(api, f.__name__)\n return wrapped.cancel(*args, **kwargs)\n\n wrapper.cancel = cancel # pyright: ignore\n\n return wrapper\n\n return apply\n\n\n@_api()\ndef log_to(__path: str | typing.TextIO) -> None:\n """Generate detailed debugpy logs in the specified directory.\n\n The directory must already exist. Several log files are generated,\n one for every process involved in the debug session.\n """\n\n\n@_api()\ndef configure(__properties: dict[str, typing.Any] | None = None, **kwargs) -> None:\n """Sets debug configuration properties that cannot be set in the\n "attach" request, because they must be applied as early as possible\n in the process being debugged.\n\n For example, a "launch" configuration with subprocess debugging\n disabled can be defined entirely in JSON::\n\n {\n "request": "launch",\n "subProcess": false,\n ...\n }\n\n But the same cannot be done with "attach", because "subProcess"\n must be known at the point debugpy starts tracing execution. 
Thus,\n it is not available in JSON, and must be omitted::\n\n {\n "request": "attach",\n ...\n }\n\n and set from within the debugged process instead::\n\n debugpy.configure(subProcess=False)\n debugpy.listen(...)\n\n Properties to set can be passed either as a single dict argument,\n or as separate keyword arguments::\n\n debugpy.configure({"subProcess": False})\n """\n\n\n@_api()\ndef listen(\n __endpoint: Endpoint | int, *, in_process_debug_adapter: bool = False\n) -> Endpoint:\n """Starts a debug adapter debugging this process, that listens for\n incoming socket connections from clients on the specified address.\n\n `__endpoint` must be either a (host, port) tuple as defined by the\n standard `socket` module for the `AF_INET` address family, or a port\n number. If only the port is specified, host is "127.0.0.1".\n\n `in_process_debug_adapter`: by default a separate python process is\n spawned and used to communicate with the client as the debug adapter.\n By setting the value of `in_process_debug_adapter` to True a new \n python process is not spawned. Note: the con of setting \n `in_process_debug_adapter` to True is that subprocesses won't be \n automatically debugged.\n \n Returns the interface and the port on which the debug adapter is\n actually listening, in the same format as `__endpoint`. This may be\n different from address if port was 0 in the latter, in which case\n the adapter will pick some unused ephemeral port to listen on.\n\n This function does't wait for a client to connect to the debug\n adapter that it starts. 
Use `wait_for_client` to block execution\n until the client connects.\n """\n ...\n\n@_api()\ndef connect(__endpoint: Endpoint | int, *, access_token: str | None = None) -> Endpoint:\n """Tells an existing debug adapter instance that is listening on the\n specified address to debug this process.\n\n `__endpoint` must be either a (host, port) tuple as defined by the\n standard `socket` module for the `AF_INET` address family, or a port\n number. If only the port is specified, host is "127.0.0.1".\n\n `access_token` must be the same value that was passed to the adapter\n via the `--server-access-token` command-line switch.\n\n This function does't wait for a client to connect to the debug\n adapter that it connects to. Use `wait_for_client` to block\n execution until the client connects.\n """\n ...\n\n@_api(cancelable=True)\ndef wait_for_client() -> None:\n """If there is a client connected to the debug adapter that is\n debugging this process, returns immediately. Otherwise, blocks\n until a client connects to the adapter.\n\n While this function is waiting, it can be canceled by calling\n `wait_for_client.cancel()` from another thread.\n """\n\n\n@_api()\ndef is_client_connected() -> bool:\n """True if a client is connected to the debug adapter that is\n debugging this process.\n """\n ...\n\n\n@_api()\ndef breakpoint() -> None:\n """If a client is connected to the debug adapter that is debugging\n this process, pauses execution of all threads, and simulates a\n breakpoint being hit at the line following the call.\n\n It is also registered as the default handler for builtins.breakpoint().\n """\n\n\n@_api()\ndef debug_this_thread() -> None:\n """Makes the debugger aware of the current thread.\n\n Must be called on any background thread that is started by means\n other than the usual Python APIs (i.e. 
the "threading" module),\n in order for breakpoints to work on that thread.\n """\n\n\n@_api()\ndef trace_this_thread(__should_trace: bool):\n """Tells the debug adapter to enable or disable tracing on the\n current thread.\n\n When the thread is traced, the debug adapter can detect breakpoints\n being hit, but execution is slower, especially in functions that\n have any breakpoints set in them. Disabling tracing when breakpoints\n are not anticipated to be hit can improve performance. It can also\n be used to skip breakpoints on a particular thread.\n\n Tracing is automatically disabled for all threads when there is no\n client connected to the debug adapter.\n """\n\n\n__version__: str = _version.get_versions()["version"]\n
|
.venv\Lib\site-packages\debugpy\public_api.py
|
public_api.py
|
Python
| 6,571 | 0.95 | 0.158974 | 0.057554 |
node-utils
| 599 |
2023-10-30T21:04:17.744672
|
BSD-3-Clause
| false |
0203ce2ee48e826e89b294ebf2e79846
|
\nTHIRD-PARTY SOFTWARE NOTICES AND INFORMATION\nDo Not Translate or Localize\n\ndebugpy incorporates third party material from the projects listed below.\n\n\n1. PyDev.Debugger (https://github.com/fabioz/PyDev.Debugger)\n Includes:File copyright Brainwy Software Ltda.\n Includes:File(s) related to Python, Cpython\n Includes:File authored by Yuli Fitterman\n Includes:File copyright Brainwy software Ltda\n Includes:File with methods from Spyder\n Includes:File(s) related to IPython\n Includes:Files copyright Microsoft Corporation\n Includes:six\n Includes:WinAppDbg\n Includes:XML-RPC client interface for Python\n\n\n%% PyDev.Debugger NOTICES, INFORMATION, AND LICENSE BEGIN HERE\n=========================================\nThe source code for the PyDev.Debugger files are provided with debugpy, or you may send a check or money order for US $5.00, including the product name (debugpy), the open source component name (PyDev.Debugger) and version number, to: Source Code Compliance Team, Microsoft Corporation, One Microsoft Way, Redmond, WA 98052, USA.\n\nEclipse Public License, Version 1.0 (EPL-1.0)\nTHE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.\n1. DEFINITIONS\n"Contribution" means:\na) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and\nb) in the case of each subsequent Contributor:\ni) changes to the Program, and\nii) additions to the Program;\nwhere such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. 
Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.\n"Contributor" means any person or entity that distributes the Program.\n"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.\n"Program" means the Contributions distributed in accordance with this Agreement.\n"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.\n2. GRANT OF RIGHTS\na) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.\nb) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.\nc) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. 
Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.\nd) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.\n3. REQUIREMENTS\nA Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:\na) it complies with the terms and conditions of this Agreement; and\nb) its license agreement:\ni) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;\nii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;\niii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and\niv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.\nWhen the Program is made available in source code form:\na) it must be made available under this Agreement; and\nb) a copy of this Agreement must be included with each copy of the Program.\nContributors may not remove or alter any copyright notices 
contained within the Program.\nEach Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.\n4. COMMERCIAL DISTRIBUTION\nCommercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.\nFor example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. 
If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.\n5. NO WARRANTY\nEXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.\n6. DISCLAIMER OF LIABILITY\nEXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.\n7. 
GENERAL\nIf any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.\nIf Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.\nAll Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.\nEveryone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. 
The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.\nThis Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.\n=========================================\nIncludes File copyright Brainwy Software Ltda.\n\nFile includes the following notice:\n\nCopyright: Brainwy Software Ltda.\n\nLicense: EPL.\n\n=========================================\nIncludes file(s) from Python, Python xreload, Cpython and an ActiveState.com Recipe on "NULL OBJECT DESIGN PATTERN (PYTHON RECIPE)"\n\nPYTHON SOFTWARE FOUNDATION LICENSE VERSION 2\n--------------------------------------------\n\n1. This LICENSE AGREEMENT is between the Python Software Foundation\n("PSF"), and the Individual or Organization ("Licensee") accessing and\notherwise using this software ("Python") in source or binary form and\nits associated documentation.\n\n2. 
Subject to the terms and conditions of this License Agreement, PSF hereby\ngrants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,\nanalyze, test, perform and/or display publicly, prepare derivative works,\ndistribute, and otherwise use Python alone or in any derivative version,\nprovided, however, that PSF's License Agreement and PSF's notice of copyright,\ni.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,\n2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights\nReserved" are retained in Python alone or in any derivative version prepared by\nLicensee.\n\n3. In the event Licensee prepares a derivative work that is based on\nor incorporates Python or any part thereof, and wants to make\nthe derivative work available to others as provided herein, then\nLicensee hereby agrees to include in any such work a brief summary of\nthe changes made to Python.\n\n4. PSF is making Python available to Licensee on an "AS IS"\nbasis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR\nIMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND\nDISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS\nFOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT\nINFRINGE ANY THIRD PARTY RIGHTS.\n\n5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON\nFOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS\nA RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,\nOR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.\n\n6. This License Agreement will automatically terminate upon a material\nbreach of its terms and conditions.\n\n7. Nothing in this License Agreement shall be deemed to create any\nrelationship of agency, partnership, or joint venture between PSF and\nLicensee. 
This License Agreement does not grant permission to use PSF\ntrademarks or trade name in a trademark sense to endorse or promote\nproducts or services of Licensee, or any third party.\n\n8. By copying, installing or otherwise using Python, Licensee\nagrees to be bound by the terms and conditions of this License\nAgreement.\n\n\nBEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0\n-------------------------------------------\n\nBEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1\n\n1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an\noffice at 160 Saratoga Avenue, Santa Clara, CA 95051, and the\nIndividual or Organization ("Licensee") accessing and otherwise using\nthis software in source or binary form and its associated\ndocumentation ("the Software").\n\n2. Subject to the terms and conditions of this BeOpen Python License\nAgreement, BeOpen hereby grants Licensee a non-exclusive,\nroyalty-free, world-wide license to reproduce, analyze, test, perform\nand/or display publicly, prepare derivative works, distribute, and\notherwise use the Software alone or in any derivative version,\nprovided, however, that the BeOpen Python License is retained in the\nSoftware, alone or in any derivative version prepared by Licensee.\n\n3. BeOpen is making the Software available to Licensee on an "AS IS"\nbasis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR\nIMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND\nDISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS\nFOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT\nINFRINGE ANY THIRD PARTY RIGHTS.\n\n4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE\nSOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS\nAS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY\nDERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.\n\n5. 
This License Agreement will automatically terminate upon a material\nbreach of its terms and conditions.\n\n6. This License Agreement shall be governed by and interpreted in all\nrespects by the law of the State of California, excluding conflict of\nlaw provisions. Nothing in this License Agreement shall be deemed to\ncreate any relationship of agency, partnership, or joint venture\nbetween BeOpen and Licensee. This License Agreement does not grant\npermission to use BeOpen trademarks or trade names in a trademark\nsense to endorse or promote products or services of Licensee, or any\nthird party. As an exception, the "BeOpen Python" logos available at\nhttp://www.pythonlabs.com/logos.html may be used according to the\npermissions granted on that web page.\n\n7. By copying, installing or otherwise using the software, Licensee\nagrees to be bound by the terms and conditions of this License\nAgreement.\n\n\nCNRI LICENSE AGREEMENT FOR PYTHON 1.6.1\n---------------------------------------\n\n1. This LICENSE AGREEMENT is between the Corporation for National\nResearch Initiatives, having an office at 1895 Preston White Drive,\nReston, VA 20191 ("CNRI"), and the Individual or Organization\n("Licensee") accessing and otherwise using Python 1.6.1 software in\nsource or binary form and its associated documentation.\n\n2. Subject to the terms and conditions of this License Agreement, CNRI\nhereby grants Licensee a nonexclusive, royalty-free, world-wide\nlicense to reproduce, analyze, test, perform and/or display publicly,\nprepare derivative works, distribute, and otherwise use Python 1.6.1\nalone or in any derivative version, provided, however, that CNRI's\nLicense Agreement and CNRI's notice of copyright, i.e., "Copyright (c)\n1995-2001 Corporation for National Research Initiatives; All Rights\nReserved" are retained in Python 1.6.1 alone or in any derivative\nversion prepared by Licensee. 
Alternately, in lieu of CNRI's License\nAgreement, Licensee may substitute the following text (omitting the\nquotes): "Python 1.6.1 is made available subject to the terms and\nconditions in CNRI's License Agreement. This Agreement together with\nPython 1.6.1 may be located on the Internet using the following\nunique, persistent identifier (known as a handle): 1895.22/1013. This\nAgreement may also be obtained from a proxy server on the Internet\nusing the following URL: http://hdl.handle.net/1895.22/1013".\n\n3. In the event Licensee prepares a derivative work that is based on\nor incorporates Python 1.6.1 or any part thereof, and wants to make\nthe derivative work available to others as provided herein, then\nLicensee hereby agrees to include in any such work a brief summary of\nthe changes made to Python 1.6.1.\n\n4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"\nbasis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR\nIMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND\nDISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS\nFOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT\nINFRINGE ANY THIRD PARTY RIGHTS.\n\n5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON\n1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS\nA RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,\nOR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.\n\n6. This License Agreement will automatically terminate upon a material\nbreach of its terms and conditions.\n\n7. This License Agreement shall be governed by the federal\nintellectual property law of the United States, including without\nlimitation the federal copyright law, and, to the extent such\nU.S. 
federal law does not apply, by the law of the Commonwealth of\nVirginia, excluding Virginia's conflict of law provisions.\nNotwithstanding the foregoing, with regard to derivative works based\non Python 1.6.1 that incorporate non-separable material that was\npreviously distributed under the GNU General Public License (GPL), the\nlaw of the Commonwealth of Virginia shall govern this License\nAgreement only as to issues arising under or with respect to\nParagraphs 4, 5, and 7 of this License Agreement. Nothing in this\nLicense Agreement shall be deemed to create any relationship of\nagency, partnership, or joint venture between CNRI and Licensee. This\nLicense Agreement does not grant permission to use CNRI trademarks or\ntrade name in a trademark sense to endorse or promote products or\nservices of Licensee, or any third party.\n\n8. By clicking on the "ACCEPT" button where indicated, or by copying,\ninstalling or otherwise using Python 1.6.1, Licensee agrees to be\nbound by the terms and conditions of this License Agreement.\n\n ACCEPT\n\n\nCWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2\n--------------------------------------------------\n\nCopyright (C) 2006-2010 Python Software Foundation\n\nPermission to use, copy, modify, and distribute this software and its\ndocumentation for any purpose and without fee is hereby granted,\nprovided that the above copyright notice appear in all copies and that\nboth that copyright notice and this permission notice appear in\nsupporting documentation, and that the name of Stichting Mathematisch\nCentrum or CWI not be used in advertising or publicity pertaining to\ndistribution of the software without specific, written prior\npermission.\n\nSTICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO\nTHIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND\nFITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE\nFOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\nWHATSOEVER 
RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\nACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT\nOF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n=========================================\nIncludes File authored by Yuli Fitterman\n\nCopyright (c) Yuli Fitterman\n\nLicensed under the Apache License, Version 2.0 (the "License");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an "AS IS" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n=========================================\nIncludes file(s): * Copyright (c) Brainwy software Ltda.\n *\n * This source code is subject to terms and conditions of the Apache License, Version 2.0. A\n * copy of the license can be found in the License.html file at the root of this distribution. If\n * you cannot locate the Apache License, Version 2.0, please send an email to\n * vspython@microsoft.com. 
By using this source code in any fashion, you are agreeing to be bound\n * by the terms of the Apache License, Version 2.0.\n *\n * You must not remove this notice, or any other, from this software.\n=========================================\nIncludes file(s): Copyright (c) 2009-2012 Pierre Raybaut\n\nPermission is hereby granted, free of charge, to any person\nobtaining a copy of this software and associated documentation\nfiles (the "Software"), to deal in the Software without\nrestriction, including without limitation the rights to use,\ncopy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following\nconditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\nOF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\nHOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\nWHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\nOTHER DEALINGS IN THE SOFTWARE.\n=========================================\nIncludes file(s) from Ipython\n\nCopyright (c) 2008-2010, IPython Development Team\nCopyright (c) 2001-2007, Fernando Perez. 
<fernando.perez@colorado.edu>\nCopyright (c) 2001, Janko Hauser <jhauser@zscout.de>\nCopyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>\n\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\nRedistributions of source code must retain the above copyright notice, this\nlist of conditions and the following disclaimer.\n\nRedistributions in binary form must reproduce the above copyright notice, this\nlist of conditions and the following disclaimer in the documentation and/or\nother materials provided with the distribution.\n\nNeither the name of the IPython Development Team nor the names of its\ncontributors may be used to endorse or promote products derived from this\nsoftware without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND\nANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\nWARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n=========================================\nIncludes file(s): * Copyright (c) Microsoft Corporation.\n*\n* This source code is subject to terms and conditions of the Apache License, Version 2.0. A\n* copy of the license can be found in the License.html file at the root of this distribution. 
If\n* you cannot locate the Apache License, Version 2.0, please send an email to\n* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound\n* by the terms of the Apache License, Version 2.0.\n*\n* You must not remove this notice, or any other, from this software.\n=========================================\nIncludes file(s) from https://pythonhosted.org/six/\n\nCopyright (c) 2010-2018 Benjamin Peterson\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the "Software"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n=========================================\nIncludes WinAppDbg\n\n# Copyright (c) 2009-2014, Mario Vilas\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice,\n# this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above copyright\n# notice,this list of conditions and the following disclaimer in the\n# documentation and/or other materials provided with the distribution.\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE\n# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n# POSSIBILITY OF SUCH DAMAGE.\n=========================================\nIncludes XML-RPC client interface for Python\n# Copyright (c) 1999-2002 by Secret Labs AB\n# Copyright (c) 1999-2002 by Fredrik Lundh\n#\n# By obtaining, using, and/or copying this software and/or its\n# associated documentation, you agree that you have read, understood,\n# and will comply with the following terms and conditions:\n#\n# Permission to use, copy, modify, and distribute this software and\n# its associated documentation for any purpose and without fee is\n# hereby granted, provided that the above copyright notice appears in\n# all copies, and that both that copyright notice and this permission\n# notice appear in supporting documentation, and that the name of\n# Secret Labs AB or the author not be used in advertising or publicity\n# pertaining to distribution of the software without specific, written\n# prior permission.\n#\n# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD\n# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-\n# ABILITY AND FITNESS. 
IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR\n# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY\n# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,\n# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS\n# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE\n# OF THIS SOFTWARE.\n=========================================\nIncludes https://github.com/vstinner/bytecode e3e77fb690ed05ac171e15694e1c5d0e0dc34e86 - MIT\n\nCopyright (c) 2016 Red Hat.\n\nThe MIT License (MIT)\nCopyright (c) 2016 Red Hat.\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the\n"Software"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be included\nin all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n=========================================\nIncludes https://github.com/benhoyt/scandir 6ed381881bc2fb9de05804e892eeeeb3601a3af2 - BSD 3-Clause "New" or "Revised" License\n\nCopyright (c) 2012, Ben Hoyt\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n* 
Redistributions of source code must retain the above copyright notice, this\nlist of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice,\nthis list of conditions and the following disclaimer in the documentation\nand/or other materials provided with the distribution.\n\n* Neither the name of Ben Hoyt nor the names of its contributors may be used\nto endorse or promote products derived from this software without specific\nprior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n=========================================\nEND OF PyDev.Debugger NOTICES, INFORMATION, AND LICENSE\n
|
.venv\Lib\site-packages\debugpy\ThirdPartyNotices.txt
|
ThirdPartyNotices.txt
|
Other
| 34,844 | 0.95 | 0.054108 | 0.163121 |
python-kit
| 964 |
2024-07-06T14:25:24.083364
|
MIT
| false |
fd7dd02842ebc7298c60575ffd4a47d1
|
# This file was generated by 'versioneer.py' (0.23) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json

# Raw version record written by versioneer at build time. The payload and the
# trailing END marker comment are how versioneer's tooling locates and rewrites
# this value, so the string itself is left exactly as generated.
version_json = '''
{
 "date": "2025-04-10T11:38:49-0700",
 "dirty": false,
 "error": null,
 "full-revisionid": "8b5b84aec3d4ca6fcc5d17edff5627ba19f7d9e6",
 "version": "1.8.14"
}
''' # END VERSION_JSON


def get_versions():
    """Decode the embedded VERSION_JSON payload and return it as a dict.

    Per versioneer's record format the result carries the keys "version",
    "full-revisionid", "dirty", "error", and "date".
    """
    record = json.loads(version_json)
    return record
|
.venv\Lib\site-packages\debugpy\_version.py
|
_version.py
|
Python
| 519 | 0.95 | 0.047619 | 0.25 |
python-kit
| 379 |
2024-08-01T01:20:33.479114
|
GPL-3.0
| false |
143f02d902fbb2fe6ef6c1d65d95cd77
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\n"""An implementation of the Debug Adapter Protocol (DAP) for Python.\n\nhttps://microsoft.github.io/debug-adapter-protocol/\n"""\n\n# debugpy stable public API consists solely of members of this module that are\n# enumerated below.\n__all__ = [ # noqa\n "__version__",\n "breakpoint",\n "configure",\n "connect",\n "debug_this_thread",\n "is_client_connected",\n "listen",\n "log_to",\n "trace_this_thread",\n "wait_for_client",\n]\n\nimport sys\n\nassert sys.version_info >= (3, 7), (\n "Python 3.6 and below is not supported by this version of debugpy; "\n "use debugpy 1.5.1 or earlier."\n)\n\n\n# Actual definitions are in a separate file to work around parsing issues causing\n# SyntaxError on Python 2 and preventing the above version check from executing.\nfrom debugpy.public_api import * # noqa\nfrom debugpy.public_api import __version__\n\ndel sys\n
|
.venv\Lib\site-packages\debugpy\__init__.py
|
__init__.py
|
Python
| 1,056 | 0.95 | 0.052632 | 0.233333 |
awesome-app
| 791 |
2023-11-12T14:54:05.185627
|
MIT
| false |
3bcdee1b6b64f9e95758b130491d23ee
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

"""Entry point for running debugpy as a directory target: ``python <dir>/debugpy ...``."""

import sys

if __name__ == "__main__":

    # There are three ways to run debugpy:
    #
    # 1. Installed as a module in the current environment (python -m debugpy ...)
    # 2. Run as a script from source code (python <repo_root>/src/debugpy ...)
    # 3. Installed as a module in a random directory
    #
    # In the first case no extra work is needed: 'import debugpy' resolves
    # normally, and the 'debugpy' console entry point from setup.py also works.
    #
    # In the second case, sys.path[0] is the directory containing this file
    # (debugpy/ itself), added automatically by Python. 'import debugpy' would
    # fail, because it is the PARENT of debugpy/ that must be on sys.path - so
    # sys.path[0] has to be adjusted. The entry point is not defined here.
    #
    # In the third case, 'python -m debugpy' does not work (the module is not
    # installed in any environment), but 'python <install_dir>/debugpy' works
    # just like the second case. To use the entry point instead, the user must
    # first add <install_dir> to PYTHONPATH and optionally <install_dir>/bin
    # to PATH, then run <install_dir>/bin/debugpy.
    #
    # Simply mutating sys.path would make 'import debugpy' work but would also
    # break other imports by resolving them relative to debugpy/ - e.g.
    # 'import debugger' would try to import debugpy/debugger.py. Hence the
    # three-step dance below:
    #   1. Repoint sys.path[0] at the parent directory of debugpy/.
    #   2. Import debugpy (it stays cached in sys.modules thereafter, so all
    #      future imports of it or its submodules resolve correctly).
    #   3. Remove the added entry so it cannot affect any later imports.
    if "debugpy" not in sys.modules:

        # Do not use dirname() to walk up - this can be a relative path, e.g. ".".
        sys.path[0] = sys.path[0] + "/../"
        import debugpy # noqa
        del sys.path[0]

    from debugpy.server import cli

    cli.main()
|
.venv\Lib\site-packages\debugpy\__main__.py
|
__main__.py
|
Python
| 3,258 | 0.95 | 0.084507 | 0.876923 |
python-kit
| 579 |
2025-07-09T10:56:19.550324
|
Apache-2.0
| false |
f189b0fe49d8636e380f5e8abb2bcd71
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import annotations\n\nimport atexit\nimport os\nimport sys\n\nimport debugpy\nfrom debugpy import adapter, common, launcher\nfrom debugpy.common import json, log, messaging, sockets\nfrom debugpy.adapter import clients, components, launchers, servers, sessions\n\n\nclass Client(components.Component):\n """Handles the client side of a debug session."""\n\n message_handler = components.Component.message_handler\n\n known_subprocesses: set[servers.Connection]\n """Server connections to subprocesses that this client has been made aware of.\n """\n\n class Capabilities(components.Capabilities):\n PROPERTIES = {\n "supportsVariableType": False,\n "supportsVariablePaging": False,\n "supportsRunInTerminalRequest": False,\n "supportsMemoryReferences": False,\n "supportsArgsCanBeInterpretedByShell": False,\n "supportsStartDebuggingRequest": False,\n }\n\n class Expectations(components.Capabilities):\n PROPERTIES = {\n "locale": "en-US",\n "linesStartAt1": True,\n "columnsStartAt1": True,\n "pathFormat": json.enum("path", optional=True), # we don't support "uri"\n }\n\n def __init__(self, sock):\n if sock == "stdio":\n log.info("Connecting to client over stdio...", self)\n self.using_stdio = True\n stream = messaging.JsonIOStream.from_stdio()\n # Make sure that nothing else tries to interfere with the stdio streams\n # that are going to be used for DAP communication from now on.\n sys.stdin = stdin = open(os.devnull, "r")\n atexit.register(stdin.close)\n sys.stdout = stdout = open(os.devnull, "w")\n atexit.register(stdout.close)\n else:\n self.using_stdio = False\n stream = messaging.JsonIOStream.from_socket(sock)\n\n with sessions.Session() as session:\n super().__init__(session, stream)\n\n self.client_id = None\n """ID of the connecting client. 
This can be 'test' while running tests."""\n\n self.has_started = False\n """Whether the "launch" or "attach" request was received from the client, and\n fully handled.\n """\n\n self.start_request = None\n """The "launch" or "attach" request as received from the client.\n """\n\n self.restart_requested = False\n """Whether the client requested the debug adapter to be automatically\n restarted via "restart":true in the start request.\n """\n\n self._initialize_request = None\n """The "initialize" request as received from the client, to propagate to the\n server later."""\n\n self._deferred_events = []\n """Deferred events from the launcher and the server that must be propagated\n only if and when the "launch" or "attach" response is sent.\n """\n\n self._forward_terminate_request = False\n\n self.known_subprocesses = set()\n\n session.client = self\n session.register()\n\n # For the transition period, send the telemetry events with both old and new\n # name. The old one should be removed once the new one lights up.\n self.channel.send_event(\n "output",\n {\n "category": "telemetry",\n "output": "ptvsd",\n "data": {"packageVersion": debugpy.__version__},\n },\n )\n self.channel.send_event(\n "output",\n {\n "category": "telemetry",\n "output": "debugpy",\n "data": {"packageVersion": debugpy.__version__},\n },\n )\n sessions.report_sockets()\n\n def propagate_after_start(self, event):\n # pydevd starts sending events as soon as we connect, but the client doesn't\n # expect to see any until it receives the response to "launch" or "attach"\n # request. 
If client is not ready yet, save the event instead of propagating\n # it immediately.\n if self._deferred_events is not None:\n self._deferred_events.append(event)\n log.debug("Propagation deferred.")\n else:\n self.client.channel.propagate(event)\n\n def _propagate_deferred_events(self):\n log.debug("Propagating deferred events to {0}...", self.client)\n for event in self._deferred_events:\n log.debug("Propagating deferred {0}", event.describe())\n self.client.channel.propagate(event)\n log.info("All deferred events propagated to {0}.", self.client)\n self._deferred_events = None\n\n # Generic event handler. There are no specific handlers for client events, because\n # there are no events from the client in DAP - but we propagate them if we can, in\n # case some events appear in future protocol versions.\n @message_handler\n def event(self, event):\n if self.server:\n self.server.channel.propagate(event)\n\n # Generic request handler, used if there's no specific handler below.\n @message_handler\n def request(self, request):\n return self.server.channel.delegate(request)\n\n @message_handler\n def initialize_request(self, request):\n if self._initialize_request is not None:\n raise request.isnt_valid("Session is already initialized")\n\n self.client_id = request("clientID", "")\n self.capabilities = self.Capabilities(self, request)\n self.expectations = self.Expectations(self, request)\n self._initialize_request = request\n\n exception_breakpoint_filters = [\n {\n "filter": "raised",\n "label": "Raised Exceptions",\n "default": False,\n "description": "Break whenever any exception is raised.",\n },\n {\n "filter": "uncaught",\n "label": "Uncaught Exceptions",\n "default": True,\n "description": "Break when the process is exiting due to unhandled exception.",\n },\n {\n "filter": "userUnhandled",\n "label": "User Uncaught Exceptions",\n "default": False,\n "description": "Break when exception escapes into library code.",\n },\n ]\n\n return {\n 
"supportsCompletionsRequest": True,\n "supportsConditionalBreakpoints": True,\n "supportsConfigurationDoneRequest": True,\n "supportsDebuggerProperties": True,\n "supportsDelayedStackTraceLoading": True,\n "supportsEvaluateForHovers": True,\n "supportsExceptionInfoRequest": True,\n "supportsExceptionOptions": True,\n "supportsFunctionBreakpoints": True,\n "supportsHitConditionalBreakpoints": True,\n "supportsLogPoints": True,\n "supportsModulesRequest": True,\n "supportsSetExpression": True,\n "supportsSetVariable": True,\n "supportsValueFormattingOptions": True,\n "supportsTerminateDebuggee": True,\n "supportsTerminateRequest": True,\n "supportsGotoTargetsRequest": True,\n "supportsClipboardContext": True,\n "exceptionBreakpointFilters": exception_breakpoint_filters,\n "supportsStepInTargetsRequest": True,\n }\n\n # Common code for "launch" and "attach" request handlers.\n #\n # See https://github.com/microsoft/vscode/issues/4902#issuecomment-368583522\n # for the sequence of request and events necessary to orchestrate the start.\n def _start_message_handler(f):\n @components.Component.message_handler\n def handle(self, request):\n assert request.is_request("launch", "attach")\n if self._initialize_request is None:\n raise request.isnt_valid("Session is not initialized yet")\n if self.launcher or self.server:\n raise request.isnt_valid("Session is already started")\n\n self.session.no_debug = request("noDebug", json.default(False))\n if self.session.no_debug:\n servers.dont_wait_for_first_connection()\n\n self.session.debug_options = debug_options = set(\n request("debugOptions", json.array(str))\n )\n\n f(self, request)\n if request.response is not None:\n return\n\n if self.server:\n self.server.initialize(self._initialize_request)\n self._initialize_request = None\n\n arguments = request.arguments\n if self.launcher:\n redirecting = arguments.get("console") == "internalConsole"\n if "RedirectOutput" in debug_options:\n # The launcher is doing output 
redirection, so we don't need the\n # server to do it, as well.\n arguments = dict(arguments)\n arguments["debugOptions"] = list(\n debug_options - {"RedirectOutput"}\n )\n redirecting = True\n\n if arguments.get("redirectOutput"):\n arguments = dict(arguments)\n del arguments["redirectOutput"]\n redirecting = True\n\n arguments["isOutputRedirected"] = redirecting\n\n # pydevd doesn't send "initialized", and responds to the start request\n # immediately, without waiting for "configurationDone". If it changes\n # to conform to the DAP spec, we'll need to defer waiting for response.\n try:\n self.server.channel.request(request.command, arguments)\n except messaging.NoMoreMessages:\n # Server closed connection before we could receive the response to\n # "attach" or "launch" - this can happen when debuggee exits shortly\n # after starting. It's not an error, but we can't do anything useful\n # here at this point, either, so just bail out.\n request.respond({})\n self.session.finalize(\n "{0} disconnected before responding to {1}".format(\n self.server,\n json.repr(request.command),\n )\n )\n return\n except messaging.MessageHandlingError as exc:\n exc.propagate(request)\n\n if self.session.no_debug:\n self.start_request = request\n self.has_started = True\n request.respond({})\n self._propagate_deferred_events()\n return\n\n # Let the client know that it can begin configuring the adapter.\n self.channel.send_event("initialized")\n\n self.start_request = request\n return messaging.NO_RESPONSE # will respond on "configurationDone"\n\n return handle\n\n @_start_message_handler\n def launch_request(self, request):\n from debugpy.adapter import launchers\n\n if self.session.id != 1 or len(servers.connections()):\n raise request.cant_handle('"attach" expected')\n\n debug_options = set(request("debugOptions", json.array(str)))\n\n # Handling of properties that can also be specified as legacy "debugOptions" flags.\n # If property is explicitly set to false, but the flag is in 
"debugOptions", treat\n # it as an error. Returns None if the property wasn't explicitly set either way.\n def property_or_debug_option(prop_name, flag_name):\n assert prop_name[0].islower() and flag_name[0].isupper()\n\n value = request(prop_name, bool, optional=True)\n if value == ():\n value = None\n\n if flag_name in debug_options:\n if value is False:\n raise request.isnt_valid(\n '{0}:false and "debugOptions":[{1}] are mutually exclusive',\n json.repr(prop_name),\n json.repr(flag_name),\n )\n value = True\n\n return value\n\n # "pythonPath" is a deprecated legacy spelling. If "python" is missing, then try\n # the alternative. But if both are missing, the error message should say "python".\n python_key = "python"\n if python_key in request:\n if "pythonPath" in request:\n raise request.isnt_valid(\n '"pythonPath" is not valid if "python" is specified'\n )\n elif "pythonPath" in request:\n python_key = "pythonPath"\n python = request(python_key, json.array(str, vectorize=True, size=(0,)))\n if not len(python):\n python = [sys.executable]\n\n python += request("pythonArgs", json.array(str, size=(0,)))\n request.arguments["pythonArgs"] = python[1:]\n request.arguments["python"] = python\n\n launcher_python = request("debugLauncherPython", str, optional=True)\n if launcher_python == ():\n launcher_python = python[0]\n\n program = module = code = ()\n if "program" in request:\n program = request("program", str)\n args = [program]\n request.arguments["processName"] = program\n if "module" in request:\n module = request("module", str)\n args = ["-m", module]\n request.arguments["processName"] = module\n if "code" in request:\n code = request("code", json.array(str, vectorize=True, size=(1,)))\n args = ["-c", "\n".join(code)]\n request.arguments["processName"] = "-c"\n\n num_targets = len([x for x in (program, module, code) if x != ()])\n if num_targets == 0:\n raise request.isnt_valid(\n 'either "program", "module", or "code" must be specified'\n )\n elif num_targets 
!= 1:\n raise request.isnt_valid(\n '"program", "module", and "code" are mutually exclusive'\n )\n\n console = request(\n "console",\n json.enum(\n "internalConsole",\n "integratedTerminal",\n "externalTerminal",\n optional=True,\n ),\n )\n console_title = request("consoleTitle", json.default("Python Debug Console"))\n\n # Propagate "args" via CLI so that shell expansion can be applied if requested.\n target_args = request("args", json.array(str, vectorize=True))\n args += target_args\n\n # If "args" was a single string rather than an array, shell expansion must be applied.\n shell_expand_args = len(target_args) > 0 and isinstance(\n request.arguments["args"], str\n )\n if shell_expand_args:\n if not self.capabilities["supportsArgsCanBeInterpretedByShell"]:\n raise request.isnt_valid(\n 'Shell expansion in "args" is not supported by the client'\n )\n if console == "internalConsole":\n raise request.isnt_valid(\n 'Shell expansion in "args" is not available for "console":"internalConsole"'\n )\n\n cwd = request("cwd", str, optional=True)\n if cwd == ():\n # If it's not specified, but we're launching a file rather than a module,\n # and the specified path has a directory in it, use that.\n cwd = None if program == () else (os.path.dirname(program) or None)\n\n sudo = bool(property_or_debug_option("sudo", "Sudo"))\n if sudo and sys.platform == "win32":\n raise request.cant_handle('"sudo":true is not supported on Windows.')\n\n on_terminate = request("onTerminate", str, optional=True)\n\n if on_terminate:\n self._forward_terminate_request = on_terminate == "KeyboardInterrupt"\n\n launcher_path = request("debugLauncherPath", os.path.dirname(launcher.__file__))\n adapter_host = request("debugAdapterHost", "127.0.0.1")\n\n try:\n servers.serve(adapter_host)\n except Exception as exc:\n raise request.cant_handle(\n "{0} couldn't create listener socket for servers: {1}",\n self.session,\n exc,\n )\n\n launchers.spawn_debuggee(\n self.session,\n request,\n 
[launcher_python],\n launcher_path,\n adapter_host,\n args,\n shell_expand_args,\n cwd,\n console,\n console_title,\n sudo,\n )\n\n @_start_message_handler\n def attach_request(self, request):\n if self.session.no_debug:\n raise request.isnt_valid('"noDebug" is not supported for "attach"')\n\n host = request("host", str, optional=True)\n port = request("port", int, optional=True)\n listen = request("listen", dict, optional=True)\n connect = request("connect", dict, optional=True)\n pid = request("processId", (int, str), optional=True)\n sub_pid = request("subProcessId", int, optional=True)\n on_terminate = request("onTerminate", bool, optional=True)\n\n if on_terminate:\n self._forward_terminate_request = on_terminate == "KeyboardInterrupt"\n\n if host != () or port != ():\n if listen != ():\n raise request.isnt_valid(\n '"listen" and "host"/"port" are mutually exclusive'\n )\n if connect != ():\n raise request.isnt_valid(\n '"connect" and "host"/"port" are mutually exclusive'\n )\n if listen != ():\n if connect != ():\n raise request.isnt_valid(\n '"listen" and "connect" are mutually exclusive'\n )\n if pid != ():\n raise request.isnt_valid(\n '"listen" and "processId" are mutually exclusive'\n )\n if sub_pid != ():\n raise request.isnt_valid(\n '"listen" and "subProcessId" are mutually exclusive'\n )\n if pid != () and sub_pid != ():\n raise request.isnt_valid(\n '"processId" and "subProcessId" are mutually exclusive'\n )\n\n if listen != ():\n if servers.is_serving():\n raise request.isnt_valid(\n 'Multiple concurrent "listen" sessions are not supported'\n )\n host = listen("host", "127.0.0.1")\n port = listen("port", int)\n adapter.access_token = None\n self.restart_requested = request("restart", False)\n host, port = servers.serve(host, port)\n else:\n if not servers.is_serving():\n servers.serve()\n host, port = servers.listener.getsockname()\n\n # There are four distinct possibilities here.\n #\n # If "processId" is specified, this is attach-by-PID. 
We need to inject the\n # debug server into the designated process, and then wait until it connects\n # back to us. Since the injected server can crash, there must be a timeout.\n #\n # If "subProcessId" is specified, this is attach to a known subprocess, likely\n # in response to a "debugpyAttach" event. If so, the debug server should be\n # connected already, and thus the wait timeout is zero.\n #\n # If "listen" is specified, this is attach-by-socket with the server expected\n # to connect to the adapter via debugpy.connect(). There is no PID known in\n # advance, so just wait until the first server connection indefinitely, with\n # no timeout.\n #\n # If "connect" is specified, this is attach-by-socket in which the server has\n # spawned the adapter via debugpy.listen(). There is no PID known to the client\n # in advance, but the server connection should be either be there already, or\n # the server should be connecting shortly, so there must be a timeout.\n #\n # In the last two cases, if there's more than one server connection already,\n # this is a multiprocess re-attach. 
The client doesn't know the PID, so we just\n # connect it to the oldest server connection that we have - in most cases, it\n # will be the one for the root debuggee process, but if it has exited already,\n # it will be some subprocess.\n if pid != ():\n if not isinstance(pid, int):\n try:\n pid = int(pid)\n except Exception:\n raise request.isnt_valid('"processId" must be parseable as int')\n debugpy_args = request("debugpyArgs", json.array(str))\n\n def on_output(category, output):\n self.channel.send_event(\n "output",\n {\n "category": category,\n "output": output,\n },\n )\n\n try:\n servers.inject(pid, debugpy_args, on_output)\n except Exception as e:\n log.swallow_exception()\n self.session.finalize(\n "Error when trying to attach to PID:\n%s" % (str(e),)\n )\n return\n\n timeout = common.PROCESS_SPAWN_TIMEOUT\n pred = lambda conn: conn.pid == pid\n else:\n if sub_pid == ():\n pred = lambda conn: True\n timeout = common.PROCESS_SPAWN_TIMEOUT if listen == () else None\n else:\n pred = lambda conn: conn.pid == sub_pid\n timeout = 0\n\n self.channel.send_event("debugpyWaitingForServer", {"host": host, "port": port})\n conn = servers.wait_for_connection(self.session, pred, timeout)\n if conn is None:\n if sub_pid != ():\n # If we can't find a matching subprocess, it's not always an error -\n # it might have already exited, or didn't even get a chance to connect.\n # To prevent the client from complaining, pretend that the "attach"\n # request was successful, but that the session terminated immediately.\n request.respond({})\n self.session.finalize(\n 'No known subprocess with "subProcessId":{0}'.format(sub_pid)\n )\n return\n\n raise request.cant_handle(\n (\n "Timed out waiting for debug server to connect."\n if timeout\n else "There is no debug server connected to this adapter."\n ),\n sub_pid,\n )\n\n try:\n conn.attach_to_session(self.session)\n except ValueError:\n request.cant_handle("{0} is already being debugged.", conn)\n\n @message_handler\n def 
configurationDone_request(self, request):\n if self.start_request is None or self.has_started:\n request.cant_handle(\n '"configurationDone" is only allowed during handling of a "launch" '\n 'or an "attach" request'\n )\n\n try:\n self.has_started = True\n try:\n result = self.server.channel.delegate(request)\n except messaging.NoMoreMessages:\n # Server closed connection before we could receive the response to\n # "configurationDone" - this can happen when debuggee exits shortly\n # after starting. It's not an error, but we can't do anything useful\n # here at this point, either, so just bail out.\n request.respond({})\n self.start_request.respond({})\n self.session.finalize(\n "{0} disconnected before responding to {1}".format(\n self.server,\n json.repr(request.command),\n )\n )\n return\n else:\n request.respond(result)\n except messaging.MessageHandlingError as exc:\n self.start_request.cant_handle(str(exc))\n finally:\n if self.start_request.response is None:\n self.start_request.respond({})\n self._propagate_deferred_events()\n\n # Notify the client of any child processes of the debuggee that aren't already\n # being debugged.\n for conn in servers.connections():\n if conn.server is None and conn.ppid == self.session.pid:\n self.notify_of_subprocess(conn)\n\n @message_handler\n def evaluate_request(self, request):\n propagated_request = self.server.channel.propagate(request)\n\n def handle_response(response):\n request.respond(response.body)\n\n propagated_request.on_response(handle_response)\n\n return messaging.NO_RESPONSE\n\n @message_handler\n def pause_request(self, request):\n request.arguments["threadId"] = "*"\n return self.server.channel.delegate(request)\n\n @message_handler\n def continue_request(self, request):\n request.arguments["threadId"] = "*"\n\n try:\n return self.server.channel.delegate(request)\n except messaging.NoMoreMessages:\n # pydevd can sometimes allow the debuggee to exit before the queued\n # "continue" response gets sent. 
Thus, a failed "continue" response\n # indicating that the server disconnected should be treated as success.\n return {"allThreadsContinued": True}\n\n @message_handler\n def debugpySystemInfo_request(self, request):\n result = {"debugpy": {"version": debugpy.__version__}}\n if self.server:\n try:\n pydevd_info = self.server.channel.request("pydevdSystemInfo")\n except Exception:\n # If the server has already disconnected, or couldn't handle it,\n # report what we've got.\n pass\n else:\n result.update(pydevd_info)\n return result\n\n @message_handler\n def terminate_request(self, request):\n # If user specifically requests to terminate, it means that they don't want\n # debug session auto-restart kicking in.\n self.restart_requested = False\n\n if self._forward_terminate_request:\n # According to the spec, terminate should try to do a gracefull shutdown.\n # We do this in the server by interrupting the main thread with a Ctrl+C.\n # To force the kill a subsequent request would do a disconnect.\n #\n # We only do this if the onTerminate option is set though (the default\n # is a hard-kill for the process and subprocesses).\n return self.server.channel.delegate(request)\n\n self.session.finalize('client requested "terminate"', terminate_debuggee=True)\n return {}\n\n @message_handler\n def disconnect_request(self, request):\n # If user specifically requests to disconnect, it means that they don't want\n # debug session auto-restart kicking in.\n self.restart_requested = False\n\n terminate_debuggee = request("terminateDebuggee", bool, optional=True)\n if terminate_debuggee == ():\n terminate_debuggee = None\n self.session.finalize('client requested "disconnect"', terminate_debuggee)\n request.respond({})\n\n if self.using_stdio:\n # There's no way for the client to reconnect to this adapter once it disconnects\n # from this session, so close any remaining server connections.\n servers.stop_serving()\n log.info("{0} disconnected from stdio; closing remaining server 
connections.", self)\n for conn in servers.connections():\n try:\n conn.channel.close()\n except Exception:\n log.swallow_exception()\n\n # Close the client channel since we disconnected from the client.\n try:\n self.channel.close()\n except Exception:\n log.swallow_exception(level="warning")\n\n def disconnect(self):\n super().disconnect()\n\n def report_sockets(self):\n sockets = [\n {\n "host": host,\n "port": port,\n "internal": listener is not clients.listener,\n }\n for listener in [clients.listener, launchers.listener, servers.listener]\n if listener is not None\n for (host, port) in [listener.getsockname()]\n ]\n self.channel.send_event(\n "debugpySockets",\n {\n "sockets": sockets\n },\n )\n\n def notify_of_subprocess(self, conn):\n log.info("{1} is a subprocess of {0}.", self, conn)\n with self.session:\n if self.start_request is None or conn in self.known_subprocesses:\n return\n if "processId" in self.start_request.arguments:\n log.warning(\n "Not reporting subprocess for {0}, because the parent process "\n 'was attached to using "processId" rather than "port".',\n self.session,\n )\n return\n\n log.info("Notifying {0} about {1}.", self, conn)\n body = dict(self.start_request.arguments)\n self.known_subprocesses.add(conn)\n self.session.notify_changed()\n\n for key in "processId", "listen", "preLaunchTask", "postDebugTask", "request", "restart":\n body.pop(key, None)\n\n body["name"] = "Subprocess {0}".format(conn.pid)\n body["subProcessId"] = conn.pid\n\n for key in "args", "processName", "pythonArgs":\n body.pop(key, None)\n\n host = body.pop("host", None)\n port = body.pop("port", None)\n if "connect" not in body:\n body["connect"] = {}\n if "host" not in body["connect"]:\n body["connect"]["host"] = host if host is not None else "127.0.0.1"\n if "port" not in body["connect"]:\n if port is None:\n _, port = listener.getsockname()\n body["connect"]["port"] = port\n\n if self.capabilities["supportsStartDebuggingRequest"]:\n 
self.channel.request("startDebugging", {\n "request": "attach",\n "configuration": body,\n })\n else:\n body["request"] = "attach"\n self.channel.send_event("debugpyAttach", body)\n\n\ndef serve(host, port):\n global listener\n listener = sockets.serve("Client", Client, host, port)\n sessions.report_sockets()\n return listener.getsockname()\n\n\ndef stop_serving():\n global listener\n if listener is not None:\n try:\n listener.close()\n except Exception:\n log.swallow_exception(level="warning")\n listener = None\n sessions.report_sockets()\n
|
.venv\Lib\site-packages\debugpy\adapter\clients.py
|
clients.py
|
Python
| 32,379 | 0.95 | 0.184111 | 0.132653 |
vue-tools
| 339 |
2024-06-22T13:07:07.470129
|
MIT
| false |
31cbdee5a09ff1d84e0aca31ed89611b
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport functools\n\nfrom debugpy.common import json, log, messaging, util\n\n\nACCEPT_CONNECTIONS_TIMEOUT = 60\n\n\nclass ComponentNotAvailable(Exception):\n def __init__(self, type):\n super().__init__(f"{type.__name__} is not available")\n\n\nclass Component(util.Observable):\n """A component managed by a debug adapter: client, launcher, or debug server.\n\n Every component belongs to a Session, which is used for synchronization and\n shared data.\n\n Every component has its own message channel, and provides message handlers for\n that channel. All handlers should be decorated with @Component.message_handler,\n which ensures that Session is locked for the duration of the handler. Thus, only\n one handler is running at any given time across all components, unless the lock\n is released explicitly or via Session.wait_for().\n\n Components report changes to their attributes to Session, allowing one component\n to wait_for() a change caused by another component.\n """\n\n def __init__(self, session, stream=None, channel=None):\n assert (stream is None) ^ (channel is None)\n\n try:\n lock_held = session.lock.acquire(blocking=False)\n assert lock_held, "__init__ of a Component subclass must lock its Session"\n finally:\n session.lock.release()\n\n super().__init__()\n\n self.session = session\n\n if channel is None:\n stream.name = str(self)\n channel = messaging.JsonMessageChannel(stream, self)\n channel.start()\n else:\n channel.name = channel.stream.name = str(self)\n channel.handlers = self\n self.channel = channel\n self.is_connected = True\n\n # Do this last to avoid triggering useless notifications for assignments above.\n self.observers += [lambda *_: self.session.notify_changed()]\n\n def __str__(self):\n return f"{type(self).__name__}[{self.session.id}]"\n\n @property\n def client(self):\n return 
self.session.client\n\n @property\n def launcher(self):\n return self.session.launcher\n\n @property\n def server(self):\n return self.session.server\n\n def wait_for(self, *args, **kwargs):\n return self.session.wait_for(*args, **kwargs)\n\n @staticmethod\n def message_handler(f):\n """Applied to a message handler to automatically lock and unlock the session\n for its duration, and to validate the session state.\n\n If the handler raises ComponentNotAvailable or JsonIOError, converts it to\n Message.cant_handle().\n """\n\n @functools.wraps(f)\n def lock_and_handle(self, message):\n try:\n with self.session:\n return f(self, message)\n except ComponentNotAvailable as exc:\n raise message.cant_handle("{0}", exc, silent=True)\n except messaging.MessageHandlingError as exc:\n if exc.cause is message:\n raise\n else:\n exc.propagate(message)\n except messaging.JsonIOError as exc:\n raise message.cant_handle(\n "{0} disconnected unexpectedly", exc.stream.name, silent=True\n )\n\n return lock_and_handle\n\n def disconnect(self):\n with self.session:\n self.is_connected = False\n self.session.finalize("{0} has disconnected".format(self))\n\n\ndef missing(session, type):\n class Missing(object):\n """A dummy component that raises ComponentNotAvailable whenever some\n attribute is accessed on it.\n """\n\n __getattr__ = __setattr__ = lambda self, *_: report()\n __bool__ = __nonzero__ = lambda self: False\n\n def report():\n try:\n raise ComponentNotAvailable(type)\n except Exception as exc:\n log.reraise_exception("{0} in {1}", exc, session)\n\n return Missing()\n\n\nclass Capabilities(dict):\n """A collection of feature flags for a component. Corresponds to JSON properties\n in the DAP "initialize" request or response, other than those that identify the\n party.\n """\n\n PROPERTIES = {}\n """JSON property names and default values for the the capabilities represented\n by instances of this class. 
Keys are names, and values are either default values\n or validators.\n\n If the value is callable, it must be a JSON validator; see debugpy.common.json for\n details. If the value is not callable, it is as if json.default(value) validator\n was used instead.\n """\n\n def __init__(self, component, message):\n """Parses an "initialize" request or response and extracts the feature flags.\n\n For every "X" in self.PROPERTIES, sets self["X"] to the corresponding value\n from message.payload if it's present there, or to the default value otherwise.\n """\n\n assert message.is_request("initialize") or message.is_response("initialize")\n\n self.component = component\n\n payload = message.payload\n for name, validate in self.PROPERTIES.items():\n value = payload.get(name, ())\n if not callable(validate):\n validate = json.default(validate)\n\n try:\n value = validate(value)\n except Exception as exc:\n raise message.isnt_valid("{0} {1}", json.repr(name), exc)\n\n assert (\n value != ()\n ), f"{validate} must provide a default value for missing properties."\n self[name] = value\n\n log.debug("{0}", self)\n\n def __repr__(self):\n return f"{type(self).__name__}: {json.repr(dict(self))}"\n\n def require(self, *keys):\n for key in keys:\n if not self[key]:\n raise messaging.MessageHandlingError(\n f"{self.component} does not have capability {json.repr(key)}",\n )\n
|
.venv\Lib\site-packages\debugpy\adapter\components.py
|
components.py
|
Python
| 6,264 | 0.95 | 0.229508 | 0.029197 |
node-utils
| 47 |
2024-01-06T19:14:59.858575
|
GPL-3.0
| false |
065a1cd157ceb72d46403025cd3be119
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport os\nimport subprocess\nimport sys\n\nfrom debugpy import adapter, common\nfrom debugpy.common import log, messaging, sockets\nfrom debugpy.adapter import components, servers, sessions\n\nlistener = None\n\n\nclass Launcher(components.Component):\n """Handles the launcher side of a debug session."""\n\n message_handler = components.Component.message_handler\n\n def __init__(self, session, stream):\n with session:\n assert not session.launcher\n super().__init__(session, stream)\n\n self.pid = None\n """Process ID of the debuggee process, as reported by the launcher."""\n\n self.exit_code = None\n """Exit code of the debuggee process."""\n\n session.launcher = self\n\n @message_handler\n def process_event(self, event):\n self.pid = event("systemProcessId", int)\n self.client.propagate_after_start(event)\n\n @message_handler\n def output_event(self, event):\n self.client.propagate_after_start(event)\n\n @message_handler\n def exited_event(self, event):\n self.exit_code = event("exitCode", int)\n # We don't want to tell the client about this just yet, because it will then\n # want to disconnect, and the launcher might still be waiting for keypress\n # (if wait-on-exit was enabled). 
Instead, we'll report the event when we\n # receive "terminated" from the launcher, right before it exits.\n\n @message_handler\n def terminated_event(self, event):\n try:\n self.client.channel.send_event("exited", {"exitCode": self.exit_code})\n except Exception:\n pass\n self.channel.close()\n\n def terminate_debuggee(self):\n with self.session:\n if self.exit_code is None:\n try:\n self.channel.request("terminate")\n except Exception:\n pass\n\n\ndef spawn_debuggee(\n session,\n start_request,\n python,\n launcher_path,\n adapter_host,\n args,\n shell_expand_args,\n cwd,\n console,\n console_title,\n sudo,\n):\n global listener\n\n # -E tells sudo to propagate environment variables to the target process - this\n # is necessary for launcher to get DEBUGPY_LAUNCHER_PORT and DEBUGPY_LOG_DIR.\n cmdline = ["sudo", "-E"] if sudo else []\n cmdline += python\n cmdline += [launcher_path]\n env = {}\n\n arguments = dict(start_request.arguments)\n if not session.no_debug:\n _, arguments["port"] = servers.listener.getsockname()\n arguments["adapterAccessToken"] = adapter.access_token\n\n def on_launcher_connected(sock):\n listener.close()\n stream = messaging.JsonIOStream.from_socket(sock)\n Launcher(session, stream)\n\n try:\n listener = sockets.serve(\n "Launcher", on_launcher_connected, adapter_host, backlog=1\n )\n except Exception as exc:\n raise start_request.cant_handle(\n "{0} couldn't create listener socket for launcher: {1}", session, exc\n )\n sessions.report_sockets()\n\n try:\n launcher_host, launcher_port = listener.getsockname()\n launcher_addr = (\n launcher_port\n if launcher_host == "127.0.0.1"\n else f"{launcher_host}:{launcher_port}"\n )\n cmdline += [str(launcher_addr), "--"]\n cmdline += args\n\n if log.log_dir is not None:\n env[str("DEBUGPY_LOG_DIR")] = log.log_dir\n if log.stderr.levels != {"warning", "error"}:\n env[str("DEBUGPY_LOG_STDERR")] = str(" ".join(log.stderr.levels))\n\n if console == "internalConsole":\n log.info("{0} spawning launcher: 
{1!r}", session, cmdline)\n try:\n # If we are talking to the client over stdio, sys.stdin and sys.stdout\n # are redirected to avoid mangling the DAP message stream. Make sure\n # the launcher also respects that.\n subprocess.Popen(\n cmdline,\n cwd=cwd,\n env=dict(list(os.environ.items()) + list(env.items())),\n stdin=sys.stdin,\n stdout=sys.stdout,\n stderr=sys.stderr,\n )\n except Exception as exc:\n raise start_request.cant_handle("Failed to spawn launcher: {0}", exc)\n else:\n log.info('{0} spawning launcher via "runInTerminal" request.', session)\n session.client.capabilities.require("supportsRunInTerminalRequest")\n kinds = {"integratedTerminal": "integrated", "externalTerminal": "external"}\n request_args = {\n "kind": kinds[console],\n "title": console_title,\n "args": cmdline,\n "env": env,\n }\n if cwd is not None:\n request_args["cwd"] = cwd\n if shell_expand_args:\n request_args["argsCanBeInterpretedByShell"] = True\n try:\n # It is unspecified whether this request receives a response immediately, or only\n # after the spawned command has completed running, so do not block waiting for it.\n session.client.channel.send_request("runInTerminal", request_args)\n except messaging.MessageHandlingError as exc:\n exc.propagate(start_request)\n\n # If using sudo, it might prompt for password, and launcher won't start running\n # until the user enters it, so don't apply timeout in that case.\n if not session.wait_for(\n lambda: session.launcher,\n timeout=(None if sudo else common.PROCESS_SPAWN_TIMEOUT),\n ):\n raise start_request.cant_handle("Timed out waiting for launcher to connect")\n\n try:\n session.launcher.channel.request(start_request.command, arguments)\n except messaging.MessageHandlingError as exc:\n exc.propagate(start_request)\n\n if not session.wait_for(\n lambda: session.launcher.pid is not None,\n timeout=common.PROCESS_SPAWN_TIMEOUT,\n ):\n raise start_request.cant_handle(\n 'Timed out waiting for "process" event from launcher'\n )\n\n if 
session.no_debug:\n return\n\n # Wait for the first incoming connection regardless of the PID - it won't\n # necessarily match due to the use of stubs like py.exe or "conda run".\n conn = servers.wait_for_connection(\n session, lambda conn: True, timeout=common.PROCESS_SPAWN_TIMEOUT\n )\n if conn is None:\n raise start_request.cant_handle("Timed out waiting for debuggee to spawn")\n conn.attach_to_session(session)\n\n finally:\n listener.close()\n listener = None\n sessions.report_sockets()\n
|
.venv\Lib\site-packages\debugpy\adapter\launchers.py
|
launchers.py
|
Python
| 7,198 | 0.95 | 0.207071 | 0.107143 |
awesome-app
| 127 |
2023-09-07T18:42:21.524194
|
Apache-2.0
| false |
871c8f9d67744a76695350cf2ddccfab
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport itertools\nimport os\nimport signal\nimport threading\nimport time\n\nfrom debugpy import common\nfrom debugpy.common import log, util\nfrom debugpy.adapter import components, launchers, servers\n\n\n_lock = threading.RLock()\n_sessions = set()\n_sessions_changed = threading.Event()\n\n\nclass Session(util.Observable):\n """A debug session involving a client, an adapter, a launcher, and a debug server.\n\n The client and the adapter are always present, and at least one of launcher and debug\n server is present, depending on the scenario.\n """\n\n _counter = itertools.count(1)\n\n def __init__(self):\n from debugpy.adapter import clients\n\n super().__init__()\n\n self.lock = threading.RLock()\n self.id = next(self._counter)\n self._changed_condition = threading.Condition(self.lock)\n\n self.client = components.missing(self, clients.Client)\n """The client component. Always present."""\n\n self.launcher = components.missing(self, launchers.Launcher)\n """The launcher componet. Always present in "launch" sessions, and never\n present in "attach" sessions.\n """\n\n self.server = components.missing(self, servers.Server)\n """The debug server component. 
Always present, unless this is a "launch"\n session with "noDebug".\n """\n\n self.no_debug = None\n """Whether this is a "noDebug" session."""\n\n self.pid = None\n """Process ID of the debuggee process."""\n\n self.debug_options = {}\n """Debug options as specified by "launch" or "attach" request."""\n\n self.is_finalizing = False\n """Whether finalize() has been invoked."""\n\n self.observers += [lambda *_: self.notify_changed()]\n\n def __str__(self):\n return f"Session[{self.id}]"\n\n def __enter__(self):\n """Lock the session for exclusive access."""\n self.lock.acquire()\n return self\n\n def __exit__(self, exc_type, exc_value, exc_tb):\n """Unlock the session."""\n self.lock.release()\n\n def register(self):\n with _lock:\n _sessions.add(self)\n _sessions_changed.set()\n\n def notify_changed(self):\n with self:\n self._changed_condition.notify_all()\n\n # A session is considered ended once all components disconnect, and there\n # are no further incoming messages from anything to handle.\n components = self.client, self.launcher, self.server\n if all(not com or not com.is_connected for com in components):\n with _lock:\n if self in _sessions:\n log.info("{0} has ended.", self)\n _sessions.remove(self)\n _sessions_changed.set()\n\n def wait_for(self, predicate, timeout=None):\n """Waits until predicate() becomes true.\n\n The predicate is invoked with the session locked. If satisfied, the method\n returns immediately. Otherwise, the lock is released (even if it was held\n at entry), and the method blocks waiting for some attribute of either self,\n self.client, self.server, or self.launcher to change. 
On every change, session\n is re-locked and predicate is re-evaluated, until it is satisfied.\n\n While the session is unlocked, message handlers for components other than\n the one that is waiting can run, but message handlers for that one are still\n blocked.\n\n If timeout is not None, the method will unblock and return after that many\n seconds regardless of whether the predicate was satisfied. The method returns\n False if it timed out, and True otherwise.\n """\n\n def wait_for_timeout():\n time.sleep(timeout)\n wait_for_timeout.timed_out = True\n self.notify_changed()\n\n wait_for_timeout.timed_out = False\n if timeout is not None:\n thread = threading.Thread(\n target=wait_for_timeout, name="Session.wait_for() timeout"\n )\n thread.daemon = True\n thread.start()\n\n with self:\n while not predicate():\n if wait_for_timeout.timed_out:\n return False\n self._changed_condition.wait()\n return True\n\n def finalize(self, why, terminate_debuggee=None):\n """Finalizes the debug session.\n\n If the server is present, sends "disconnect" request with "terminateDebuggee"\n set as specified request to it; waits for it to disconnect, allowing any\n remaining messages from it to be handled; and closes the server channel.\n\n If the launcher is present, sends "terminate" request to it, regardless of the\n value of terminate; waits for it to disconnect, allowing any remaining messages\n from it to be handled; and closes the launcher channel.\n\n If the client is present, sends "terminated" event to it.\n\n If terminate_debuggee=None, it is treated as True if the session has a Launcher\n component, and False otherwise.\n """\n\n if self.is_finalizing:\n return\n self.is_finalizing = True\n log.info("{0}; finalizing {1}.", why, self)\n\n if terminate_debuggee is None:\n terminate_debuggee = bool(self.launcher)\n\n try:\n self._finalize(why, terminate_debuggee)\n except Exception:\n # Finalization should never fail, and if it does, the session is in an\n # indeterminate and 
likely unrecoverable state, so just fail fast.\n log.swallow_exception("Fatal error while finalizing {0}", self)\n os._exit(1)\n\n log.info("{0} finalized.", self)\n\n def _finalize(self, why, terminate_debuggee):\n # If the client started a session, and then disconnected before issuing "launch"\n # or "attach", the main thread will be blocked waiting for the first server\n # connection to come in - unblock it, so that we can exit.\n servers.dont_wait_for_first_connection()\n\n if self.server:\n if self.server.is_connected:\n if terminate_debuggee and self.launcher and self.launcher.is_connected:\n # If we were specifically asked to terminate the debuggee, and we\n # can ask the launcher to kill it, do so instead of disconnecting\n # from the server to prevent debuggee from running any more code.\n self.launcher.terminate_debuggee()\n else:\n # Otherwise, let the server handle it the best it can.\n try:\n self.server.channel.request(\n "disconnect", {"terminateDebuggee": terminate_debuggee}\n )\n except Exception:\n pass\n self.server.detach_from_session()\n\n if self.launcher and self.launcher.is_connected:\n # If there was a server, we just disconnected from it above, which should\n # cause the debuggee process to exit, unless it is being replaced in situ -\n # so let's wait for that first.\n if self.server and not self.server.connection.process_replaced:\n log.info('{0} waiting for "exited" event...', self)\n if not self.wait_for(\n lambda: self.launcher.exit_code is not None,\n timeout=common.PROCESS_EXIT_TIMEOUT,\n ):\n log.warning('{0} timed out waiting for "exited" event.', self)\n\n # Terminate the debuggee process if it's still alive for any reason -\n # whether it's because there was no server to handle graceful shutdown,\n # or because the server couldn't handle it for some reason - unless the\n # process is being replaced in situ.\n if not (self.server and self.server.connection.process_replaced):\n self.launcher.terminate_debuggee()\n\n # Wait until 
the launcher message queue fully drains. There is no timeout\n # here, because the final "terminated" event will only come after reading\n # user input in wait-on-exit scenarios. In addition, if the process was\n # replaced in situ, the launcher might still have more output to capture\n # from its replacement.\n log.info("{0} waiting for {1} to disconnect...", self, self.launcher)\n self.wait_for(lambda: not self.launcher.is_connected)\n\n try:\n self.launcher.channel.close()\n except Exception:\n log.swallow_exception()\n\n if self.client:\n if self.client.is_connected:\n # Tell the client that debugging is over, but don't close the channel until it\n # tells us to, via the "disconnect" request.\n body = {}\n if self.client.restart_requested:\n body["restart"] = True\n try:\n self.client.channel.send_event("terminated", body)\n except Exception:\n pass\n\n if (\n self.client.start_request is not None\n and self.client.start_request.command == "launch"\n and not (self.server and self.server.connection.process_replaced)\n ):\n servers.stop_serving()\n log.info(\n '"launch" session ended - killing remaining debuggee processes.'\n )\n\n pids_killed = set()\n if self.launcher and self.launcher.pid is not None:\n # Already killed above.\n pids_killed.add(self.launcher.pid)\n\n while True:\n conns = [\n conn\n for conn in servers.connections()\n if conn.pid not in pids_killed\n ]\n if not len(conns):\n break\n for conn in conns:\n log.info("Killing {0}", conn)\n try:\n os.kill(conn.pid, signal.SIGTERM)\n except Exception:\n log.swallow_exception("Failed to kill {0}", conn)\n pids_killed.add(conn.pid)\n\n\ndef get(pid):\n with _lock:\n return next((session for session in _sessions if session.pid == pid), None)\n\n\ndef wait_until_ended():\n """Blocks until all sessions have ended.\n\n A session ends when all components that it manages disconnect from it.\n """\n while True:\n with _lock:\n if not len(_sessions):\n return\n _sessions_changed.clear()\n 
_sessions_changed.wait()\n\n\ndef report_sockets():\n if not _sessions:\n return\n session = sorted(_sessions, key=lambda session: session.id)[0]\n client = session.client\n if client is not None:\n client.report_sockets()\n
|
.venv\Lib\site-packages\debugpy\adapter\sessions.py
|
sessions.py
|
Python
| 11,522 | 0.95 | 0.242321 | 0.123404 |
node-utils
| 289 |
2024-03-23T21:30:51.376761
|
GPL-3.0
| false |
f91a4232467634b1cd32d7d9cf82af20
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import annotations\nimport typing\n\nif typing.TYPE_CHECKING:\n __all__: list[str]\n\n__all__ = []\n\naccess_token = None\n"""Access token used to authenticate with this adapter."""\n
|
.venv\Lib\site-packages\debugpy\adapter\__init__.py
|
__init__.py
|
Python
| 360 | 0.95 | 0.142857 | 0.3 |
awesome-app
| 142 |
2024-09-23T01:56:12.819801
|
GPL-3.0
| false |
70341ffae987d05ca31ed82260820906
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport argparse\nimport atexit\nimport codecs\nimport locale\nimport os\nimport sys\n\n# WARNING: debugpy and submodules must not be imported on top level in this module,\n# and should be imported locally inside main() instead.\n\n\ndef main():\n args = _parse_argv(sys.argv)\n\n # If we're talking DAP over stdio, stderr is not guaranteed to be read from,\n # so disable it to avoid the pipe filling and locking up. This must be done\n # as early as possible, before the logging module starts writing to it.\n if args.port is None:\n sys.stderr = stderr = open(os.devnull, "w")\n atexit.register(stderr.close)\n\n from debugpy import adapter\n from debugpy.common import json, log, sockets\n from debugpy.adapter import clients, servers, sessions\n\n if args.for_server is not None:\n if os.name == "posix":\n # On POSIX, we need to leave the process group and its session, and then\n # daemonize properly by double-forking (first fork already happened when\n # this process was spawned).\n # NOTE: if process is already the session leader, then\n # setsid would fail with `operation not permitted`\n if os.getsid(os.getpid()) != os.getpid():\n os.setsid()\n if os.fork() != 0:\n sys.exit(0)\n\n for stdio in sys.stdin, sys.stdout, sys.stderr:\n if stdio is not None:\n stdio.close()\n\n if args.log_stderr:\n log.stderr.levels |= set(log.LEVELS)\n if args.log_dir is not None:\n log.log_dir = args.log_dir\n\n log.to_file(prefix="debugpy.adapter")\n log.describe_environment("debugpy.adapter startup environment:")\n\n servers.access_token = args.server_access_token\n if args.for_server is None:\n adapter.access_token = codecs.encode(os.urandom(32), "hex").decode("ascii")\n\n endpoints = {}\n try:\n client_host, client_port = clients.serve(args.host, args.port)\n except Exception as exc:\n if args.for_server is None:\n raise\n 
endpoints = {"error": "Can't listen for client connections: " + str(exc)}\n else:\n endpoints["client"] = {"host": client_host, "port": client_port}\n\n if args.for_server is not None:\n try:\n server_host, server_port = servers.serve()\n except Exception as exc:\n endpoints = {"error": "Can't listen for server connections: " + str(exc)}\n else:\n endpoints["server"] = {"host": server_host, "port": server_port}\n\n log.info(\n "Sending endpoints info to debug server at localhost:{0}:\n{1}",\n args.for_server,\n json.repr(endpoints),\n )\n\n try:\n sock = sockets.create_client()\n try:\n sock.settimeout(None)\n sock.connect(("127.0.0.1", args.for_server))\n sock_io = sock.makefile("wb", 0)\n try:\n sock_io.write(json.dumps(endpoints).encode("utf-8"))\n finally:\n sock_io.close()\n finally:\n sockets.close_socket(sock)\n except Exception:\n log.reraise_exception("Error sending endpoints info to debug server:")\n\n if "error" in endpoints:\n log.error("Couldn't set up endpoints; exiting.")\n sys.exit(1)\n\n listener_file = os.getenv("DEBUGPY_ADAPTER_ENDPOINTS")\n if listener_file is not None:\n log.info(\n "Writing endpoints info to {0!r}:\n{1}", listener_file, json.repr(endpoints)\n )\n\n def delete_listener_file():\n log.info("Listener ports closed; deleting {0!r}", listener_file)\n try:\n os.remove(listener_file)\n except Exception:\n log.swallow_exception(\n "Failed to delete {0!r}", listener_file, level="warning"\n )\n\n try:\n with open(listener_file, "w") as f:\n atexit.register(delete_listener_file)\n print(json.dumps(endpoints), file=f)\n except Exception:\n log.reraise_exception("Error writing endpoints info to file:")\n\n if args.port is None:\n clients.Client("stdio")\n\n # These must be registered after the one above, to ensure that the listener sockets\n # are closed before the endpoint info file is deleted - this way, another process\n # can wait for the file to go away as a signal that the ports are no longer in use.\n 
atexit.register(servers.stop_serving)\n atexit.register(clients.stop_serving)\n\n servers.wait_until_disconnected()\n log.info("All debug servers disconnected; waiting for remaining sessions...")\n\n sessions.wait_until_ended()\n log.info("All debug sessions have ended; exiting.")\n\n\ndef _parse_argv(argv):\n parser = argparse.ArgumentParser()\n\n parser.add_argument(\n "--for-server", type=int, metavar="PORT", help=argparse.SUPPRESS\n )\n\n parser.add_argument(\n "--port",\n type=int,\n default=None,\n metavar="PORT",\n help="start the adapter in debugServer mode on the specified port",\n )\n\n parser.add_argument(\n "--host",\n type=str,\n default="127.0.0.1",\n metavar="HOST",\n help="start the adapter in debugServer mode on the specified host",\n )\n\n parser.add_argument(\n "--access-token", type=str, help="access token expected from the server"\n )\n\n parser.add_argument(\n "--server-access-token", type=str, help="access token expected by the server"\n )\n\n parser.add_argument(\n "--log-dir",\n type=str,\n metavar="DIR",\n help="enable logging and use DIR to save adapter logs",\n )\n\n parser.add_argument(\n "--log-stderr", action="store_true", help="enable logging to stderr"\n )\n\n args = parser.parse_args(argv[1:])\n\n if args.port is None:\n if args.log_stderr:\n parser.error("--log-stderr requires --port")\n if args.for_server is not None:\n parser.error("--for-server requires --port")\n\n return args\n\n\nif __name__ == "__main__":\n # debugpy can also be invoked directly rather than via -m. In this case, the first\n # entry on sys.path is the one added automatically by Python for the directory\n # containing this file. This means that import debugpy will not work, since we need\n # the parent directory of debugpy/ to be in sys.path, rather than debugpy/adapter/.\n #\n # The other issue is that many other absolute imports will break, because they\n # will be resolved relative to debugpy/adapter/ - e.g. 
`import state` will then try\n # to import debugpy/adapter/state.py.\n #\n # To fix both, we need to replace the automatically added entry such that it points\n # at parent directory of debugpy/ instead of debugpy/adapter, import debugpy with that\n # in sys.path, and then remove the first entry entry altogether, so that it doesn't\n # affect any further imports we might do. For example, suppose the user did:\n #\n # python /foo/bar/debugpy/adapter ...\n #\n # At the beginning of this script, sys.path will contain "/foo/bar/debugpy/adapter"\n # as the first entry. What we want is to replace it with "/foo/bar', then import\n # debugpy with that in effect, and then remove the replaced entry before any more\n # code runs. The imported debugpy module will remain in sys.modules, and thus all\n # future imports of it or its submodules will resolve accordingly.\n if "debugpy" not in sys.modules:\n # Do not use dirname() to walk up - this can be a relative path, e.g. ".".\n if os.name == "nt":\n import pathlib\n\n windows_path = pathlib.Path(sys.path[0])\n sys.path[0] = str(windows_path.parent.parent)\n else:\n sys.path[0] = sys.path[0] + "/../../"\n __import__("debugpy")\n del sys.path[0]\n\n # Apply OS-global and user-specific locale settings.\n try:\n locale.setlocale(locale.LC_ALL, "")\n except Exception:\n # On POSIX, locale is set via environment variables, and this can fail if\n # those variables reference a non-existing locale. Ignore and continue using\n # the default "C" locale if so.\n pass\n\n main()\n
|
.venv\Lib\site-packages\debugpy\adapter\__main__.py
|
__main__.py
|
Python
| 8,686 | 0.95 | 0.187234 | 0.215385 |
node-utils
| 718 |
2024-01-14T18:49:03.212525
|
BSD-3-Clause
| false |
8d894173d2d53ad632a688213f199a22
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\clients.cpython-313.pyc
|
clients.cpython-313.pyc
|
Other
| 31,657 | 0.8 | 0.019293 | 0.003367 |
vue-tools
| 984 |
2024-02-23T09:09:27.489412
|
GPL-3.0
| false |
9a74d968a70dfe2b8dc022c5e83cff2f
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\components.cpython-313.pyc
|
components.cpython-313.pyc
|
Other
| 9,868 | 0.95 | 0.071429 | 0 |
awesome-app
| 958 |
2025-02-12T06:08:45.515856
|
MIT
| false |
beb875f113760ebedca22d5e545768e0
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\launchers.cpython-313.pyc
|
launchers.cpython-313.pyc
|
Other
| 9,072 | 0.95 | 0.04878 | 0 |
awesome-app
| 323 |
2024-03-09T09:20:16.477789
|
Apache-2.0
| false |
89226a5202d3ac427dee663c02b3c702
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\servers.cpython-313.pyc
|
servers.cpython-313.pyc
|
Other
| 25,830 | 0.8 | 0.02952 | 0.011905 |
react-lib
| 295 |
2023-09-11T12:35:26.934105
|
BSD-3-Clause
| false |
3b29ec658b132403c664f0385d80f713
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\sessions.cpython-313.pyc
|
sessions.cpython-313.pyc
|
Other
| 14,074 | 0.8 | 0.106557 | 0.009174 |
awesome-app
| 512 |
2024-12-24T23:13:46.464322
|
BSD-3-Clause
| false |
aa7365aab9334922a4d63c5035821033
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\__init__.cpython-313.pyc
|
__init__.cpython-313.pyc
|
Other
| 413 | 0.8 | 0 | 0 |
awesome-app
| 501 |
2025-05-12T02:46:06.140816
|
GPL-3.0
| false |
89bed01304219b8a93a1e5ce3a1c0235
|
\n\n
|
.venv\Lib\site-packages\debugpy\adapter\__pycache__\__main__.cpython-313.pyc
|
__main__.cpython-313.pyc
|
Other
| 8,892 | 0.95 | 0.061728 | 0 |
vue-tools
| 656 |
2024-09-22T02:14:33.116865
|
Apache-2.0
| false |
c2dfed28feb7a06b4e8da70444309f8e
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport atexit\nimport contextlib\nimport functools\nimport inspect\nimport io\nimport os\nimport platform\nimport sys\nimport threading\nimport traceback\n\nimport debugpy\nfrom debugpy.common import json, timestamp, util\n\n\nLEVELS = ("debug", "info", "warning", "error")\n"""Logging levels, lowest to highest importance.\n"""\n\nlog_dir = os.getenv("DEBUGPY_LOG_DIR")\n"""If not None, debugger logs its activity to a file named debugpy.*-<pid>.log\nin the specified directory, where <pid> is the return value of os.getpid().\n"""\n\ntimestamp_format = "09.3f"\n"""Format spec used for timestamps. Can be changed to dial precision up or down.\n"""\n\n_lock = threading.RLock()\n_tls = threading.local()\n_files = {} # filename -> LogFile\n_levels = set() # combined for all log files\n\n\ndef _update_levels():\n global _levels\n _levels = frozenset(level for file in _files.values() for level in file.levels)\n\n\nclass LogFile(object):\n def __init__(self, filename, file, levels=LEVELS, close_file=True):\n info("Also logging to {0}.", json.repr(filename))\n self.filename = filename\n self.file = file\n self.close_file = close_file\n self._levels = frozenset(levels)\n\n with _lock:\n _files[self.filename] = self\n _update_levels()\n info(\n "{0} {1}\n{2} {3} ({4}-bit)\ndebugpy {5}",\n platform.platform(),\n platform.machine(),\n platform.python_implementation(),\n platform.python_version(),\n 64 if sys.maxsize > 2**32 else 32,\n debugpy.__version__,\n _to_files=[self],\n )\n\n @property\n def levels(self):\n return self._levels\n\n @levels.setter\n def levels(self, value):\n with _lock:\n self._levels = frozenset(LEVELS if value is all else value)\n _update_levels()\n\n def write(self, level, output):\n if level in self.levels:\n try:\n self.file.write(output)\n self.file.flush()\n except Exception: # pragma: no cover\n 
pass\n\n def close(self):\n with _lock:\n del _files[self.filename]\n _update_levels()\n info("Not logging to {0} anymore.", json.repr(self.filename))\n\n if self.close_file:\n try:\n self.file.close()\n except Exception: # pragma: no cover\n pass\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.close()\n\n\nclass NoLog(object):\n file = filename = None\n\n __bool__ = __nonzero__ = lambda self: False\n\n def close(self):\n pass\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n pass\n\n\n# Used to inject a newline into stderr if logging there, to clean up the output\n# when it's intermixed with regular prints from other sources.\ndef newline(level="info"):\n with _lock:\n stderr.write(level, "\n")\n\n\ndef write(level, text, _to_files=all):\n assert level in LEVELS\n\n t = timestamp.current()\n format_string = "{0}+{1:" + timestamp_format + "}: "\n prefix = format_string.format(level[0].upper(), t)\n\n text = getattr(_tls, "prefix", "") + text\n indent = "\n" + (" " * len(prefix))\n output = indent.join(text.split("\n"))\n output = prefix + output + "\n\n"\n\n with _lock:\n if _to_files is all:\n _to_files = _files.values()\n for file in _to_files:\n file.write(level, output)\n\n return text\n\n\ndef write_format(level, format_string, *args, **kwargs):\n # Don't spend cycles doing expensive formatting if we don't have to. 
Errors are\n # always formatted, so that error() can return the text even if it's not logged.\n if level != "error" and level not in _levels:\n return\n\n try:\n text = format_string.format(*args, **kwargs)\n except Exception: # pragma: no cover\n reraise_exception()\n\n return write(level, text, kwargs.pop("_to_files", all))\n\n\ndebug = functools.partial(write_format, "debug")\ninfo = functools.partial(write_format, "info")\nwarning = functools.partial(write_format, "warning")\n\n\ndef error(*args, **kwargs):\n """Logs an error.\n\n Returns the output wrapped in AssertionError. Thus, the following::\n\n raise log.error(s, ...)\n\n has the same effect as::\n\n log.error(...)\n assert False, (s.format(...))\n """\n return AssertionError(write_format("error", *args, **kwargs))\n\n\ndef _exception(format_string="", *args, **kwargs):\n level = kwargs.pop("level", "error")\n exc_info = kwargs.pop("exc_info", sys.exc_info())\n\n if format_string:\n format_string += "\n\n"\n format_string += "{exception}\nStack where logged:\n{stack}"\n\n exception = "".join(traceback.format_exception(*exc_info))\n\n f = inspect.currentframe()\n f = f.f_back if f else f # don't log this frame\n try:\n stack = "".join(traceback.format_stack(f))\n finally:\n del f # avoid cycles\n\n write_format(\n level, format_string, *args, exception=exception, stack=stack, **kwargs\n )\n\n\ndef swallow_exception(format_string="", *args, **kwargs):\n """Logs an exception with full traceback.\n\n If format_string is specified, it is formatted with format(*args, **kwargs), and\n prepended to the exception traceback on a separate line.\n\n If exc_info is specified, the exception it describes will be logged. Otherwise,\n sys.exc_info() - i.e. 
the exception being handled currently - will be logged.\n\n If level is specified, the exception will be logged as a message of that level.\n The default is "error".\n """\n\n _exception(format_string, *args, **kwargs)\n\n\ndef reraise_exception(format_string="", *args, **kwargs):\n """Like swallow_exception(), but re-raises the current exception after logging it."""\n\n assert "exc_info" not in kwargs\n _exception(format_string, *args, **kwargs)\n raise\n\n\ndef to_file(filename=None, prefix=None, levels=LEVELS):\n """Starts logging all messages at the specified levels to the designated file.\n\n Either filename or prefix must be specified, but not both.\n\n If filename is specified, it designates the log file directly.\n\n If prefix is specified, the log file is automatically created in options.log_dir,\n with filename computed as prefix + os.getpid(). If log_dir is None, no log file\n is created, and the function returns immediately.\n\n If the file with the specified or computed name is already being used as a log\n file, it is not overwritten, but its levels are updated as specified.\n\n The function returns an object with a close() method. When the object is closed,\n logs are not written into that file anymore. 
Alternatively, the returned object\n can be used in a with-statement:\n\n with log.to_file("some.log"):\n # now also logging to some.log\n # not logging to some.log anymore\n """\n\n assert (filename is not None) ^ (prefix is not None)\n\n if filename is None:\n if log_dir is None:\n return NoLog()\n try:\n os.makedirs(log_dir)\n except OSError: # pragma: no cover\n pass\n filename = f"{log_dir}/{prefix}-{os.getpid()}.log"\n\n file = _files.get(filename)\n if file is None:\n file = LogFile(filename, io.open(filename, "w", encoding="utf-8"), levels)\n else:\n file.levels = levels\n return file\n\n\n@contextlib.contextmanager\ndef prefixed(format_string, *args, **kwargs):\n """Adds a prefix to all messages logged from the current thread for the duration\n of the context manager.\n """\n prefix = format_string.format(*args, **kwargs)\n old_prefix = getattr(_tls, "prefix", "")\n _tls.prefix = prefix + old_prefix\n try:\n yield\n finally:\n _tls.prefix = old_prefix\n\n\ndef get_environment_description(header):\n import sysconfig\n import site # noqa\n\n result = [header, "\n\n"]\n\n def report(s, *args, **kwargs):\n result.append(s.format(*args, **kwargs))\n\n def report_paths(get_paths, label=None):\n prefix = f" {label or get_paths}: "\n\n expr = None\n if not callable(get_paths):\n expr = get_paths\n get_paths = lambda: util.evaluate(expr)\n try:\n paths = get_paths()\n except AttributeError:\n report("{0}<missing>\n", prefix)\n return\n except Exception: # pragma: no cover\n swallow_exception(\n "Error evaluating {0}",\n repr(expr) if expr else util.srcnameof(get_paths),\n level="info",\n )\n return\n\n if not isinstance(paths, (list, tuple)):\n paths = [paths]\n\n for p in sorted(paths):\n report("{0}{1}", prefix, p)\n if p is not None:\n rp = os.path.realpath(p)\n if p != rp:\n report("({0})", rp)\n report("\n")\n\n prefix = " " * len(prefix)\n\n report("System paths:\n")\n report_paths("sys.executable")\n report_paths("sys.prefix")\n 
report_paths("sys.base_prefix")\n report_paths("sys.real_prefix")\n report_paths("site.getsitepackages()")\n report_paths("site.getusersitepackages()")\n\n site_packages = [\n p\n for p in sys.path\n if os.path.exists(p) and os.path.basename(p) == "site-packages"\n ]\n report_paths(lambda: site_packages, "sys.path (site-packages)")\n\n for name in sysconfig.get_path_names():\n expr = "sysconfig.get_path({0!r})".format(name)\n report_paths(expr)\n\n report_paths("os.__file__")\n report_paths("threading.__file__")\n report_paths("debugpy.__file__")\n report("\n")\n\n importlib_metadata = None\n try:\n import importlib_metadata\n except ImportError: # pragma: no cover\n try:\n from importlib import metadata as importlib_metadata\n except ImportError:\n pass\n if importlib_metadata is None: # pragma: no cover\n report("Cannot enumerate installed packages - missing importlib_metadata.")\n else:\n report("Installed packages:\n")\n try:\n for pkg in importlib_metadata.distributions():\n report(" {0}=={1}\n", pkg.name, pkg.version)\n except Exception: # pragma: no cover\n swallow_exception(\n "Error while enumerating installed packages.", level="info"\n )\n\n return "".join(result).rstrip("\n")\n\n\ndef describe_environment(header):\n info("{0}", get_environment_description(header))\n\n\nstderr = LogFile(\n "<stderr>",\n sys.stderr,\n levels=os.getenv("DEBUGPY_LOG_STDERR", "warning error").split(),\n close_file=False,\n)\n\n\n@atexit.register\ndef _close_files():\n for file in tuple(_files.values()):\n file.close()\n\n\n# The following are helper shortcuts for printf debugging. 
They must never be used\n# in production code.\n\n\ndef _repr(value): # pragma: no cover\n warning("$REPR {0!r}", value)\n\n\ndef _vars(*names): # pragma: no cover\n locals = inspect.currentframe().f_back.f_locals\n if names:\n locals = {name: locals[name] for name in names if name in locals}\n warning("$VARS {0!r}", locals)\n\n\ndef _stack(): # pragma: no cover\n stack = "\n".join(traceback.format_stack())\n warning("$STACK:\n\n{0}", stack)\n\n\ndef _threads(): # pragma: no cover\n output = "\n".join([str(t) for t in threading.enumerate()])\n warning("$THREADS:\n\n{0}", output)\n
|
.venv\Lib\site-packages\debugpy\common\log.py
|
log.py
|
Python
| 12,117 | 0.95 | 0.199513 | 0.035831 |
python-kit
| 810 |
2023-11-07T06:13:31.531344
|
GPL-3.0
| false |
6ac87d6451339ae09c7f287a39a1a1d4
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nimport functools\nimport threading\n\n\nclass Singleton(object):\n """A base class for a class of a singleton object.\n\n For any derived class T, the first invocation of T() will create the instance,\n and any future invocations of T() will return that instance.\n\n Concurrent invocations of T() from different threads are safe.\n """\n\n # A dual-lock scheme is necessary to be thread safe while avoiding deadlocks.\n # _lock_lock is shared by all singleton types, and is used to construct their\n # respective _lock instances when invoked for a new type. Then _lock is used\n # to synchronize all further access for that type, including __init__. This way,\n # __init__ for any given singleton can access another singleton, and not get\n # deadlocked if that other singleton is trying to access it.\n _lock_lock = threading.RLock()\n _lock = None\n\n # Specific subclasses will get their own _instance set in __new__.\n _instance = None\n\n _is_shared = None # True if shared, False if exclusive\n\n def __new__(cls, *args, **kwargs):\n # Allow arbitrary args and kwargs if shared=False, because that is guaranteed\n # to construct a new singleton if it succeeds. 
Otherwise, this call might end\n # up returning an existing instance, which might have been constructed with\n # different arguments, so allowing them is misleading.\n assert not kwargs.get("shared", False) or (len(args) + len(kwargs)) == 0, (\n "Cannot use constructor arguments when accessing a Singleton without "\n "specifying shared=False."\n )\n\n # Avoid locking as much as possible with repeated double-checks - the most\n # common path is when everything is already allocated.\n if not cls._instance:\n # If there's no per-type lock, allocate it.\n if cls._lock is None:\n with cls._lock_lock:\n if cls._lock is None:\n cls._lock = threading.RLock()\n\n # Now that we have a per-type lock, we can synchronize construction.\n if not cls._instance:\n with cls._lock:\n if not cls._instance:\n cls._instance = object.__new__(cls)\n # To prevent having __init__ invoked multiple times, call\n # it here directly, and then replace it with a stub that\n # does nothing - that stub will get auto-invoked on return,\n # and on all future singleton accesses.\n cls._instance.__init__()\n cls.__init__ = lambda *args, **kwargs: None\n\n return cls._instance\n\n def __init__(self, *args, **kwargs):\n """Initializes the singleton instance. Guaranteed to only be invoked once for\n any given type derived from Singleton.\n\n If shared=False, the caller is requesting a singleton instance for their own\n exclusive use. This is only allowed if the singleton has not been created yet;\n if so, it is created and marked as being in exclusive use. While it is marked\n as such, all attempts to obtain an existing instance of it immediately raise\n an exception. 
The singleton can eventually be promoted to shared use by calling\n share() on it.\n """\n\n shared = kwargs.pop("shared", True)\n with self:\n if shared:\n assert (\n type(self)._is_shared is not False\n ), "Cannot access a non-shared Singleton."\n type(self)._is_shared = True\n else:\n assert type(self)._is_shared is None, "Singleton is already created."\n\n def __enter__(self):\n """Lock this singleton to prevent concurrent access."""\n type(self)._lock.acquire()\n return self\n\n def __exit__(self, exc_type, exc_value, exc_tb):\n """Unlock this singleton to allow concurrent access."""\n type(self)._lock.release()\n\n def share(self):\n """Share this singleton, if it was originally created with shared=False."""\n type(self)._is_shared = True\n\n\nclass ThreadSafeSingleton(Singleton):\n """A singleton that incorporates a lock for thread-safe access to its members.\n\n The lock can be acquired using the context manager protocol, and thus idiomatic\n use is in conjunction with a with-statement. For example, given derived class T::\n\n with T() as t:\n t.x = t.frob(t.y)\n\n All access to the singleton from the outside should follow this pattern for both\n attributes and method calls. Singleton members can assume that self is locked by\n the caller while they're executing, but recursive locking of the same singleton\n on the same thread is also permitted.\n """\n\n threadsafe_attrs = frozenset()\n """Names of attributes that are guaranteed to be used in a thread-safe manner.\n\n This is typically used in conjunction with share() to simplify synchronization.\n """\n\n readonly_attrs = frozenset()\n """Names of attributes that are readonly. These can be read without locking, but\n cannot be written at all.\n\n Every derived class gets its own separate set. 
Thus, for any given singleton type\n T, an attribute can be made readonly after setting it, with T.readonly_attrs.add().\n """\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n # Make sure each derived class gets a separate copy.\n type(self).readonly_attrs = set(type(self).readonly_attrs)\n\n # Prevent callers from reading or writing attributes without locking, except for\n # reading attributes listed in threadsafe_attrs, and methods specifically marked\n # with @threadsafe_method. Such methods should perform the necessary locking to\n # ensure thread safety for the callers.\n\n @staticmethod\n def assert_locked(self):\n lock = type(self)._lock\n assert lock.acquire(blocking=False), (\n "ThreadSafeSingleton accessed without locking. Either use with-statement, "\n "or if it is a method or property, mark it as @threadsafe_method or with "\n "@autolocked_method, as appropriate."\n )\n lock.release()\n\n def __getattribute__(self, name):\n value = object.__getattribute__(self, name)\n if name not in (type(self).threadsafe_attrs | type(self).readonly_attrs):\n if not getattr(value, "is_threadsafe_method", False):\n ThreadSafeSingleton.assert_locked(self)\n return value\n\n def __setattr__(self, name, value):\n assert name not in type(self).readonly_attrs, "This attribute is read-only."\n if name not in type(self).threadsafe_attrs:\n ThreadSafeSingleton.assert_locked(self)\n return object.__setattr__(self, name, value)\n\n\ndef threadsafe_method(func):\n """Marks a method of a ThreadSafeSingleton-derived class as inherently thread-safe.\n\n A method so marked must either not use any singleton state, or lock it appropriately.\n """\n\n func.is_threadsafe_method = True\n return func\n\n\ndef autolocked_method(func):\n """Automatically synchronizes all calls of a method of a ThreadSafeSingleton-derived\n class by locking the singleton for the duration of each call.\n """\n\n @functools.wraps(func)\n @threadsafe_method\n def lock_and_call(self, 
*args, **kwargs):\n with self:\n return func(self, *args, **kwargs)\n\n return lock_and_call\n
|
.venv\Lib\site-packages\debugpy\common\singleton.py
|
singleton.py
|
Python
| 7,851 | 0.95 | 0.297297 | 0.186207 |
react-lib
| 293 |
2025-03-12T14:48:18.956022
|
MIT
| false |
79539bf5c549e813e9d73ef6a95f70b3
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

import socket
import sys
import threading

from debugpy.common import log
from debugpy.common.util import hide_thread_from_debugger


def create_server(host, port=0, backlog=socket.SOMAXCONN, timeout=None):
    """Return a local server socket listening on the given port.

    host defaults to "127.0.0.1" and port to 0 (let the OS pick a free port)
    when either is None. If timeout is not None, it is applied to the
    listening socket.
    """

    assert backlog > 0
    if host is None:
        host = "127.0.0.1"
    if port is None:
        port = 0

    # Create the socket before entering the try block: if creation itself
    # fails there is nothing to close yet, and the except clause below must
    # not reference an unbound name.
    server = _new_sock()
    try:
        if port != 0:
            # If binding to a specific port, make sure that the user doesn't have
            # to wait until the OS times out the socket to be able to use that port
            # again, if the server or the adapter crash or are force-killed.
            if sys.platform == "win32":
                server.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
            else:
                try:
                    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                except (AttributeError, OSError):  # pragma: no cover
                    pass  # Not available everywhere.
        server.bind((host, port))
        if timeout is not None:
            server.settimeout(timeout)
        server.listen(backlog)
    except Exception:  # pragma: no cover
        server.close()
        raise
    return server


def create_client():
    """Return a client socket that may be connected to a remote address."""
    return _new_sock()


def _try_setsockopt(sock, level, option_name, value):
    """Set a socket option by name, ignoring platforms that lack it.

    The option constant is looked up with getattr inside the try block so
    that a missing socket.* attribute is swallowed just like an OSError.
    """
    try:
        sock.setsockopt(level, getattr(socket, option_name), value)
    except (AttributeError, OSError):  # pragma: no cover
        pass  # May not be available everywhere.


def _new_sock():
    """Create a TCP socket with keepalive enabled where supported."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)

    # Set TCP keepalive on an open socket.
    # It activates after 1 second (TCP_KEEPIDLE) of idleness,
    # then sends a keepalive ping once every 3 seconds (TCP_KEEPINTVL),
    # and closes the connection after 5 failed pings (TCP_KEEPCNT), or 15 seconds.
    _try_setsockopt(sock, socket.SOL_SOCKET, "SO_KEEPALIVE", 1)
    _try_setsockopt(sock, socket.IPPROTO_TCP, "TCP_KEEPIDLE", 1)
    _try_setsockopt(sock, socket.IPPROTO_TCP, "TCP_KEEPINTVL", 3)
    _try_setsockopt(sock, socket.IPPROTO_TCP, "TCP_KEEPCNT", 5)
    return sock


def shut_down(sock, how=socket.SHUT_RDWR):
    """Shut down the given socket."""
    sock.shutdown(how)


def close_socket(sock):
    """Shutdown and close the socket."""
    try:
        shut_down(sock)
    except Exception:  # pragma: no cover
        # Best-effort: the peer may already have closed the connection.
        pass
    sock.close()


def serve(name, handler, host, port=0, backlog=socket.SOMAXCONN, timeout=None):
    """Accepts TCP connections on the specified host and port, and invokes the
    provided handler function for every new connection.

    Returns the created server socket. The accept loop runs on a daemon
    thread that is hidden from the debugger.
    """

    assert backlog > 0

    try:
        listener = create_server(host, port, backlog, timeout)
    except Exception:  # pragma: no cover
        log.reraise_exception(
            "Error listening for incoming {0} connections on {1}:{2}:", name, host, port
        )
    host, port = listener.getsockname()
    log.info("Listening for incoming {0} connections on {1}:{2}...", name, host, port)

    def accept_worker():
        while True:
            try:
                sock, (other_host, other_port) = listener.accept()
            except (OSError, socket.error):
                # Listener socket has been closed.
                break

            log.info(
                "Accepted incoming {0} connection from {1}:{2}.",
                name,
                other_host,
                other_port,
            )
            handler(sock)

    thread = threading.Thread(target=accept_worker)
    thread.daemon = True
    hide_thread_from_debugger(thread)
    thread.start()

    return listener
|
.venv\Lib\site-packages\debugpy\common\sockets.py
|
sockets.py
|
Python
| 4,353 | 0.95 | 0.217054 | 0.104762 |
python-kit
| 947 |
2025-06-23T02:17:34.661536
|
BSD-3-Clause
| false |
a90cc6410c4f6f8d4aab0fdc31068f1b
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

"""Provides facilities to dump all stacks of all threads in the process.
"""

import os
import sys
import time
import threading
import traceback

from debugpy.common import log


def dump():
    """Dump stacks of all threads in this process, except for the current thread."""

    tid = threading.current_thread().ident
    pid = os.getpid()

    log.info("Dumping stacks for process {0}...", pid)

    for t_ident, frame in sys._current_frames().items():
        if t_ident == tid:
            continue

        # Find the name and daemon flag of the thread being dumped. NOTE: the
        # match must be on t_ident (the dumped thread), not tid (the current
        # thread), or every stack gets labeled with this thread's metadata.
        for t in threading.enumerate():
            if t.ident == t_ident:
                t_name = t.name
                t_daemon = t.daemon
                break
        else:
            # The thread has no corresponding threading.Thread object, e.g. it
            # was started directly via the C API.
            t_name = t_daemon = "<unknown>"

        stack = "".join(traceback.format_stack(frame))
        log.info(
            "Stack of thread {0} (tid={1}, pid={2}, daemon={3}):\n\n{4}",
            t_name,
            t_ident,
            pid,
            t_daemon,
            stack,
        )

    log.info("Finished dumping stacks for process {0}.", pid)


def dump_after(secs):
    """Invokes dump() on a background thread after waiting for the specified time."""

    def dumper():
        time.sleep(secs)
        try:
            dump()
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are not swallowed; anything else is just logged.
            log.swallow_exception()

    thread = threading.Thread(target=dumper)
    thread.daemon = True
    thread.start()
|
.venv\Lib\site-packages\debugpy\common\stacks.py
|
stacks.py
|
Python
| 1,588 | 0.95 | 0.209677 | 0.06383 |
node-utils
| 204 |
2023-09-03T02:59:24.598954
|
MIT
| false |
049a8771305d92fc1e19147a7b67f4bd
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

"""Provides monotonic timestamps with a resetable zero.
"""

import time

__all__ = ["current", "reset"]

# Epoch for current(); established by the reset() call at import time.
timestamp_zero = 0.0


def reset():
    """Move the zero point to the present monotonic time."""
    global timestamp_zero
    timestamp_zero = time.monotonic()


def current():
    """Return the number of seconds elapsed since the last reset()."""
    return time.monotonic() - timestamp_zero


reset()
|
.venv\Lib\site-packages\debugpy\common\timestamp.py
|
timestamp.py
|
Python
| 432 | 0.95 | 0.136364 | 0.230769 |
vue-tools
| 972 |
2025-05-05T05:17:30.605156
|
Apache-2.0
| false |
ee99995c7baf053d5a24478fe9d0e93d
|
# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import annotations\nimport os\nimport typing\n\nif typing.TYPE_CHECKING:\n __all__: list[str]\n\n__all__ = []\n\n# The lower time bound for assuming that the process hasn't spawned successfully.\nPROCESS_SPAWN_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_SPAWN_TIMEOUT", 60)) or None\n\n# The lower time bound for assuming that the process hasn't exited gracefully.\nPROCESS_EXIT_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_EXIT_TIMEOUT", 30)) or None\n
|
.venv\Lib\site-packages\debugpy\common\__init__.py
|
__init__.py
|
Python
| 627 | 0.95 | 0.222222 | 0.384615 |
vue-tools
| 167 |
2024-12-30T10:48:52.495804
|
GPL-3.0
| false |
4ac9aed02b5c94d7217bc03c2091b2fd
|
\n\n
|
.venv\Lib\site-packages\debugpy\common\__pycache__\json.cpython-313.pyc
|
json.cpython-313.pyc
|
Other
| 12,696 | 0.95 | 0.123288 | 0.008 |
awesome-app
| 488 |
2025-06-24T01:35:11.366103
|
MIT
| false |
9f908cec111f570041d7d01a9f31f56c
|
\n\n
|
.venv\Lib\site-packages\debugpy\common\__pycache__\log.cpython-313.pyc
|
log.cpython-313.pyc
|
Other
| 17,829 | 0.95 | 0.017391 | 0.038835 |
react-lib
| 314 |
2024-08-29T00:03:45.471823
|
BSD-3-Clause
| false |
b91bdb564827bc2650139b05198ffa75
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.