Dataset schema: id (string, 4-10 chars) | text (string, 4-2.14M chars) | source (string, 2 classes) | created (timestamp[s], 2001-05-16 21:05:09 to 2025-01-01 03:38:30) | added (string date, 2025-04-01 04:05:38 to 2025-04-01 07:14:06) | metadata (dict)
---|---|---|---|---|---|
1834374814
|
HLS with fLaC and Opus fail
Have you read the FAQ and checked for duplicate open issues?
Yes
What version of Shaka Player are you using?
4.3.8
Can you reproduce the issue with our latest release version?
Yes
Can you reproduce the issue with the latest code from main?
Yes
Are you using the demo app or your own custom app?
Demo
What browser and OS are you using?
macOS: Firefox, Chrome, Safari
What are the manifest and license server URIs?
Can't find a public stream
What configuration are you using? What is the output of player.getConfiguration()?
Not important, just the default demo settings:
{
"drm": {
"retryParameters": {
"maxAttempts": 2,
"baseDelay": 1000,
"backoffFactor": 2,
"fuzzFactor": 0.5,
"timeout": 30000,
"stallTimeout": 5000,
"connectionTimeout": 10000
},
"servers": {},
"clearKeys": {},
"advanced": {},
"delayLicenseRequestUntilPlayed": false,
"persistentSessionOnlinePlayback": false,
"persistentSessionsMetadata": [],
"logLicenseExchange": false,
"updateExpirationTime": 1,
"preferredKeySystems": [],
"keySystemsMapping": {},
"parseInbandPsshEnabled": false,
"minHdcpVersion": ""
},
"manifest": {
"retryParameters": {
"maxAttempts": 2,
"baseDelay": 1000,
"backoffFactor": 2,
"fuzzFactor": 0.5,
"timeout": 30000,
"stallTimeout": 5000,
"connectionTimeout": 10000
},
"availabilityWindowOverride": null,
"disableAudio": false,
"disableVideo": false,
"disableText": false,
"disableThumbnails": false,
"defaultPresentationDelay": 0,
"segmentRelativeVttTiming": false,
"raiseFatalErrorOnManifestUpdateRequestFailure": false,
"dash": {
"clockSyncUri": "",
"ignoreDrmInfo": false,
"disableXlinkProcessing": false,
"xlinkFailGracefully": false,
"ignoreMinBufferTime": false,
"autoCorrectDrift": true,
"initialSegmentLimit": 1000,
"ignoreSuggestedPresentationDelay": false,
"ignoreEmptyAdaptationSet": false,
"ignoreMaxSegmentDuration": false,
"keySystemsByURI": {
"urn:uuid:1077efec-c0b2-4d02-ace3-3c1e52e2fb4b": "org.w3.clearkey",
"urn:uuid:e2719d58-a985-b3c9-781a-b030af78d30e": "org.w3.clearkey",
"urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed": "com.widevine.alpha",
"urn:uuid:9a04f079-9840-4286-ab92-e65be0885f95": "com.microsoft.playready",
"urn:uuid:79f0049a-4098-8642-ab92-e65be0885f95": "com.microsoft.playready",
"urn:uuid:f239e769-efa3-4850-9c16-a903c6932efb": "com.adobe.primetime"
},
"sequenceMode": false
},
"hls": {
"ignoreTextStreamFailures": false,
"ignoreImageStreamFailures": false,
"defaultAudioCodec": "mp4a.40.2",
"defaultVideoCodec": "avc1.42E01E",
"ignoreManifestProgramDateTime": false,
"mediaPlaylistFullMimeType": "video/mp2t; codecs=\"avc1.42E01E,mp4a.40.2\"",
"useSafariBehaviorForLive": true,
"liveSegmentsDelay": 3,
"sequenceMode": true,
"ignoreManifestTimestampsInSegmentsMode": false
},
"mss": {
"sequenceMode": false,
"keySystemsBySystemId": {
"9a04f079-9840-4286-ab92-e65be0885f95": "com.microsoft.playready",
"79f0049a-4098-8642-ab92-e65be0885f95": "com.microsoft.playready"
}
}
},
"streaming": {
"retryParameters": {
"maxAttempts": 2,
"baseDelay": 1000,
"backoffFactor": 2,
"fuzzFactor": 0.5,
"timeout": 30000,
"stallTimeout": 5000,
"connectionTimeout": 10000
},
"rebufferingGoal": 2,
"bufferingGoal": 10,
"bufferBehind": 30,
"ignoreTextStreamFailures": false,
"alwaysStreamText": false,
"startAtSegmentBoundary": false,
"gapDetectionThreshold": 0.5,
"durationBackoff": 1,
"safeSeekOffset": 5,
"stallEnabled": true,
"stallThreshold": 1,
"stallSkip": 0.1,
"useNativeHlsOnSafari": true,
"inaccurateManifestTolerance": 2,
"lowLatencyMode": false,
"autoLowLatencyMode": false,
"forceHTTPS": false,
"preferNativeHls": false,
"updateIntervalSeconds": 1,
"dispatchAllEmsgBoxes": false,
"observeQualityChanges": false,
"maxDisabledTime": 30,
"parsePrftBox": false,
"segmentPrefetchLimit": 0,
"liveSync": false,
"liveSyncMaxLatency": 1,
"liveSyncPlaybackRate": 1.1
},
"mediaSource": {
"sourceBufferExtraFeatures": "",
"forceTransmux": false
},
"offline": {
"usePersistentLicense": true,
"numberOfParallelDownloads": 5
},
"abr": {
"enabled": true,
"useNetworkInformation": true,
"defaultBandwidthEstimate": 1000000,
"switchInterval": 8,
"bandwidthUpgradeTarget": 0.85,
"bandwidthDowngradeTarget": 0.95,
"restrictions": {
"minWidth": 0,
"maxWidth": null,
"minHeight": 0,
"maxHeight": null,
"minPixels": 0,
"maxPixels": null,
"minFrameRate": 0,
"maxFrameRate": null,
"minBandwidth": 0,
"maxBandwidth": null
},
"advanced": {
"minTotalBytes": 128000,
"minBytes": 16000,
"fastHalfLife": 2,
"slowHalfLife": 5
},
"restrictToElementSize": false,
"restrictToScreenSize": false,
"ignoreDevicePixelRatio": false
},
"autoShowText": 3,
"preferredAudioLanguage": "de",
"preferredAudioLabel": "",
"preferredTextLanguage": "de",
"preferredVariantRole": "",
"preferredTextRole": "",
"preferredAudioChannelCount": 2,
"preferredVideoHdrLevel": "AUTO",
"preferredVideoCodecs": [],
"preferredAudioCodecs": [],
"preferForcedSubs": false,
"preferredDecodingAttributes": [],
"restrictions": {
"minWidth": 0,
"maxWidth": null,
"minHeight": 0,
"maxHeight": null,
"minPixels": 0,
"maxPixels": null,
"minFrameRate": 0,
"maxFrameRate": null,
"minBandwidth": 0,
"maxBandwidth": null
},
"playRangeStart": 0,
"playRangeEnd": null,
"cmcd": {
"enabled": false,
"sessionId": "",
"contentId": "",
"useHeaders": false
},
"lcevc": {
"enabled": false,
"dynamicPerformanceScaling": true,
"logLevel": 0,
"drawLogo": false
},
"ads": {}
}
What did you do?
I tried to play an HLS stream with the following master playlist:
#EXTM3U
#EXT-X-STREAM-INF:BANDWIDTH=1096908,CODECS="fLaC"
main.m3u8?...
If I change fLaC to flac, it plays fine. The same issue exists for Opus vs. opus.
What did you expect to happen?
The stream should play, as the codec string conforms to the specification:
https://datatracker.ietf.org/doc/html/rfc8216#section-4.3.4.2
https://datatracker.ietf.org/doc/html/rfc6381#section-3.3
https://mp4ra.org/#/codecs
What actually happened?
The player throws the error:
{
"severity": 2,
"category": 4,
"code": 4032,
"data": [],
"handled": false,
"message": "Shaka Error MANIFEST.CONTENT_UNSUPPORTED_BY_BROWSER ()",
"stack": "@https://shaka-player-demo.appspot.com/lib/util/error.js:94:17\ncheckPlayableVariants_@https://shaka-player-demo.appspot.com/lib/player.js:6495:13\nfilterManifestWithStreamUtils_@https://shaka-player-demo.appspot.com/lib/player.js:5432:10\n"
}
I will fix the issue in an upcoming PR
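For context, a minimal sketch of how the case mismatch could be worked around; this is not the upcoming PR, and the helper names are made up for illustration. The idea: RFC 6381 registers the audio codec IDs as "fLaC" and "Opus", but the support check only passes with the lowercase forms, so normalizing known aliases before calling MediaSource.isTypeSupported() avoids the false rejection.
// Hypothetical helper, not Shaka's actual fix.
const AUDIO_CODEC_ALIASES = { fLaC: 'flac', Opus: 'opus' };

function normalizeCodecs(codecs) {
  return codecs
    .split(',')
    .map((c) => AUDIO_CODEC_ALIASES[c.trim()] || c.trim())
    .join(',');
}

// The raw playlist string fails the check; the normalized one passes:
console.log(MediaSource.isTypeSupported(
    `audio/mp4; codecs="${normalizeCodecs('fLaC')}"`));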
|
gharchive/issue
| 2023-08-03T06:32:28 |
2025-04-01T06:40:23.049805
|
{
"authors": [
"lonebyte"
],
"repo": "shaka-project/shaka-player",
"url": "https://github.com/shaka-project/shaka-player/issues/5453",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
1996576466
|
SegmentTemplate@media not updated after change in DASH manifest
Have you read the FAQ and checked for duplicate open issues?
yes
If the problem is related to FairPlay, have you read the tutorial?
not related
What version of Shaka Player are you using?
v4.5.0 (uncompiled)
Can you reproduce the issue with our latest release version?
yes
Can you reproduce the issue with the latest code from main?
did not try
Are you using the demo app or your own custom app?
demo app
What browser and OS are you using?
Google Chrome 119 @ Fedora Linux 38
What are the manifest and license server URIs?
sent via e-mail to shaka-player-maintainers@googlegroups.com
What configuration are you using? What is the output of player.getConfiguration()?
default demo app configuration
What did you do?
played the DASH stream
What did you expect to happen?
I expect the player to update the segment URIs after SegmentTemplate@media has been changed in the DASH manifest during playback.
What actually happened?
The player still requests segments with the URIs computed during the initial DASH manifest load. Any changes to SegmentTemplate@media during playback are dropped.
Shaka Player v3.2.1, which is used in my production environment, did update the segment URIs after SegmentTemplate@media was changed during playback.
I just checked and the problem still exists in version v4.6.0.
|
gharchive/issue
| 2023-11-16T10:46:56 |
2025-04-01T06:40:23.057138
|
{
"authors": [
"pszemus"
],
"repo": "shaka-project/shaka-player",
"url": "https://github.com/shaka-project/shaka-player/issues/5898",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
2431694953
|
Bad subtitle rendering HTML Character Entities how to fix
Have you read the Tutorials?
Yes.
Have you read the FAQ and checked for duplicate open issues?
Yes.
If the question is related to FairPlay, have you read the tutorial?
Not related.
What version of Shaka Player are you using?
4.10.2
What browser and OS are you using?
Latest Chrome. Not reproducible on Safari due to the rendering of Closed Captions being handled by the device directly.
Please ask your question
I am using XML-based subtitles embedded in MP4, and HTML character entities are not shown correctly.
document.addEventListener("DOMContentLoaded", function () {
shaka.polyfill.installAll();
const videoElement = document.getElementById("video");
const videoContainer = document.getElementById("video-container");
const player = new shaka.Player(videoElement);
window.player = player;
const thumbnailContainer = document.getElementById(
"thumbnail-container"
);
const thumbnail = document.getElementById("thumbnail");
// UI setup
const ui = new shaka.ui.Overlay(player, videoContainer, videoElement);
const controls = ui.getControls();
const defaultConfig = {
controlPanelElements: [
"backward",
"play_pause",
"forward",
"time_and_duration",
"spacer",
"mute",
"volume",
// "language",
// "text_settings",
"overflow_menu", // 3 dots inside quality speed and pip
//"playback_rate",
// "cast",
//"lock",
"picture_in_picture",
//"quality",
"fullscreen",
],
overflowMenuButtons: [
"quality",
"captions",
"language",
"playback_rate",
],
seekBarColors: {
base: "rgba(99, 99, 99, 1)",
buffered: "rgba(255, 147, 58, 1)",
played: "rgba(255, 147, 58, 1)",
},
enableTooltips: true,
// useNativeHlsOnSafari: true,
// textTrackVisibility: true,
// playbackRates: [0.5, 0.75, 1, 1.25, 1.5, 1.75, 2],
// fastForwardRates: [2, 4, 8, 1],
// rewindRates: [-1, -2, -4, -8],
// customContextMenu: true,
// contextMenuElements: ["statistics"],
// statisticsList: ["width", "height", "playTime", "bufferingTime"],
};
ui.configure(defaultConfig);
ui.getControls();
async function loadVideo() {
try {
const url = "myVideo.mpd";
await player.load(url);
await player.setTextTrackVisibility(true);
} catch (error) {
console.error("Error loading manifest:", error);
}
}
loadVideo();
});
How can I fix this?
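One generic browser-side workaround, not a Shaka API, and assuming the entities arrive un-decoded in the cue payload: decode the HTML character entities before the text reaches the renderer.
// Decode entities like &amp; or &#39; by letting the browser parse them.
function decodeEntities(text) {
  const el = document.createElement('textarea');
  el.innerHTML = text;   // the browser parses the entities
  return el.value;       // plain decoded text, markup-free
}

console.log(decodeEntities('Tom &amp; Jerry &#39;99')); // Tom & Jerry '99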
|
gharchive/issue
| 2024-07-26T08:21:05 |
2025-04-01T06:40:23.062196
|
{
"authors": [
"fakeDev9"
],
"repo": "shaka-project/shaka-player",
"url": "https://github.com/shaka-project/shaka-player/issues/7110",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
153291235
|
Upgrading from 3.0 to 5.0
Wanted to know the best steps to upgrade from 3.0 to 5.0.
I didn't see any guides for 3, 4, or 5 in the changelog.md (like you had for 1->2 or 2->3).
I've already done the gem update, but it wasn't clear what changes are needed to get the latest React / React Router, or any other changes in /client.
Thanks
You can find out a lot by looking at the history and latest state of the https://github.com/shakacode/react-webpack-rails-tutorial example app. E.g. if you look at the git history of when the gem/npm module was bumped you can see what changed at the same time.
@murtali that means you didn't have to do anything. If you want to use the new features, that's up to you. There might be some deprecations.
|
gharchive/issue
| 2016-05-05T18:18:46 |
2025-04-01T06:40:23.065097
|
{
"authors": [
"justin808",
"murtali",
"thewoolleyman"
],
"repo": "shakacode/react_on_rails",
"url": "https://github.com/shakacode/react_on_rails/issues/414",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
218115971
|
Clear shared redux stores when server rendering
Per https://github.com/shakacode/react_on_rails/issues/774#issuecomment-290328842, we should clear the redux stores before repopulating them.
The potential bug is that there are some stores with data from unrelated requests sitting around.
This can chew up memory and lead to other unexpected consequences.
Steps to fix:
Add a function in the JS library to clear all shared redux stores.
Call that function from https://github.com/shakacode/react_on_rails/blob/master/app/helpers/react_on_rails_helper.rb#L346, prepending this to the result.
See #774 for more details.
@udovenko indicated interest, so I'll flag this one as taken.
@justin808 Not sure I see the way to reset the store. Each project has its own set of reducers and its own logic for bootstrapping the data into the store. So resetting the store state would have to be individual for each JS server bundle.
Just clear the global map.
https://github.com/shakacode/react_on_rails/blob/master/node_package/src/StoreRegistry.js#L4
hydratedStores.clear() or something like that...
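For reference, a minimal sketch of what such a helper could look like; the module shape is assumed from the linked StoreRegistry.js, not copied from the actual react_on_rails source:
// StoreRegistry keeps a module-level map of stores hydrated for a request.
const hydratedStores = new Map();

// Proposed helper: call this before each server-rendering request so that
// no hydrated store leaks over from the previous request.
export function clearHydratedStores() {
  hydratedStores.clear();
}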
You mean clear the store registrations completely? It feels like we will lose some of the benefits of having initialized stores on the next request...
For the reasons you raised, we don't want any hydrated stores on the next call.
Ok, I'll see what I can do. Can't promise anything for sure, unfortunately...
@justin808 I've forked the gem and tried to reproduce the issue. But it seems that ReactOnRails creates and re-registers the Redux store before every component is rendered on the server. It always calls storeGenerator(reduxProps, railsContext) at https://github.com/shakacode/react_on_rails/blob/master/app/helpers/react_on_rails_helper.rb#L345. The store generator returns a new store created with combineReducers and createStore. So it seems store state cannot be shared across components rendered on the server.
To test it a bit more I added console.log('new store!') to https://github.com/shakacode/react_on_rails/blob/master/spec/dummy/client/app/stores/SharedReduxStore.jsx#L12. Here is the console output from the server for http://0.0.0.0:5000/server_side_hello_world_shared_store:
[SERVER] new store!
[SERVER] RENDERED ReduxSharedStoreApp to dom node with id: ReduxSharedStoreApp-react-component-0 with railsContext: {"inMailer":false,"i18nLocale":"en","i18nDefaultLocale":"en","href":"http://0.0.0.0:5000/server_side_hello_world_shared_store","location":"/server_side_hello_world_shared_store","scheme":"http","host":"0.0.0.0","port":5000,"pathname":"/server_side_hello_world_shared_store","search":null,"httpAcceptLanguage":"ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4","somethingUseful":"REALLY USEFUL","serverSide":true}
[SERVER] This is a script:"</div>"(/script> <script>alert('WTF1')(/script>
[SERVER] Script2:"</div>"(/script xx> <script>alert('WTF2')(/script xx>
[SERVER] Script3:"</div>"(/script xx> <script>alert('WTF3')(/script xx>
[SERVER] Script4"</div>"(/script <script>alert('WTF4')(/script>
[SERVER] Script5:"</div>"(/script> <script>alert('WTF5')(/script>
[SERVER] railsContext.serverSide is truehttps://github.com/shakacode/react_on_rails/issues/774
[SERVER] new store!
[SERVER] RENDERED ReduxSharedStoreApp to dom node with id: ReduxSharedStoreApp-react-component-1 with railsContext: {"inMailer":false,"i18nLocale":"en","i18nDefaultLocale":"en","href":"http://0.0.0.0:5000/server_side_hello_world_shared_store","location":"/server_side_hello_world_shared_store","scheme":"http","host":"0.0.0.0","port":5000,"pathname":"/server_side_hello_world_shared_store","search":null,"httpAcceptLanguage":"ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4","somethingUseful":"REALLY USEFUL","serverSide":true}
[SERVER] This is a script:"</div>"(/script> <script>alert('WTF1')(/script>
[SERVER] Script2:"</div>"(/script xx> <script>alert('WTF2')(/script xx>
[SERVER] Script3:"</div>"(/script xx> <script>alert('WTF3')(/script xx>
[SERVER] Script4"</div>"(/script <script>alert('WTF4')(/script>
[SERVER] Script5:"</div>"(/script> <script>alert('WTF5')(/script>
[SERVER] railsContext.serverSide is true
Two components were rendered in the same js_context, but the store was recreated for each render. So it seems I can't reproduce the issue.
I think the case I've mentioned in #774 - "As I can remember I've even experienced an issue when store state from previous request was affecting next request. At least I thought that was the reason for my bug that time..." - was when I was creating the store manually from the component generator function.
If you insist that the issue still exists, can you please show how to reproduce it?
@udovenko For any stores being registered, they are 100% created new. I'm concerned that you could have a store that sticks around from the previous request. And the component rendering code might be accidentally using that store by a different name.
So to reproduce:
Register 2 stores, A & B.
Run one request that uses store A
Run a second request that uses store B. In the code that uses store B, print the contents of all stores. You could put a debug line somewhere that prints out at least the keys of hydratedStores from this file.
If Store A's values are still there, then we have an issue.
Keep in mind that if one is careful to hydrate all the used stores for a given request, then no bug would ever be seen. However, there's certainly some extra memory that could be GC'd.
Even if the store sticks around from the previous request, https://github.com/shakacode/react_on_rails/blob/master/app/helpers/react_on_rails_helper.rb#L346 will replace it with a new instance anyway. Am I missing something?
@justin808 Ok, I see what you mean. The purpose of clearing stores left over from a previous request is generally GC only, because they cannot affect component rendering.
Or they can affect component rendering due to a developer mistake...
@justin808 PR #785
|
gharchive/issue
| 2017-03-30T08:17:10 |
2025-04-01T06:40:23.078527
|
{
"authors": [
"justin808",
"udovenko"
],
"repo": "shakacode/react_on_rails",
"url": "https://github.com/shakacode/react_on_rails/issues/780",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
256492192
|
CSS Modules SSR doesn't output anything
I can see the class being applied, but the CSS is not there. I'm using the server config only:
https://github.com/shakacode/react-webpack-rails-tutorial/blob/master/client/webpack.server.rails.build.config.js#L55-L65
If you want personalized help on this, please consider our Coaching Plan.
Your question is specific to your project rather than this project, so it's best handled privately.
|
gharchive/issue
| 2017-09-10T07:45:42 |
2025-04-01T06:40:23.080727
|
{
"authors": [
"chhuang",
"justin808"
],
"repo": "shakacode/react_on_rails",
"url": "https://github.com/shakacode/react_on_rails/issues/931",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1715019588
|
🛑 Shakedown Social - Uploads is down
In f95420d, Shakedown Social - Uploads (https://files.shakedown.social/accounts/avatars/109/357/389/718/265/462/original/dd80d2c95e48cfd2.jpeg) was down:
HTTP code: 403
Response time: 287 ms
Resolved: Shakedown Social - Uploads is back up in 3f33bdd.
|
gharchive/issue
| 2023-05-18T05:43:37 |
2025-04-01T06:40:23.083593
|
{
"authors": [
"clifff"
],
"repo": "shakedown-social/upptime",
"url": "https://github.com/shakedown-social/upptime/issues/5526",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
213477472
|
spread props
Say I have a bunch of props to spread like in JSX; how can I achieve that? The following doesn't work:
const props = `x=${x} y=${y}`
return bel`<div ${props} />`
I believe you can just do
const props = {x: 1, y: 2}
return bel`<div ${props} />`
Thank you!
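For readers landing here, a runnable expansion of the accepted answer; the attribute names are chosen for illustration only:
const bel = require('bel')

// Run in a browser (or with a DOM shim): bel returns a real DOM element,
// and an interpolated object in the tag position is spread as attributes.
const props = { class: 'box', 'data-x': 1, 'data-y': 2 }
const el = bel`<div ${props}>hello</div>`

console.log(el.outerHTML)
// expected: <div class="box" data-x="1" data-y="2">hello</div>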
|
gharchive/issue
| 2017-03-10T23:16:01 |
2025-04-01T06:40:23.086374
|
{
"authors": [
"itajaja",
"timwis"
],
"repo": "shama/bel",
"url": "https://github.com/shama/bel/issues/66",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
539736619
|
Update pt-br translation
Short description of what this resolves:
The current translation to Portuguese (Brazil) is all machine-translated nonsense.
I have translated all the strings from the original English file.
Nice job, Rodrigo.
|
gharchive/pull-request
| 2019-12-18T15:02:39 |
2025-04-01T06:40:23.087467
|
{
"authors": [
"erickvils",
"rodrigost23"
],
"repo": "shanalikhan/code-settings-sync",
"url": "https://github.com/shanalikhan/code-settings-sync/pull/1105",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
353223164
|
Why is execution time much longer than with the original DataSource?
Using the same simple SQL, the original DataSource's execution time is about 8 ms per request,
but after replacing the DataSource with Sharding-JDBC, the execution time increases to about 27 ms per request. I logged the execution times and found that SS only selects from one of the ten tables; SS routing takes about 2 ms and SS execution takes about 2 ms. What accounts for the other 10 ms???
Using id hash sharding with 10 sub-tables; the test SQL is SELECT id, name FROM i_test where id = "1"
sharding jdbc core 2.0.3
Thank you
Could you explain why you closed the issue you created yourself so quickly, without giving any reason?
The problem was fixed by myself; it is not an issue of SS. Thank you for the reply.
|
gharchive/issue
| 2018-08-23T04:49:04 |
2025-04-01T06:40:23.105127
|
{
"authors": [
"baisijian",
"terrymanu"
],
"repo": "sharding-sphere/sharding-sphere",
"url": "https://github.com/sharding-sphere/sharding-sphere/issues/1173",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
260915560
|
NullPointerException when querying sharded tables in a loop
Environment:
sharding-jdbc 1.5.4
mybatis 3.3.0
mybatis-spring 1.2.3
A table has been sharded with sharding-jdbc.
Configuration:
Problem description:
Running the query in the pseudocode below, the first loop iteration works fine, but the second iteration throws a NullPointerException:
Pseudocode
for(String id : voucherGroup){
Map<String, Object> map = new HashMap<String, Object>();
map.put("periodId", id);
int count = accountEntryMapper.selectCountByPeriodId(map);
}
SQL
Exception
Please provide a complete reproduction demo on GitHub so we can follow up and debug.
No version number was provided; I suspect this is caused by an outdated version. Please try the latest version. If the problem persists, please provide a reproducible demo. I'm closing this issue for now.
Since there has been no feedback for a long time, the available information is not enough to locate the problem. I can only suspect it is the same issue as #387, so I'm closing this for now.
|
gharchive/issue
| 2017-09-27T09:57:59 |
2025-04-01T06:40:23.109425
|
{
"authors": [
"haocao",
"panhaicheng",
"terrymanu"
],
"repo": "shardingjdbc/sharding-jdbc",
"url": "https://github.com/shardingjdbc/sharding-jdbc/issues/389",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1955033761
|
(New) Transactions
On the backend:
- Implement transactions
so that CRUD works correctly
This might not actually be needed at the moment...
|
gharchive/issue
| 2023-10-20T21:28:51 |
2025-04-01T06:40:23.115163
|
{
"authors": [
"sharin-sushi"
],
"repo": "sharin-sushi/0016go_next_relation",
"url": "https://github.com/sharin-sushi/0016go_next_relation/issues/40",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2060762421
|
Allow copying code snippets with the cursor
Right now on the website, under Getting Started, if I want to copy text I can do it via the copy button:
It would be nice if we could copy the text by marking it with the mouse as well, for example:
Thanks Tamir. We need to rewrite the documentation section anyway :)
But it's a very good suggestion.
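A speculative sketch of what the requested behavior might require, assuming the snippet container currently suppresses selection via CSS; the selector is a guess, not sharkio's actual markup:
// Re-enable text selection on code snippets so copy-by-mouse works.
document.querySelectorAll('pre code').forEach((el) => {
  el.style.userSelect = 'text';
});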
|
gharchive/issue
| 2023-12-30T14:30:04 |
2025-04-01T06:40:23.128377
|
{
"authors": [
"Tamir198",
"idodav"
],
"repo": "sharkio-dev/sharkio",
"url": "https://github.com/sharkio-dev/sharkio/issues/358",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1654780485
|
RFLA implementation not found
Hello, I couldn't find any RFLA-related implementation in the codebase. For the RFLA results in the paper (last row of TABLE 6), what is the base model?
Here's the official implementation of RFLA: https://github.com/Chasel-Tsui/mmdet-rfla
|
gharchive/issue
| 2023-04-05T01:16:44 |
2025-04-01T06:40:23.131282
|
{
"authors": [
"fuqianya",
"shaunyuan22"
],
"repo": "shaunyuan22/SODA-mmdetection",
"url": "https://github.com/shaunyuan22/SODA-mmdetection/issues/5",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
1122493573
|
Add the Fanava Card payment gateway
I wanted to use the Fanava Card payment gateway, but this gateway is not available in your package.
This was resolved in the following pull request:
https://github.com/shetabit/multipay/pull/131
|
gharchive/issue
| 2022-02-02T23:30:49 |
2025-04-01T06:40:23.233758
|
{
"authors": [
"honarkar98"
],
"repo": "shetabit/multipay",
"url": "https://github.com/shetabit/multipay/issues/126",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
195512131
|
Critical Error in authcheck.php
In authcheck.php the bot tries to get 'corpTicker' from the JSON, but it should be 'ticker'. This caused all my tickers to be removed. I fixed it by replacing
$corpTicker = (string)$corporationDetails['corpTicker'];
on line 335
with:
$corpTicker = (string)$corporationDetails['ticker'];
Will push something out now, thanks!
|
gharchive/issue
| 2016-12-14T11:55:54 |
2025-04-01T06:40:23.235732
|
{
"authors": [
"eifelgamedev",
"shibdib"
],
"repo": "shibdib/Dramiel",
"url": "https://github.com/shibdib/Dramiel/issues/108",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1971604261
|
Help: SFT training hangs on 2 machines with 8 GPUs
SFT training hangs when running on 2 machines with 8 GPUs.
Training PT and SFT on a single machine with 4 GPUs works without any problems, but when testing distributed training on 2 machines with 8 GPUs, it hangs during SFT.
Please help! Could someone take a look at what might be causing this?!
The exact scripts and logs are below:
Script (master node):
CUDA_VISIBLE_DEVICES=0,1,2,3 \
torchrun \
--nproc_per_node 4 \
--nnodes 2 \
--master_addr 10.130.1.109 \
--master_port 7860 \
--node_rank 0 \
supervised_finetuning.py \
--model_type bloom \
--model_name_or_path merged-pt \
--train_file_dir ./data/finetune \
--validation_file_dir ./data/finetune \
--per_device_train_batch_size 4 \
--per_device_eval_batch_size 4 \
--do_train \
--do_eval \
--use_peft True \
--fp16 \
--max_train_samples 1000 \
--max_eval_samples 10 \
--num_train_epochs 1 \
--learning_rate 2e-5 \
--warmup_ratio 0.05 \
--weight_decay 0.05 \
--logging_strategy steps \
--logging_steps 10 \
--eval_steps 50 \
--evaluation_strategy steps \
--save_steps 500 \
--save_strategy steps \
--save_total_limit 3 \
--gradient_accumulation_steps 1 \
--preprocessing_num_workers 1 \
--output_dir outputs-sft-v1 \
--overwrite_output_dir \
--ddp_timeout 30000 \
--logging_first_step True \
--target_modules all \
--lora_rank 8 \
--lora_alpha 16 \
--lora_dropout 0.05 \
--torch_dtype float16 \
--device_map auto \
--report_to tensorboard \
--ddp_find_unused_parameters False \
--gradient_checkpointing True
Script (worker node):
CUDA_VISIBLE_DEVICES=0,1,2,3 \
torchrun \
--nproc_per_node 4 \
--nnodes 2 \
--master_addr 10.130.1.109 \
--master_port 7860 \
--node_rank 1 \
supervised_finetuning.py \
...<same as above>...
Log (master node):
...<omitted>...
labels: [tensor([ -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, 99464, 2],
device='cuda:0'), tensor([ -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, 6768, 7786, 130015, 2],
device='cuda:0'), tensor([ -100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, -100, -100, -100, -100, -100, -100, -100, -100,
-100, 41381, 355, 9759, 8589, 12402, 1533, 1616, 2386,
15388, 1570, 76353, 11111, 706, 9602, 7786, 2269, 4587,
32622, 355, 21920, 67956, 1262, 21397, 74869, 9110, 19471,
25011, 355, 706, 4198, 2405, 8107, 11812, 175337, 420,
2293, 9759, 8589, 12402, 1570, 29434, 8967, 355, 100006,
17549, 9759, 8589, 11575, 9759, 8589, 6167, 355, 90899,
44498, 7640, 77684, 355, 67517, 5731, 17846, 5197, 11111,
246141, 355, 11225, 168726, 6167, 4007, 117443, 420, 2],
device='cuda:0')]
2023-11-01 10:40:24.574 | DEBUG | __main__:main:1279 - Decode input_ids[0]: <pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad>A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.</s>USER: 原发性巨球蛋白血症的治愈率是多少? ASSISTANT:40%</s>
2023-11-01 10:40:24.579 | DEBUG | __main__:main:1282 - Decode labels[0]: <pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad><pad>40%</s>
trainable params: 3,145,728 || all params: 562,360,320 || trainable%: 0.5593794384354857
trainable params: 3,145,728 || all params: 562,360,320 || trainable%: 0.5593794384354857
2023-11-01 10:40:24.722 | INFO | __main__:main:1274 - *** Train ***
2023-11-01 10:40:24.722 | INFO | __main__:main:1274 - *** Train ***
Log (worker node):
...<omitted>...
The argument `trust_remote_code` is to be used with Auto classes. It has no effect here and is ignored.
2023-11-01 10:40:18.920 | DEBUG | __main__:main:1097 - A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.</s>USER: 治疗阳痿吃什么药呢?,性生活一直很正常的,但是这段时间感觉性欲变低了,有时勃起都感觉很困难,试过许多的方法都没效果,听朋友说我这种情况可能会是早泄,想知道治疗早泄的药物? ASSISTANT:男子早泄、早泄病症的再次发生,多由恣情纵欲,或青年误犯性交,至命门火衰,精气虚寒;或思量忧郁,伤损心脾;或因恐惧伤肾,也有因湿热下注,宗筋弛而痿的。但主要是肾阳虚衰而痿。肾阳为那身阳气之根本,有温煦形体,蒸化水液,增进围产生长发育等功能。肾阳虚衰则温煦失责,气化无权。因而再次发生畏寒肢冷,性机能减退。故见男子早泄不举或不坚,且伴发头晕目眩。</s>
The argument `trust_remote_code` is to be used with Auto classes. It has no effect here and is ignored.
2023-11-01 10:40:20.649 | INFO | __main__:main:1213 - Fine-tuning method: LoRA(PEFT)
2023-11-01 10:40:20.649 | INFO | __main__:main:1218 - Init new peft model
2023-11-01 10:40:20.650 | INFO | __main__:main:1227 - Peft target_modules: ['dense', 'dense_4h_to_h', 'dense_h_to_4h', 'query_key_value']
2023-11-01 10:40:20.650 | INFO | __main__:main:1228 - Peft lora_rank: 8
2023-11-01 10:40:20.947 | INFO | __main__:main:1213 - Fine-tuning method: LoRA(PEFT)
2023-11-01 10:40:20.947 | INFO | __main__:main:1218 - Init new peft model
2023-11-01 10:40:20.947 | INFO | __main__:main:1213 - Fine-tuning method: LoRA(PEFT)
2023-11-01 10:40:20.947 | INFO | __main__:main:1218 - Init new peft model
2023-11-01 10:40:20.947 | INFO | __main__:main:1227 - Peft target_modules: ['dense', 'dense_4h_to_h', 'dense_h_to_4h', 'query_key_value']
2023-11-01 10:40:20.947 | INFO | __main__:main:1228 - Peft lora_rank: 8
2023-11-01 10:40:20.947 | INFO | __main__:main:1227 - Peft target_modules: ['dense', 'dense_4h_to_h', 'dense_h_to_4h', 'query_key_value']
2023-11-01 10:40:20.947 | INFO | __main__:main:1228 - Peft lora_rank: 8
2023-11-01 10:40:20.948 | INFO | __main__:main:1213 - Fine-tuning method: LoRA(PEFT)
2023-11-01 10:40:20.948 | INFO | __main__:main:1218 - Init new peft model
2023-11-01 10:40:20.949 | INFO | __main__:main:1227 - Peft target_modules: ['dense', 'dense_4h_to_h', 'dense_h_to_4h', 'query_key_value']
2023-11-01 10:40:20.949 | INFO | __main__:main:1228 - Peft lora_rank: 8
trainable params: 3,145,728 || all params: 562,360,320 || trainable%: 0.5593794384354857
2023-11-01 10:40:24.389 | INFO | __main__:main:1274 - *** Train ***
trainable params: 3,145,728 || all params: 562,360,320 || trainable%: 0.5593794384354857
trainable params: 3,145,728 || all params: 562,360,320 || trainable%: 0.5593794384354857
trainable params: 3,145,728 || all params: 562,360,320 || trainable%: 0.5593794384354857
2023-11-01 10:40:24.690 | INFO | __main__:main:1274 - *** Train ***
2023-11-01 10:40:24.695 | INFO | __main__:main:1274 - *** Train ***
2023-11-01 10:40:24.709 | INFO | __main__:main:1274 - *** Train ***
GPU status as reported by nvidia-smi:
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 515.65.01 Driver Version: 515.65.01 CUDA Version: 11.7 |
|-------------------------------+----------------------+----------------------+
| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|===============================+======================+======================|
| 0 Tesla V100S-PCI... On | 00000000:00:0D.0 Off | 0 |
| N/A 41C P0 56W / 250W | 1917MiB / 32768MiB | 100% Default |
| | | N/A |
+-------------------------------+----------------------+----------------------+
| 1 Tesla V100S-PCI... On | 00000000:00:0E.0 Off | 0 |
| N/A 39C P0 51W / 250W | 1917MiB / 32768MiB | 100% Default |
| | | N/A |
+-------------------------------+----------------------+----------------------+
| 2 Tesla V100S-PCI... On | 00000000:00:0F.0 Off | 0 |
| N/A 39C P0 53W / 250W | 1917MiB / 32768MiB | 100% Default |
| | | N/A |
+-------------------------------+----------------------+----------------------+
| 3 Tesla V100S-PCI... On | 00000000:00:10.0 Off | 0 |
| N/A 40C P0 53W / 250W | 1917MiB / 32768MiB | 100% Default |
| | | N/A |
+-------------------------------+----------------------+----------------------+
+-----------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=============================================================================|
| 0 N/A N/A 4049068 C ...iant/anaconda3/bin/python 1913MiB |
| 1 N/A N/A 4049069 C ...iant/anaconda3/bin/python 1913MiB |
| 2 N/A N/A 4049070 C ...iant/anaconda3/bin/python 1913MiB |
| 3 N/A N/A 4049071 C ...iant/anaconda3/bin/python 1913MiB |
+-----------------------------------------------------------------------------+
It hangs at line 1286:
train_result = trainer.train(resume_from_checkpoint=checkpoint)
A communication problem? I don't know your environment; for multi-node multi-GPU training you can use deepspeed. Please share the output of ds_report.
I'm now doing full-parameter SFT on a single machine with 6 GPUs, and it hangs after saving the weights.
|
gharchive/issue
| 2023-11-01T02:57:29 |
2025-04-01T06:40:23.245823
|
{
"authors": [
"mymong",
"nuoma",
"shibing624"
],
"repo": "shibing624/MedicalGPT",
"url": "https://github.com/shibing624/MedicalGPT/issues/250",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
1501404944
|
CI: Add MacOs to runs-on, add rust fmt check
#441
ld: library not found for -lm
This means the linker could not find libm. Maybe we don't need the -lm option in runner.rs on macOS?
Merged. Nice try :-)
|
gharchive/pull-request
| 2022-12-17T14:10:46 |
2025-04-01T06:40:23.274292
|
{
"authors": [
"AI-Mozi",
"yhara"
],
"repo": "shiika-lang/shiika",
"url": "https://github.com/shiika-lang/shiika/pull/442",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2295193010
|
[FEAT] Edit issue labels
Summary
Edit the issue labels
close
|
gharchive/issue
| 2024-05-14T11:37:38 |
2025-04-01T06:40:23.291867
|
{
"authors": [
"mrs1669"
],
"repo": "shilokuma-inc/IOSTemplateApp",
"url": "https://github.com/shilokuma-inc/IOSTemplateApp/issues/20",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
365618695
|
Getting all connected camera devices
Is there any way to get a list of all connected camera devices without using AForge?
Since OpenCV is cross-platform, these kinds of functions are not supported, but maybe someone here has an idea.
I can't use AForge due to license issues.
I have been using DirectShowLib; however, it doesn't really show which cameras are connected, but rather which camera drivers are installed. In my case it doesn't really matter if the user picks the wrong camera, so I didn't try to see if there is a way to check if the camera is connected. Going to check if there is a way to do it though.
There is a way using the Microsoft Expression Encoder (https://www.microsoft.com/en-us/download/details.aspx?id=27870)
var vidDevices = EncoderDevices.FindDevices(EncoderDeviceType.Video);
It's not what I am looking for, but it may help with your issue.
I created a simple device enumerator. Next step: Get friendly device names.
https://github.com/thohemp/OpenCVSharpCameraDeviceEnumerator
The following code gives you a list of connected devices and their supported configurations using MediaFoundation:
https://github.com/vvvv/VL.OpenCV/blob/master/src/VideoInInfo.cs
Hope that helps
So I got around to updating from v.3 => v.4.........now I just lost ALL my Cameras 😢
|
gharchive/issue
| 2018-10-01T19:58:54 |
2025-04-01T06:40:23.297636
|
{
"authors": [
"XamtasticPenn",
"ravazquez",
"thohemp"
],
"repo": "shimat/opencvsharp",
"url": "https://github.com/shimat/opencvsharp/issues/538",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
228580536
|
make '/remote/system returns 200 test' work
Hi,
Just jumped in and don't know my way around yet, but this seems to make the failing test work.
Coverage remained the same at 100.0% when pulling 1af1482ea544c44fb767ee4cb6afb674b43ce47e on alandyer:test-failing-0.1.7 into 35a080cf9264de5ccfcf5459b1592d2a92e6005b on shinyscorpion:master.
Coverage remained the same at 100.0% when pulling 39b1632d90ab42f77b4c97df2752c63b7554f29a on alandyer:test-failing-0.1.7 into 35a080cf9264de5ccfcf5459b1592d2a92e6005b on shinyscorpion:master.
|
gharchive/pull-request
| 2017-05-15T00:11:23 |
2025-04-01T06:40:23.353396
|
{
"authors": [
"alandyer",
"coveralls"
],
"repo": "shinyscorpion/wobserver",
"url": "https://github.com/shinyscorpion/wobserver/pull/39",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
2511187289
|
enhancement: Move player to another channel
I'm not quite sure if this is already possible, but is there a method for moving the bot / connected player to another channel?
As of now, it seems leaveVoiceChannel() and joinVoiceChannel() are the only ways exposed in the library (I'm not familiar with the Lavalink protocol, so manually doing this with the internal methods is out of reach for me).
It would be nice to have an easy-to-use function like moveVoiceChannel(), or any other method to change channels without interrupting the player (e.g. changing "connection.channelId" and calling "updateConnection", or simply connection.update({channelId: <id>})).
UPDATE: After a long while of toying about, I found that using connection.setStateUpdate() to update the channelId, setting the connection to State.RECONNECTING and calling connection.connect() worked! I still wish this was one function though, so I am updating the issue.
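A sketch of that workaround as a single function; the method and enum names are taken from the issue text and assumed, not verified against Shoukaku's current API:
const { Constants } = require('shoukaku'); // import shape assumed

async function moveVoiceChannel(connection, channelId) {
  // Point the tracked connection at the new channel (payload shape assumed).
  connection.setStateUpdate({ channel_id: channelId });
  // Mark the connection as reconnecting so connect() runs the handshake again.
  connection.state = Constants.State.RECONNECTING;
  // Re-run the voice connection handshake without destroying the player.
  await connection.connect();
}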
|
gharchive/issue
| 2024-09-06T21:00:10 |
2025-04-01T06:40:23.355673
|
{
"authors": [
"tehtnaz"
],
"repo": "shipgirlproject/Shoukaku",
"url": "https://github.com/shipgirlproject/Shoukaku/issues/199",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
999705650
|
Use tekton automation to generate operatorhub release artifacts
Follow Tekton's example to use automation to generate operatorhub release artifacts
@adambkaplan
@sbose78
https://github.com/tektoncd/operator/pull/307
Fixed by #27
|
gharchive/issue
| 2021-09-17T20:41:07 |
2025-04-01T06:40:23.359530
|
{
"authors": [
"adambkaplan",
"gabemontero"
],
"repo": "shipwright-io/operator",
"url": "https://github.com/shipwright-io/operator/issues/21",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
189882013
|
[1.12] filtering, warning and formatting fixes
Updated reactable to latest version to prevent console warnings
Fixed filtering on service inspect page
Fixed node column on service inspect page
Removed unnecessary console output for auth saga
Depends on #904
LGTM
|
gharchive/pull-request
| 2016-11-16T23:24:10 |
2025-04-01T06:40:23.361095
|
{
"authors": [
"ehazlett",
"tombee"
],
"repo": "shipyard/shipyard",
"url": "https://github.com/shipyard/shipyard/pull/906",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1802113408
|
Update protobuf@8.0
Build protobuf@8.0
:beers: @BrewTestBot has triggered a merge.
|
gharchive/pull-request
| 2023-07-13T04:02:45 |
2025-04-01T06:40:23.364474
|
{
"authors": [
"shivammathur"
],
"repo": "shivammathur/homebrew-extensions",
"url": "https://github.com/shivammathur/homebrew-extensions/pull/2905",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2274546164
|
Update grpc@8.2
Build grpc@8.2
:beers: @BrewTestBot has triggered a merge.
|
gharchive/pull-request
| 2024-05-02T04:02:24 |
2025-04-01T06:40:23.365530
|
{
"authors": [
"shivammathur"
],
"repo": "shivammathur/homebrew-extensions",
"url": "https://github.com/shivammathur/homebrew-extensions/pull/3919",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2051473800
|
Update php@8.4
Build php@8.4
:beers: @BrewTestBot has triggered a merge.
|
gharchive/pull-request
| 2023-12-21T00:09:57 |
2025-04-01T06:40:23.366814
|
{
"authors": [
"shivammathur"
],
"repo": "shivammathur/homebrew-php",
"url": "https://github.com/shivammathur/homebrew-php/pull/2170",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1368877968
|
Expose ParseTreeNode
Exposes ParseTreeNode to allow full matching of ParseTrees. Before, the outputs of parsing couldn't be fully accessed because it was impossible to construct variants of ParseTreeNode to match against. Now, variants can be constructed and used for matching on the ParseTreeNode enum, fully exposing ParseTree.
Coverage remained the same at 92.829% when pulling 8b7d754cfd695fae9a40a2171933cbaa172f00db on Axiomatic-Mind:main into 895aaa792c3d2dcb1f60d2db262eef3aab3970d1 on shnewto:main.
|
gharchive/pull-request
| 2022-09-11T08:50:47 |
2025-04-01T06:40:23.417069
|
{
"authors": [
"Axiomatic-Mind",
"coveralls"
],
"repo": "shnewto/bnf",
"url": "https://github.com/shnewto/bnf/pull/106",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1506425894
|
[Bug] false resolving of home route
The way the home route is resolved right now is not really safe.
https://github.com/shopware/frontends/blob/acd70e94cd252f05fb8c0f2032bac66b090d1934/packages/composables/src/useNavigationSearch.ts#L16
I see that there is a TODO-comment, but it doesn't really explain what the problem is.
For example we have multiple root-categories, one for the "normal" category tree, one for the footer-navigation and so on.
In our case, we do not get the correct root-category (as configured in the saleschannel) as the home category, but the footer category.
Hey @niklaswolf ! thanks for studying the code 💪🏻
You are right; the problem occurs when there is more than one root category in the repository.
It's been changed to something more bullet-proof:
Currently, for resolving the / path, which normally means the "homepage", we use sessionContext.value?.salesChannel?.navigationCategoryId from the context, which is the wrapper category for the main navigation:
https://github.com/shopware/frontends/commit/bf5549885516541e237c60ce1a2fda00080e1aff#diff-4cf41b529fdbc9ede59652256198b99eb400c04017588aaca4c912ce2bf3b7fbR23
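A minimal sketch of the behavior described above (not the actual composable source):
// "/" resolves to the sales channel's configured root navigation category
// instead of whichever root category the repository happens to return first.
function resolveHomePage(routePath, sessionContext) {
  if (routePath === '/') {
    return { categoryId: sessionContext?.salesChannel?.navigationCategoryId };
  }
  return null; // fall through to normal seo-url resolution
}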
Leave your feedback please, if it will work for you 🙏🏻
@mkucmus yes, that's exactly what was needed. With this change, the configuration from the saleschannel is respected. Thanks! :)
I'll therefore close this issue.
|
gharchive/issue
| 2022-12-21T14:53:04 |
2025-04-01T06:40:23.451440
|
{
"authors": [
"mkucmus",
"niklaswolf"
],
"repo": "shopware/frontends",
"url": "https://github.com/shopware/frontends/issues/32",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
729395238
|
Redirect loop when changing e-mail during guest checkout
Description
Steps to reproduce:
Put random item into cart
Start checkout
Choose guest checkout on /checkout/register page
On /checkout/confirm notice that your email address is wrong
Click browser back button
Change email in form
Submit
Expected behaviour:
A new guest account/session is created with the newly entered email and the customer is again on /checkout/confirm
Actual behaviour:
A redirect loop occurs
Possible Solution
My proposed solution to break this cycle would be to change the first check in \Shopware\Storefront\Controller\RegisterController::register as follows:
if ($context->getCustomer() && !$context->getCustomer()->getGuest()) {
return $this->redirectToRoute('frontend.account.home.page');
}
A more detailed look into the handling of the various steps with regards to guest account handling couldn't hurt though.
Additional context
The redirect is triggered in \Shopware\Storefront\Controller\RegisterController::register, where if ($context->getCustomer()) leads to a redirect to frontend.account.home.page regardless of whether it's a guest account or not. \Shopware\Storefront\Controller\AccountProfileController::index (the target action), however, starts with $this->denyAccessUnlessLoggedIn(); (default parameter $allowGuest = false), which redirects to the login page, but \Shopware\Storefront\Controller\AuthController::loginPage again only checks for if ($context->getCustomer()) and redirects back to the account page - the endless loop begins. (The redirectTo parameter of the login page is set to the account page when the \Shopware\Core\Checkout\Cart\Exception\CustomerNotLoggedInException is handled in \Shopware\Storefront\Framework\Routing\StorefrontSubscriber::customerNotLoggedInHandler)
Added in Shopware issue tracker: https://issues.shopware.com/issues/NEXT-11790
Hey @lars-feyerabend, thank you for reporting this, and sorry for the late response.
I'm closing this issue, as it landed in our triage and we cannot reproduce it on the current Shopware release.
This issue has since been fixed by Shopware. https://issues.shopware.com/issues/NEXT-17938
|
gharchive/issue
| 2020-10-26T09:20:24 |
2025-04-01T06:40:23.458824
|
{
"authors": [
"lars-feyerabend",
"lernhart",
"mstegmeyer"
],
"repo": "shopware/platform",
"url": "https://github.com/shopware/platform/issues/1452",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
828978304
|
enable custom component config options to be mixed with native fields (custom sort order)
1. Why is this change necessary?
Currently, if multiple config options are defined as a mix of "input-field" and "component" elements, the "component" elements always have to be at the end of each card due to the constraints of <xsd:sequence>.
2. What does this change do, exactly?
The change enables plugin developers to define components and input-fields in arbitrary order.
3. Describe each step to reproduce the issue or behaviour.
Define the following config options:
<card>
<title lang="de-DE">Card Title</title>
<input-field type="bool">
<name>firstOption</name>
</input-field>
<component name="my-component">
<name>secondOption</name>
</component>
<input-field type="bool">
<name>thirdOption</name>
</input-field>
</card>
You will get the following error:
Encountered an error while loading the configuration:Unable to parse file ".../config.xml". Message: [ERROR 1871] Element 'input-field': This element is not expected. Expected is ( component ). (in /var/www/shopware/public/ - line 93, column 0)
4. Please link to the relevant issues (if any).
5. Checklist
[ ] I have written tests and verified that they fail without my change
[ ] I have squashed any insignificant commits
[ ] I have created a changelog file with all necessary information about my changes
[ ] I have written or adjusted the documentation according to my changes
[ ] This change has comments for package types, values, functions, and non-obvious lines of code
[x] I have read the contribution requirements and fulfil them.
Hey @netzkollektiv, could you also please add a changelog?
@J-Rahe feel free to edit the pull request :-)
@netzkollektiv if you give me the requried data i may actually do that. if you take a look into our contribution guideline, we NEED your name(can be github name) and email(can be github mail). Without these we cant merge your PR.
Please use netzkollektiv & corporate at netzkollektiv.com.
Hello,
thank you for creating this pull request.
I have opened an issue on our Issue Tracker for you. See the issue link: https://issues.shopware.com/issues/NEXT-14211
Please use this issue to track the state of your pull request.
Hey @netzkollektiv, thanks for the PR, we have just merged it : )
(And the next time please provide the changelog from the beginning, you have an example and an template in the changelog folder and there is the bin/console changelog:create command to help you a bit).
|
gharchive/pull-request
| 2021-03-11T09:44:43 |
2025-04-01T06:40:23.467220
|
{
"authors": [
"J-Rahe",
"netzkollektiv",
"shopwareBot"
],
"repo": "shopware/platform",
"url": "https://github.com/shopware/platform/pull/1699",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2429540652
|
Only select used fields of translation tables
1. Why is this change necessary?
Previously anytime a translated field was part of a query a full sub-select of the respective _translation table was joined to the main query. A "full sub-select" means a SELECT of all columns of the _translation table. This can result in a lot of overhead because depending on the storage engine of the database, e.g. mediumtext fields like product_translation.description require more page reads (i.e. disk access).
In complex, high cardinality queries, especially when they involve e.g. the product table, which doubles the number of joined translation tables due to requiring entity inheritance, this overhead can negatively influence the query execution plans. In fact, the same query (filtering orders based on their line items and respective products) that took a reasonable time in Shopware 6.5 ballooned to several hundred seconds with Shopware 6.6 due to a correction in filter evaluation, which leads to more joins of sub-selects in general.
2. What does this change do, exactly?
This PR reduces the selected fields of joined _translation tables to those that are actually needed for filtering or resolving TranslatedFields. It does not affect queries of _translation tables that are needed for loading translation associations.
This PR renames fallback_ join aliases in the generated queries to use override_ aliases, because their values in fact override the default translations, if available. This is just cosmetics, but IMHO makes the code (and generated queries) easier to understand.
This PR adds some more escaping to the queries that join _translation tables.
This PR currently does not add any new tests. ~I would like to get feedback first on the general approach as well as what kind of tests (unit, integration) are expected for these changes.~ All tests added in https://github.com/shopware/shopware/pull/3839 should still pass.
3. Describe each step to reproduce the issue or behaviour.
For simply seeing the difference in the generated SQL queries turn on your database's query log and send the following request to the admin API:
POST /api/search/product
{
"filter": [
{
"type": "contains",
"field": "name",
"value": "foo"
}
]
}
Generated SQL (old)
SELECT
`product`.`id`,
`product`.`product_number`,
`product`.`auto_increment`
FROM `product`
LEFT JOIN `product` `product.parent`
ON `product`.`parent_id` = `product.parent`.`id`
AND `product`.`parent_version_id` = `product.parent`.`version_id`
LEFT JOIN (
SELECT
`product.translation`.product_id,
`product.translation`.product_version_id,
`product.translation`.meta_description AS `product.translation.metaDescription`,
`product.translation`.name AS `product.translation.name`,
`product.translation`.keywords AS `product.translation.keywords`,
`product.translation`.description AS `product.translation.description`,
`product.translation`.meta_title AS `product.translation.metaTitle`,
`product.translation`.pack_unit AS `product.translation.packUnit`,
`product.translation`.pack_unit_plural AS `product.translation.packUnitPlural`,
`product.translation`.custom_search_keywords AS `product.translation.customSearchKeywords`,
`product.translation`.slot_config AS `product.translation.slotConfig`,
`product.translation`.custom_fields AS `product.translation.customFields`,
`product.translation`.created_at AS `product.translation.createdAt`,
`product.translation`.updated_at AS `product.translation.updatedAt`,
`product.translation`.product_id AS `product.translation.productId`,
`product.translation`.language_id AS `product.translation.languageId`,
`product.translation`.product_version_id AS `product.translation.productVersionId`,
`product.translation.fallback_1`.meta_description AS `product.translation.fallback_1.metaDescription`,
`product.translation.fallback_1`.name AS `product.translation.fallback_1.name`,
`product.translation.fallback_1`.keywords AS `product.translation.fallback_1.keywords`,
`product.translation.fallback_1`.description AS `product.translation.fallback_1.description`,
`product.translation.fallback_1`.meta_title AS `product.translation.fallback_1.metaTitle`,
`product.translation.fallback_1`.pack_unit AS `product.translation.fallback_1.packUnit`,
`product.translation.fallback_1`.pack_unit_plural AS `product.translation.fallback_1.packUnitPlural`,
`product.translation.fallback_1`.custom_search_keywords AS `product.translation.fallback_1.customSearchKeywords`,
`product.translation.fallback_1`.slot_config AS `product.translation.fallback_1.slotConfig`,
`product.translation.fallback_1`.custom_fields AS `product.translation.fallback_1.customFields`,
`product.translation.fallback_1`.created_at AS `product.translation.fallback_1.createdAt`,
`product.translation.fallback_1`.updated_at AS `product.translation.fallback_1.updatedAt`,
`product.translation.fallback_1`.product_id AS `product.translation.fallback_1.productId`,
`product.translation.fallback_1`.language_id AS `product.translation.fallback_1.languageId`,
`product.translation.fallback_1`.product_version_id AS `product.translation.fallback_1.productVersionId`
FROM `product_translation` `product.translation`
LEFT JOIN `product_translation` `product.translation.fallback_1`
ON `product.translation`.`product_id` = `product.translation.fallback_1`.`product_id`
AND `product.translation.fallback_1`.language_id = :languageId
AND `product.translation`.product_version_id = `product.translation.fallback_1`.product_version_id
WHERE
`product.translation`.language_id = :languageId
) `product.product_translation`
ON `product.product_translation`.`product_id` = `product`.`id`
AND `product.product_translation`.product_version_id = `product`.version_id
LEFT JOIN (
SELECT
`product.parent.translation`.product_id,
`product.parent.translation`.product_version_id,
`product.parent.translation`.meta_description AS `product.parent.translation.metaDescription`,
`product.parent.translation`.name AS `product.parent.translation.name`,
`product.parent.translation`.keywords AS `product.parent.translation.keywords`,
`product.parent.translation`.description AS `product.parent.translation.description`,
`product.parent.translation`.meta_title AS `product.parent.translation.metaTitle`,
`product.parent.translation`.pack_unit AS `product.parent.translation.packUnit`,
`product.parent.translation`.pack_unit_plural AS `product.parent.translation.packUnitPlural`,
`product.parent.translation`.custom_search_keywords AS `product.parent.translation.customSearchKeywords`,
`product.parent.translation`.slot_config AS `product.parent.translation.slotConfig`,
`product.parent.translation`.custom_fields AS `product.parent.translation.customFields`,
`product.parent.translation`.created_at AS `product.parent.translation.createdAt`,
`product.parent.translation`.updated_at AS `product.parent.translation.updatedAt`,
`product.parent.translation`.product_id AS `product.parent.translation.productId`,
`product.parent.translation`.language_id AS `product.parent.translation.languageId`,
`product.parent.translation`.product_version_id AS `product.parent.translation.productVersionId`,
`product.parent.translation.fallback_1`.meta_description AS `product.parent.translation.fallback_1.metaDescription`,
`product.parent.translation.fallback_1`.name AS `product.parent.translation.fallback_1.name`,
`product.parent.translation.fallback_1`.keywords AS `product.parent.translation.fallback_1.keywords`,
`product.parent.translation.fallback_1`.description AS `product.parent.translation.fallback_1.description`,
`product.parent.translation.fallback_1`.meta_title AS `product.parent.translation.fallback_1.metaTitle`,
`product.parent.translation.fallback_1`.pack_unit AS `product.parent.translation.fallback_1.packUnit`,
`product.parent.translation.fallback_1`.pack_unit_plural AS `product.parent.translation.fallback_1.packUnitPlural`,
`product.parent.translation.fallback_1`.custom_search_keywords AS `product.parent.translation.fallback_1.customSearchKeywords`,
`product.parent.translation.fallback_1`.slot_config AS `product.parent.translation.fallback_1.slotConfig`,
`product.parent.translation.fallback_1`.custom_fields AS `product.parent.translation.fallback_1.customFields`,
`product.parent.translation.fallback_1`.created_at AS `product.parent.translation.fallback_1.createdAt`,
`product.parent.translation.fallback_1`.updated_at AS `product.parent.translation.fallback_1.updatedAt`,
`product.parent.translation.fallback_1`.product_id AS `product.parent.translation.fallback_1.productId`,
`product.parent.translation.fallback_1`.language_id AS `product.parent.translation.fallback_1.languageId`,
`product.parent.translation.fallback_1`.product_version_id AS `product.parent.translation.fallback_1.productVersionId`
FROM `product_translation` `product.parent.translation`
LEFT JOIN `product_translation` `product.parent.translation.fallback_1`
ON `product.parent.translation`.`product_id` = `product.parent.translation.fallback_1`.`product_id`
AND `product.parent.translation.fallback_1`.language_id = :languageId
AND `product.parent.translation`.product_version_id = `product.parent.translation.fallback_1`.product_version_id
WHERE
`product.parent.translation`.language_id = :languageId
) `product.product_translation.parent`
ON `product.product_translation.parent`.`product_id` = `product.parent`.`id`
AND `product.product_translation.parent`.product_version_id = `product.parent`.version_id
WHERE
(`product`.`version_id` = :versionId)
AND ((
COALESCE(
`product.translation.fallback_1.name`,
`product.parent.translation.fallback_1.name`,
`product.translation.name`,
`product.parent.translation.name`
) LIKE :query
))
LIMIT 500
Generated SQL (new)
SELECT
`product`.`id`,
`product`.`product_number`,
`product`.`auto_increment`
FROM `product`
LEFT JOIN `product` `product.parent`
ON `product`.`parent_id` = `product.parent`.`id`
AND `product`.`parent_version_id` = `product.parent`.`version_id`
LEFT JOIN (
SELECT
`product.translation`.`product_id`,
`product.translation`.`product_version_id`,
`product.translation`.`name` AS `product.translation.name`,
`product.translation.override_1`.`name` AS `product.translation.override_1.name`
FROM `product_translation` `product.translation`
LEFT JOIN `product_translation` `product.translation.override_1`
ON `product.translation`.`product_id` = `product.translation.override_1`.`product_id`
AND `product.translation.override_1`.`language_id` = :languageId
AND `product.translation`.`product_version_id` = `product.translation.override_1`.`product_version_id`
WHERE
`product.translation`.`language_id` = :languageId
) `product.product_translation`
ON `product.product_translation`.`product_id` = `product`.`id`
AND `product.product_translation`.`product_version_id` = `product`.`version_id`
LEFT JOIN (
SELECT
`product.parent.translation`.`product_id`,
`product.parent.translation`.`product_version_id`,
`product.parent.translation`.`name` AS `product.parent.translation.name`,
`product.parent.translation.override_1`.`name` AS `product.parent.translation.override_1.name`
FROM `product_translation` `product.parent.translation`
LEFT JOIN `product_translation` `product.parent.translation.override_1`
ON `product.parent.translation`.`product_id` = `product.parent.translation.override_1`.`product_id`
AND `product.parent.translation.override_1`.`language_id` = :languageId
AND `product.parent.translation`.`product_version_id` = `product.parent.translation.override_1`.`product_version_id`
WHERE
`product.parent.translation`.`language_id` = :languageId
) `product.parent.product_translation`
ON `product.parent.product_translation`.`product_id` = `product.parent`.`id`
AND `product.parent.product_translation`.`product_version_id` = `product.parent`.`version_id`
WHERE
(`product`.`version_id` = :versionId)
AND ((
COALESCE(
`product.translation.override_1.name`,
`product.parent.translation.override_1.name`,
`product.translation.name`,
`product.parent.translation.name`
) LIKE :query
))
LIMIT 500
That simple example will usually not show a difference in performance (nor in the query execution plan). The following request is taken from one of our applications and improves drastically in execution time with the fix (we noticed a ~60x speed-up in production environments; 215s (old) vs. 3.5s (new); 20k orders, 80k order line items, 1.5k products, MySQL v8.0.34):
POST /api/search/order
{
"filter": [
{
"type": "multi",
"operator": "and",
"queries": [
{
"type": "equals",
"field": "lineItems.type",
"value": "product"
},
{
"type": "not",
"operator": "and",
"queries": [
{
"type": "equals",
"field": "lineItems.product.id",
"value": null
}
]
},
{
"type": "equalsAny",
"field": "stateMachineState.technicalName",
"value": [
"open",
"in_progress"
]
},
{
"type": "equalsAny",
"field": "transactions.stateMachineState.technicalName",
"value": [
"paid",
"refunded_partially"
]
},
{
"type": "multi",
"operator": "or",
"queries": [
{
"type": "prefix",
"field": "orderNumber",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.firstName",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.lastName",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.company",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.department",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.street",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.additionalAddressLine1",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.additionalAddressLine2",
"value": "Foo"
},
{
"type": "contains",
"field": "billingAddress.zipcode",
"value": "Foo"
},
{
"type": "contains",
"value": "Foo",
"field": "billingAddress.city"
},
{
"type": "contains",
"field": "billingAddress.country.name",
"value": "Foo"
},
{
"type": "contains",
"field": "customerComment",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.firstName",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.lastName",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.company",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.department",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.street",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.additionalAddressLine1",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.additionalAddressLine2",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.zipcode",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.city",
"value": "Foo"
},
{
"type": "contains",
"field": "deliveries.shippingOrderAddress.country.name",
"value": "Foo"
},
{
"field": "deliveries.shippingMethod.name",
"type": "contains",
"value": "Foo"
},
{
"type": "contains",
"field": "lineItems.product.name",
"value": "Foo"
},
{
"type": "contains",
"field": "lineItems.product.productNumber",
"value": "Foo"
},
{
"type": "contains",
"field": "lineItems.product.ean",
"value": "Foo"
},
{
"type": "contains",
"field": "lineItems.product.manufacturer.name",
"value": "Foo"
},
{
"type": "contains",
"field": "lineItems.product.manufacturerNumber",
"value": "Foo"
}
]
}
]
}
]
}
4. Please link to the relevant issues (if any).
n/a
5. Checklist
[x] I have rebased my changes to remove merge conflicts
[ ] I have written tests and verified that they fail without my change
[x] I have created a changelog file with all necessary information about my changes
[ ] I have written or adjusted the documentation according to my changes
[x] This change has comments for package types, values, functions, and non-obvious lines of code
[x] I have read the contribution requirements and fulfil them.
@svenmuennich Thanks for the PR, the general approach looks fine. We are slightly worried that some breakages might occur, so we would appreciate it if you tackled the tests; then we can do another round of reviews, wdyt?
@svenmuennich yep integration tests would be useful here.
Hi,
LGTM 👍
It would be good when you test the following scenarios:
Filter with multiple fields in different levels of a Criteria
Filter with multiple languages and multiple fields
Full text search via term
Full text search via criteria.queries
Mixed searches with filters and queries
I added the integration tests in a separate PR (https://github.com/shopware/shopware/pull/3839) to ensure that they cover the current behavior. I also rebased this PR accordingly.
@svenmuennich could you take a look at the phpstan failures please?
Linting looks good now ...
Sorry for the delay @svenmuennich - I was on holiday then battling with pipeline issues :D - Thanks for the improvements, it's merged now!
|
gharchive/pull-request
| 2024-07-25T09:56:03 |
2025-04-01T06:40:23.487908
|
{
"authors": [
"AydinHassan",
"OliverSkroblin",
"svenmuennich"
],
"repo": "shopware/shopware",
"url": "https://github.com/shopware/shopware/pull/3819",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2597373974
|
DRAFT: Try fix partial dataloading
The entity hydrator now only checks whether criteria.fields is filled. If it is, all entities are loaded as partial entities.
However, entities are still loaded and collections filled in the EntityReader, which is why there is also a one-time check there: $partial = !empty($criteria.fields).
See: https://github.com/shopware/shopware/issues/5114#issuecomment-2422344507
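For illustration, a minimal sketch of that one-time check; the method name on the criteria object is an assumption for the example, not necessarily the exact DAL API:
$partial = !empty($criteria->getFields());
if ($partial) {
    // hydrate rows into partial entities instead of fully typed ones
}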
Thank you for your submission! We really appreciate it. Like many open source projects, we ask that you sign our Contributor License Agreement before we can accept your contribution. Oliver Skroblin seems not to be a GitHub user; you need a GitHub account to be able to sign the CLA. If you already have a GitHub account, please add the email address used for this commit to your account. You have signed the CLA already but the status is still pending? Let us recheck it.
|
gharchive/pull-request
| 2024-10-18T12:16:58 |
2025-04-01T06:40:23.493129
|
{
"authors": [
"CLAassistant",
"OliverSkroblin"
],
"repo": "shopware/shopware",
"url": "https://github.com/shopware/shopware/pull/5169",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
201695178
|
[BUGFIX] Use encodeURIComponent to encode semicolons
Description
encodeURI does not encode semicolons, which are not allowed in cookie values; encodeURIComponent does. With encodeURI, the cookie is only stored up to the first semicolon and thus cannot be completely retrieved afterwards.
Questions
Answers
BC breaks?
no
Tests pass?
yes
Related tickets?
SW-17497
How to test?
On the JavaScript console: StorageManager.getLocalStorage().setItem("Foo", "Bar; Baz"); reload the page; StorageManager.getLocalStorage().getItem("Foo"); // -> JSON decoding exception
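For comparison, a minimal sketch of what the two functions do to a value containing a semicolon (runnable in any browser console):
encodeURI("Bar; Baz");          // "Bar;%20Baz"  - semicolon kept, cookie gets truncated
encodeURIComponent("Bar; Baz"); // "Bar%3B%20Baz" - semicolon escaped, value survives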
Hey @m-knabe-reply,
thanks for your contribution. Your change looks good to me and makes sense.
Hello,
thank you for creating this pull request.
I have opened an issue on our Issue Tracker for you. See the issue link: https://issues.shopware.com/issues/SW-17509.
Please use this issue to track the state of your pull request.
Thank you for your contribution. This issue was fixed with commit 805c11e. I will close this PR now.
|
gharchive/pull-request
| 2017-01-18T21:19:18 |
2025-04-01T06:40:23.498095
|
{
"authors": [
"htkassner",
"klarstil",
"m-knabe-reply",
"shopwareBot"
],
"repo": "shopware/shopware",
"url": "https://github.com/shopware/shopware/pull/913",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2124362851
|
Update metrics crate
The shortcut macros like counter! have been removed, and the register macros like register_counter! have been renamed to counter!.
The register macros can be used in place of the shortcut macros since the shortcut macros were just registering and dropping a metric internally anyway.
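A minimal sketch of the rename, assuming a metrics version (around 0.22) where this change landed:
use metrics::counter;

fn record_request() {
    // before: register_counter!("requests").increment(1); or the shortcut counter!("requests", 1);
    // after: the register macro took over the short name and returns a handle
    let c = counter!("requests");
    c.increment(1);
}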
I am no longer seeing the performance regression on CI or locally since https://github.com/shotover/shotover-proxy/pull/1463 was merged
|
gharchive/pull-request
| 2024-02-08T04:59:39 |
2025-04-01T06:40:23.524989
|
{
"authors": [
"rukai"
],
"repo": "shotover/shotover-proxy",
"url": "https://github.com/shotover/shotover-proxy/pull/1462",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
1269274704
|
Simplify storybook config
Why
Hey, I was checking out the repo after seeing the talk at appjs conf and I came across some storybook config that I thought could be simplified.
Congrats on launching the project, you've made something really cool here!
A lot of packages are included by default in addon-react-native-web, so you don't need them listed explicitly. I've also changed the addon to use a partial match for package names, so unless there are showtime packages you don't want to include, just @showtime is enough.
How
I updated the main.js to remove packages from the modules-to-transpile list if they are already included by default; see the sketch below.
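A rough sketch of the resulting config; the option shape follows addon-react-native-web's modulesToTranspile option, and the bare @showtime prefix match is the assumption described above:
// .storybook/main.js
module.exports = {
  addons: [
    {
      name: '@storybook/addon-react-native-web',
      options: {
        // partial match: any package whose name starts with @showtime gets transpiled
        modulesToTranspile: ['@showtime'],
      },
    },
  ],
};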
Test Plan
I ran the storybook after making changes and it seems to run fine. If you have chromatic setup (which I think you do) then the visual tests will be a good way to validate this.
Thank you @dannyhw ❤️
|
gharchive/pull-request
| 2022-06-13T11:07:17 |
2025-04-01T06:40:23.528498
|
{
"authors": [
"dannyhw",
"intergalacticspacehighway"
],
"repo": "showtime-xyz/showtime-frontend",
"url": "https://github.com/showtime-xyz/showtime-frontend/pull/1169",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1689094891
|
feat/interoperability
This PR adds a new exercise about contract interoperability.
I had to update the compiler version to the latest main for it to work.
Time spent: 3 hours
This item belongs to payment request #19A990 on OnlyDust:
from shramee to enitrat
14 items included
$500 for 1 day of work
|
gharchive/pull-request
| 2023-04-28T20:26:59 |
2025-04-01T06:40:23.532336
|
{
"authors": [
"VeryDustyBot",
"enitrat"
],
"repo": "shramee/starklings-cairo1",
"url": "https://github.com/shramee/starklings-cairo1/pull/113",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
370117930
|
Flicker effect when skipping multiple stories
Hi Team,
First of all, great work with the library. I am having a small problem with it: when I upload multiple pictures to stories (for example, 4 pictures, of which 3 have already been seen) and then click on the story section again to view the fourth one, there is a flicker across the 3 images that have already been viewed.
I don't want this skipping effect; I want it to start directly from a particular story number.
Below is the video link; you can observe the flicker effect when the progress bars are skipped:
https://drive.google.com/file/d/1kpYIjovY3n6RLWrXHHrq0gE_ItDrFToU/view?usp=drivesdk
@Kitlabs Hi, did you find a solution? I have the same problem, can you help please? Thanks.
@Kitlabs Are you using a transparent theme? That's likely the cause.
|
gharchive/issue
| 2018-10-15T11:25:47 |
2025-04-01T06:40:23.563120
|
{
"authors": [
"Kitlabs",
"ahmedmolawale",
"mehmetergul"
],
"repo": "shts/StoriesProgressView",
"url": "https://github.com/shts/StoriesProgressView/issues/16",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
223105937
|
Border-less Markdown tables
I love how easy it is to reformat the current table, or all of them, in a Markdown file, but I'm not too fond of the styling. I much prefer borderless tables.
Any chance you could add that?
There is another table formatter, https://marketplace.visualstudio.com/items?itemName=darkriszty.markdown-table-prettify, which detects the table style automatically. Would be really nice if you could do that too!
Thanks for using it, and sorry for my late reply.
I understand the request.
I wasn't expecting it, but it's a good feature!
I will consider it, perhaps in May, as an added config option.
Thank you for waiting.
I released v1.2.1 and added a borderless table configuration.
Try tableformatter.markdown.tableEdgesType = "Borderless".
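For anyone finding this later, the setting goes into VS Code's settings.json like so:
{
  "tableformatter.markdown.tableEdgesType": "Borderless"
}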
Awesome! I like it!
Thanks.
|
gharchive/issue
| 2017-04-20T15:26:03 |
2025-04-01T06:40:23.568258
|
{
"authors": [
"LeviticusMB",
"shuGH"
],
"repo": "shuGH/vscode-table-formatter",
"url": "https://github.com/shuGH/vscode-table-formatter/issues/7",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1119608066
|
Problem with date year < 1900 and year = 0000
Hello, there is a problem with dates earlier than 1900, like '1876-01-30',
and also a problem with the year "0000-00-00".
I fixed the latter with:
if ( $year === 0 || (int) $year == 0 ) {
return $excelTime;
}
fixed in 1.0.23
use raw values
$cell = "\0".'1985-01-13';
not "german time", there OS date/time/datetime format by default
yes I know it's using OS date format. But the Script is changing it automatic. So its Mixing now RAW Types and OS date. So it would be nice to tell the format for both. But this is maybe only an edge case.
if u have mixed dates <1900 and >1900, then use raw (string) values for all your dates
But the Script is changing it automatic -- setlocale temporary resets LC_NUMERIC to neгtral for correct cast numbers to strings
|
gharchive/issue
| 2022-01-31T15:52:39 |
2025-04-01T06:40:23.579334
|
{
"authors": [
"fapth",
"shuchkin"
],
"repo": "shuchkin/simplexlsxgen",
"url": "https://github.com/shuchkin/simplexlsxgen/issues/51",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
855052097
|
Question about input variables
What is the meaning of the groups and reduction_ratio input variables?
They are used to reduce the number of input channels, cutting the computation when generating the kernel; please refer to the original paper.
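For illustration, a rough sketch of how the two arguments shrink the kernel-generation cost in an involution layer; the shapes follow the original paper, not necessarily this repo's exact code:
import torch.nn as nn

channels, kernel_size, groups, reduction_ratio = 64, 7, 4, 4
# reduce: fewer channels to work with when generating the kernel
reduce = nn.Conv2d(channels, channels // reduction_ratio, kernel_size=1)
# span: one K*K kernel per group instead of per channel
span = nn.Conv2d(channels // reduction_ratio, kernel_size * kernel_size * groups, kernel_size=1)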
|
gharchive/issue
| 2021-04-10T12:02:03 |
2025-04-01T06:40:23.602146
|
{
"authors": [
"lhy823436493",
"shuuchen"
],
"repo": "shuuchen/involution.pytorch",
"url": "https://github.com/shuuchen/involution.pytorch/issues/3",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2019204849
|
Use this project with recent GPU ?
Hello, I have a 4080 (sm_89); according to the warning log, CUDA is not compatible.
Is there a way to use a more recent version of CUDA/PyTorch?
I installed the project through the requirements package.
It obviously results in a:
Regards
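For reference, a quick way to check which compute capabilities a given PyTorch build supports (an sm_89 card needs a build whose list includes it):
import torch
print(torch.__version__, torch.version.cuda)  # PyTorch version and its bundled CUDA
print(torch.cuda.get_arch_list())             # e.g. older builds stop at 'sm_75'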
same problem, no answers
In our case, we've opted for switching to a GPU compatible with CUDA11, specifically with version sm_75. While this approach is functional, it's not an optimal solution. Installing and configuring CUDA with a specific version can be challenging.
We switched to a GTX 20xx.
I would generally like to run the project somehow, even on a CPU, just to make it work, however slowly. Do I have a chance to do this?
I don't think you can run it on a CPU; TensorFlow requires a GPU. For now, I managed to run the project by changing the GPU, but I'm having issues generating the landmarks.
I managed to run it on an RTX4060.
It took me a couple hours to set up, but I am happy with the results.
Nice! Can you tell us how did you solve this problem?
|
gharchive/issue
| 2023-11-30T17:46:37 |
2025-04-01T06:40:23.628302
|
{
"authors": [
"OchotonaPrinceps",
"antoinersw",
"imxtx",
"sebaturen",
"vladgohn"
],
"repo": "sicxu/Deep3DFaceRecon_pytorch",
"url": "https://github.com/sicxu/Deep3DFaceRecon_pytorch/issues/169",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
543735143
|
Should we always set engine_options.pool_timeout to zero?
engine_options = {
'pool_size': 2,
'pool_timeout': 1,
'max_overflow': 0
},
Once the concurrency exceeds pool_size + max_overflow, the program will deadlock.
I think the reason is that the newly arriving request blocks while waiting for a connection.
Since Tornado uses a single-threaded model, this kind of blocking will cause a deadlock (until timeout).
Below is what I got from gdb, the program is waiting for a connection.
Traceback (most recent call first):
File "/usr/lib/python3.7/threading.py", line 300, in wait
gotit = waiter.acquire(True, timeout)
File "/usr/lib/python3.7/site-packages/sqlalchemy/util/queue.py", line 162, in get
self.not_empty.wait(remaining)
File "/usr/lib/python3.7/site-packages/sqlalchemy/pool/impl.py", line 117, in _do_get
return self._pool.get(wait, self._timeout)
File "/usr/lib/python3.7/site-packages/sqlalchemy/pool/base.py", line 492, in checkout
rec = pool._do_get()
File "/usr/lib/python3.7/site-packages/sqlalchemy/pool/base.py", line 760, in _checkout
fairy = _ConnectionRecord.checkout(pool)
File "/usr/lib/python3.7/site-packages/sqlalchemy/pool/base.py", line 363, in connect
return _ConnectionFairy._checkout(self)
File "/usr/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 2276, in _wrap_pool_connect
return fn()
File "/usr/lib/python3.7/site-packages/sqlalchemy/engine/base.py", line 2242, in _contextual_connect
self._wrap_pool_connect(self.pool.connect, None),
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 432, in _connection_for_bind
conn = bind._contextual_connect()
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 420, in _connection_for_bind
conn = self._parent._connection_for_bind(bind, execution_options)
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 313, in connection
return self._connection_for_bind(bind, execution_options)
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/persistence.py", line 1604, in _connections_for_states
connection = uowtransaction.transaction.connection(base_mapper)
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/persistence.py", line 374, in _organize_states_for_save
base_mapper, uowtransaction, states
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/persistence.py", line 213, in save_obj
) in _organize_states_for_save(base_mapper, states, uowtransaction):
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/unitofwork.py", line 589, in execute
uow,
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/unitofwork.py", line 422, in execute
rec.execute(self)
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 2577, in _flush
flush_context.execute()
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 2479, in flush
self._flush(objects)
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 482, in _prepare_impl
self.session.flush()
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 503, in commit
self._prepare_impl()
File "/usr/lib/python3.7/site-packages/sqlalchemy/orm/session.py", line 1036, in commit
self.transaction.commit()
File "examples/multi_databases.py", line 63, in get
session.commit()
File "/usr/lib/python3.7/site-packages/tornado/web.py", line 1699, in _execute
result = await result
<built-in method run of Context object at remote 0x7fc857aeb910>
File "/usr/lib/python3.7/asyncio/events.py", line 88, in _run
self._context.run(self._callback, *self._args)
File "/usr/lib/python3.7/asyncio/base_events.py", line 1771, in _run_once
handle._run()
File "/usr/lib/python3.7/asyncio/base_events.py", line 534, in run_forever
self._run_once()
File "/usr/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 148, in start
self.asyncio_loop.run_forever()
File "examples/multi_databases.py", line 106, in <module>
IOLoop.current().start()
What happens if you skip pool_timeout altogether and just rely on SQLAlchemy to handle it?
The default pool_timeout is 30; after 30s the blocking coroutine will raise a TimeoutError and respond with a 500, but other incoming requests will continue to block waiting for a connection. The deadlock will never be broken unless no new requests arrive.
True, but if the timeout is disabled, then at some point the database would complain about too many connections and the responses would 500 anyway.
My proposal would be to not do anything of our own here, and just keep the SQLAlchemy defaults. Database connection bugs are tricky to debug, so I'd rather not interfere with those settings. :)
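For reference, the QueuePool defaults that would then apply (values per the SQLAlchemy documentation; worth re-checking against the version in use):
engine_options = {
    'pool_size': 5,      # default
    'max_overflow': 10,  # default
    'pool_timeout': 30,  # default, in seconds
}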
Yeah, it can't solve the 500 problem. Users should decide how to deal with it themselves.
|
gharchive/issue
| 2019-12-30T04:59:12 |
2025-04-01T06:40:23.640283
|
{
"authors": [
"dingyaguang117",
"siddhantgoel"
],
"repo": "siddhantgoel/tornado-sqlalchemy",
"url": "https://github.com/siddhantgoel/tornado-sqlalchemy/issues/88",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
2085645061
|
gitconfig includeIf directives does not work properly inside distrobox
[includeIf "gitdir:~/work/"]
path = work.gitconfig
This config should load work.gitconfig when in a repo that is a subdirectory of ~/work, and it works well on the host. However, it does not work when using git inside a distrobox for some reason.
The current solution is to map the distrobox's git to the host's, as sketched below.
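One possible way to do that mapping, assuming distrobox-host-exec is available inside the box and ~/.local/bin comes before /usr/bin on PATH:
ln -s /usr/bin/distrobox-host-exec ~/.local/bin/git  # 'git' inside the box now runs the host's git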
|
gharchive/issue
| 2024-01-17T08:13:13 |
2025-04-01T06:40:23.689125
|
{
"authors": [
"HerrNaN"
],
"repo": "sidusIO/sediment",
"url": "https://github.com/sidusIO/sediment/issues/43",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
2413248668
|
fix: tree grouping breaks if rows are assigned statically
Fixes the issue where tree grouping breaks if rows are directly set on ngx-datatable without doing any async calls.
What kind of change does this PR introduce? (check one with "x")
[x] Bugfix
[ ] Feature
[ ] Code style update (formatting, local variables)
[ ] Refactoring (no functional changes, no api changes)
[ ] Build related changes
[ ] CI related changes
[ ] Other... Please describe:
What is the current behavior? (You can also link to an open issue here)
What is the new behavior?
Does this PR introduce a breaking change? (check one with "x")
[ ] Yes
[x] No
If this PR contains a breaking change, please describe the impact and migration path for existing applications: ...
Other information:
@timowolf Could you please review this PR?
Thanks!
|
gharchive/pull-request
| 2024-07-17T10:33:38 |
2025-04-01T06:40:23.699881
|
{
"authors": [
"chintankavathia"
],
"repo": "siemens/ngx-datatable",
"url": "https://github.com/siemens/ngx-datatable/pull/77",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
326973630
|
can't connect: message type: closed
Hi,
I've been using this project for a while without problems but since the last 2 days I've not been able to connect.
I get every time the following message on Chrome console while trying to connect to he API:
got message {from: "meta", type: "closed"}
Above I get the following message:
WebSocket connection to 'ws://localhost:2019/' failed: Connection closed before receiving a handshake response
This happens every time I click on the Click to connect to API button.
Backend, API server and HTTP server seems to be running fine.
Hmm that's odd... Someone else had a similar error at #5 and #48. Which operating system do you use? Might it be a firewall issue for you?
Same here, it's been working well but it stopped; getting a
["Cmd",{"type":"update"}]
Not getting the ref like usual when trying to generate the QR code
@sigalor I am using Ubuntu 17.10
I followed the steps on #5 and everything seems fine.
Accessing http://10.135.1.5:2019/ (10.135.1.5 is the machine I am running the service on) shows a page with the following text:
Upgrade Required
@UrbanSwati Thanks for this info, now I know the cause of @nilsburg's error. Looking at the generateQRCode function of backend/whatsapp.py, you can see that the script currently asks the server to use WA Web version 0.2.9229, but apparently the server does not support it anymore. Just replace 9229 with 9547: when you look at the websocket log of the original WA Web using the Chrome developer console, you can see it requesting version 0.2.9547, so that is the newest one.
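As a sketch, the fix is a single version number in backend/whatsapp.py; the variable name here is illustrative, not the repo's actual identifier:
WHATSAPP_WEB_VERSION = [0, 2, 9547]  # was [0, 2, 9229]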
A question to everyone: Is it possible to query the server for the newest WA Web version directly, i.e. without possibly receiving the ["Cmd",{"type":"update"}] message beforehand? This would allow the Python script to not rely on using a hardcoded version number.
@nilsburg, the Upgrade Required text is absolutely plausible, because a WebSocket server is listening on port 2019. Accessing it with a plain HTTP request does not really make sense.
Hi,
Yes! It's now working, but it seems that the JSON object with the messages has changed quite a bit so I will have to adapt my app.
Thank you very much for your help!
I now realise that the error I was getting on chrome was my fault because the client script was trying to connect to localhost:2019 while the service was launched on another machine. Sorry for the mistake.
But updating the WA Web version solved my problem... so thanks again!
|
gharchive/issue
| 2018-05-28T10:12:11 |
2025-04-01T06:40:23.709015
|
{
"authors": [
"UrbanSwati",
"nilsburg",
"sigalor"
],
"repo": "sigalor/whatsapp-web-reveng",
"url": "https://github.com/sigalor/whatsapp-web-reveng/issues/50",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2456632589
|
🛑 Höllgrotten is down
In 89bc09f, Höllgrotten (https://www.hoellgrotten.ch/) was down:
HTTP code: 0
Response time: 0 ms
Resolved: Höllgrotten is back up in 50ad311 after 9 minutes.
|
gharchive/issue
| 2024-08-08T21:13:34 |
2025-04-01T06:40:23.924691
|
{
"authors": [
"signalwerk"
],
"repo": "signalwerk/uptime",
"url": "https://github.com/signalwerk/uptime/issues/802",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1231728689
|
Feature Request NFT: add tokens and SIGNA flow to owner
Note: I do not know how to format for SIPS.
Request NFT: add tokens and SIGNA flow to owner.
On the NFT portal screen where you add your graphic:
a) connected XT wallet would know what existing tokens the account has
b) a drop down to show what tokens are selectable
c) a box for how many tokens to include with NFT minting
Add code to the minted NFT so that when SIGNA lands in the NFT's smart contract address, the new SIGNA gets sent to the owner of the NFT.
This would be to take an existing token and include it with a unique NFT graphic that states how many tokens are included. And when the token sends out dividends, the NFT smart contract passes the received SIGNA to the NFT owner's SIGNA address.
Not sure about royalties or platform fees.
This should be in NFT feedback, not SIPs
I'll close the ticket; please reopen if still needed on https://github.com/signum-nft/feedback-signumart
|
gharchive/issue
| 2022-05-10T21:36:09 |
2025-04-01T06:40:23.927574
|
{
"authors": [
"frankTheTank72",
"paulpoco",
"rogerwim"
],
"repo": "signum-network/SIPs",
"url": "https://github.com/signum-network/SIPs/issues/53",
"license": "Unlicense",
"license_type": "permissive",
"license_source": "github-api"
}
|
830135219
|
Initial release plans for cosign
I really want to try to sign the first release of cosign, with cosign, if we can figure out a way that makes sense.
I think we'll have to do it manually, since we won't yet have a signed release to use in automation. Thankfully, we can skip some of the issues with reproducible builds.
Here's a rough plan:
The trick is that we can use Go to build a reproducible binary.
Setup github actions to build and publish a cosign binary on each commit to main
cosign builds should be reproducible, with some care.
We'll make these build logs public, showing the sha of each binary we build.
One or more of us will generate keys specific to this initial release.
The more the merrier!
We can commit these public keys to a file here in the repo.
We can pick a commit to tag our initial release at.
We'll have several of us build our own versions of cosign from it, and check the SHAs against the ones from the GitHub action. We can publish those in an issue.
If they match, we can tag the release. We'll then sign the first binaries and git tag (v0.1.0)!
We'll publish the signatures in Rekor and in the GitHub release.
The keys to verify the release will be included in the git repo at the matching commit!
After this, we can change our automation to use its own public/private key-pair (also stored in this repo), and the last signed cosign binary release (v0.1.0) in CI to sign continuous builds. Signatures can get published to Rekor, the build log, and wherever we host the binaries.
Actual tagged releases will also be signed by the automation, and one or more maintainers. We'll sign the git commits as well as the resulting binaries. These can get published to the GitHub release. These public keys (and their mapping to maintainers) will also be stored in the repo.
We can write a verification script to help people verify.
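A rough sketch of what such a script might check (file and key names are placeholders):
sha256sum cosign-linux-amd64  # compare against the SHA published in the build log
cosign verify-blob -key release.pub -signature cosign-linux-amd64.sig cosign-linux-amd64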
Setup github actions to build and publish a cosign binary on each commit to main
Won't this make the repo quite difficult for new contributors trying to clone? Since we wouldn't want to rewrite the history in the future in a project like this, I think this is something to avoid?
Won't this make the repo quite difficult for new contributors trying to clone? Since we wouldn't want to rewrite the history in the future in a project like this, I think this is something to avoid?
Sorry! I think that wasn't clear. How about: "publish a binary after each commit to main"?
Not pushing things back into main :) I mean we should publish CI builds somewhere (GCS, GitHub artifacts) after each build.
All done here! Release 2 coming soon :)
|
gharchive/issue
| 2021-03-12T13:47:18 |
2025-04-01T06:40:23.941793
|
{
"authors": [
"ahmetb",
"dlorenc"
],
"repo": "sigstore/cosign",
"url": "https://github.com/sigstore/cosign/issues/82",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
1213717631
|
Handle context cancelled properly + tests.
Signed-off-by: Ville Aikas vaikas@chainguard.dev
Summary
Handle closing of the context properly and return an error for it.
Ticket Link
Fixes
Release Note
Codecov Report
Merging #1796 (a0df4f9) into main (0c4cf2e) will increase coverage by 0.01%.
The diff coverage is 66.66%.
@@ Coverage Diff @@
## main #1796 +/- ##
==========================================
+ Coverage 32.56% 32.58% +0.01%
==========================================
Files 147 147
Lines 9297 9303 +6
==========================================
+ Hits 3028 3031 +3
- Misses 5915 5917 +2
- Partials 354 355 +1
Impacted Files                               Coverage Δ
pkg/cosign/kubernetes/webhook/validator.go   75.55% <66.66%> (+0.32%) ↑
pkg/cosign/tuf/client.go                     61.68% <0.00%> (-0.82%) ↓
Continue to review full report at Codecov.
Legend - Click here to learn more
Δ = absolute <relative> (impact), ø = not affected, ? = missing data
Powered by Codecov. Last update 0c4cf2e...a0df4f9. Read the comment docs.
|
gharchive/pull-request
| 2022-04-24T17:36:00 |
2025-04-01T06:40:23.951857
|
{
"authors": [
"codecov-commenter",
"vaikas"
],
"repo": "sigstore/cosign",
"url": "https://github.com/sigstore/cosign/pull/1796",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
1549276627
|
Do a JavaScript release
See #3.
This would entail:
setting up an npm project for this repo
automating the procedure to generate JS bindings to the protos (Makefile), generally checking in the generated code (gen/ and .github/workflows/)
automating/documenting the procedure to do releases of the generated code (RELEASE.md plus any GH Actions) on a tag, preferably releases/js/v0.1.1 or whatever.
Then you could start consuming it in sigstore-js.
CC @bdehamer @feelepxyz @eddiezane
Can we refrain from checking in the generated code?
It makes contributions hard, as it forces everybody to install all the tools, and sometimes it leads to checking in binary-like code, as in https://github.com/sigstore/protobuf-specs/blob/16541696de137c6281d66d075a4924d9bbd181ff/gen/pb-go/bundle/v1/sigstore_bundle.pb.go#L308-L329
I argued for exactly that in https://github.com/sigstore/protobuf-specs/pull/12 but was outvoted. The main issue is that Go kinda requires it :( I'd prefer not to reopen that can of worms. CC @asraa @kommendorkapten @woodruffw for visibility though
If it's substantially more idiomatic we can skip for JS; we already skip it for Java.
That said, the only tooling really needed here seems to be Docker and make; are you having issues with those?
The main issue is that Go kinda requires it
Can Go store the generated source into a release branch then?
Frankly speaking, it is annoying that PRs like https://github.com/sigstore/protobuf-specs/pull/64 require updating and committing generated code along with the changes themselves.
I'm willing to change my opinion, but I'm still a soft 👎 on removing checked-in codegen for Python: it's relatively idiomatic to do so, and shouldn't have any significant developer burden (since as @znewman01 mentioned everything should be Dockerized).
I haven't run into any significant issues with either Go or Python's codegen, despite having some issues with Fulcio's similar codegen (which is not Dockerized).
I think it makes sense to keep as-is, as we get to exercise the codegen frequently to avoid any hiccups during a planned release. And as already said, it's dockerized, so it should be of minimal extra burden for a contributor (albeit I understand that it's easy to forget to run the code generation and thus hit some unexpected failures).
codegen for Python: it's relatively idiomatic to do so
I thought Python was using PyPI or something like that, rather than fetching dependencies from the source code.
I haven't run into any significant issues with either Go or Python's codegen
I wasn't able to get make to work: https://github.com/sigstore/protobuf-specs/pull/64#issuecomment-1400201925
I might need to try harder; previously, however, Docker worked for me.
I see how checking in the generated code might be helpful in case the generator is complicated to set up.
For instance, suppose there's a "database engine" project with a generated SQL parser. Suppose the parser generator is not available on all platforms; then it might help if the generated parser were committed to source control, so everybody can work on the database without spending time installing the generator.
On the other hand, sigstore/protobuf-specs does not fit that pattern. The only purpose of the repository is to collect .proto files, and there is no other business logic.
I am not sure I want to install toolchains (even in Docker) for all the ecosystems. It is more or less fine when make fetches Docker images for Go and Python only; however, I am afraid it will get out of hand as the number of generators increases: Go, Python, JavaScript, Rust, Java, Clojure, Haskell, C#, Elm, you name it.
I would refrain from invoking a build that downloads and installs all those dependencies, especially, when the only thing I want is to fix a typo in the proto definition and/or add an annotation.
It reduces the likelihood of codegen errors and regressions: a regression in protoc or betterproto can't result in us silently publishing a broken package
I do not see how committing the generated code prevents regressions.
I can easily see how running tests with the newly generated code could prevent regressions, however, committing the generated code does not make it tremendously better.
It matches "publish what you know": someone who reports a bug in the Python bindings can send us a permalink to the exact line that's causing the error, without having to share their own copy of the generated code.
If you want something for documentation purposes, then it might live on a documentation branch/repository.
|
gharchive/issue
| 2023-01-19T14:29:49 |
2025-04-01T06:40:23.964900
|
{
"authors": [
"kommendorkapten",
"vlsi",
"woodruffw",
"znewman01"
],
"repo": "sigstore/protobuf-specs",
"url": "https://github.com/sigstore/protobuf-specs/issues/61",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
2071733519
|
chore: remove "Signing publications" from the logging output
Summary
The message was not adding much, and it was emitted during the configuration phase, so it was not even aligned with the actual signing work.
Release Note
NONE
Documentation
NONE
gotta dco it
Frankly, DCO is useless since GitHub's terms of service already require that any content someone posts on GitHub must comply with the target's license: https://docs.github.com/en/site-policy/github-terms/github-terms-of-service#6-contributions-under-repository-license
See https://ben.balter.com/2018/01/02/why-you-probably-shouldnt-add-a-cla-to-your-open-source-project/#if-a-license-isnt-good-enough-for-maintainers-you-shouldnt-subject-users-to-it
Can we somehow lift the DCO restriction?
I'm running into this DCO validation every time, and it does take time to re-commit and re-push
Yeah, it's just applied across all sigstore repos. So I dunno :shrug:
|
gharchive/pull-request
| 2024-01-09T06:48:07 |
2025-04-01T06:40:23.969097
|
{
"authors": [
"loosebazooka",
"vlsi"
],
"repo": "sigstore/sigstore-java",
"url": "https://github.com/sigstore/sigstore-java/pull/603",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
682391897
|
Is it safe to share a read-only PatriciaMap across multiple threads?
In other words, is it safe to add unsafe impl<V> Sync for Node<V> {}?
I'm not 100% sure but it seems okay to implement Sync for Node<V>.
You need to implement it like this:
unsafe impl<V: Sync> Sync for Node<V> {}
unsafe impl<V: Send> Send for Node<V> {}
which is something I got wrong initially
@leshow You're right. Thank you for your advice!
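With both impls in place, a read-only map can be shared across threads like this (a minimal sketch, assuming a patricia_tree version that includes these impls):
use std::{sync::Arc, thread};
use patricia_tree::PatriciaMap;

fn main() {
    let mut map = PatriciaMap::new();
    map.insert("foo", 1);
    let shared = Arc::new(map); // no &mut access past this point
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let m = Arc::clone(&shared);
            thread::spawn(move || assert_eq!(m.get("foo"), Some(&1)))
        })
        .collect();
    for h in handles {
        h.join().unwrap();
    }
}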
|
gharchive/issue
| 2020-08-20T03:56:29 |
2025-04-01T06:40:23.986728
|
{
"authors": [
"12101111",
"leshow",
"sile"
],
"repo": "sile/patricia_tree",
"url": "https://github.com/sile/patricia_tree/issues/9",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
206158602
|
Added option validator.mapping.use_annotation
Added option validator.mapping.use_annotation to change the default usage of StaticMethodLoader to AnnotationLoader.
Added option validator.mapping.cache to allow passing a CacheInterface for usage in the mapping loader.
This should simplify the creation of a validator that reads annotations instead of static methods.
@SpacePossum, OK, I will write some tests and a documentation update. I'm not sure how to use GitHub to do that in this same pull request. Any idea? Or should I make a new pull request?
I'm not very sure how to use github to do that in this same pull request. Any idea? Or should I make a new pull request?
Just push to your branch and GitHub will update the corresponding pull request automatically ;)
Any thoughts on this?
Why just add a flag and not introduce a new validator.mapping.loader service that users could override?
This would let developers use any of the available loaders.
This would also let them configure the AnnotationReader they want to use.
Something like this would be better IMHO:
$app['validator.mapping.cache'] = null;
$app['validator.mapping.loader'] = function ($app) {
return new StaticMethodLoader();
}
$app['validator.mapping.class_metadata_factory'] = function ($app) {
return new LazyLoadingMetadataFactory($app['validator.mapping.loader'], $app['validator.mapping.cache']);
};
If you wish, you could then add an example in the documentation to show how to use the AnnotationLoader by overriding the new service:
$app->register(new ValidatorServiceProvider(), array(
'validator.mapping.loader' => function ($app) {
return new AnnotationLoader($app['annotations']);
},
));
@jlHertel Do you think you'd be able to update this soon and resolve the conflicts? Thanks!
I guess this should be closed as well as per https://github.com/silexphp/Silex/issues/1588#issuecomment-368390861
@hkdobrev That's correct. Let's close.
|
gharchive/pull-request
| 2017-02-08T10:41:04 |
2025-04-01T06:40:23.993925
|
{
"authors": [
"fabpot",
"hkdobrev",
"jdreesen",
"jlHertel",
"skalpa"
],
"repo": "silexphp/Silex",
"url": "https://github.com/silexphp/Silex/pull/1484",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
177968796
|
Androidify node-nnpack
Tested to make sure that node-nnpack compiles fine for both OSX and Android with this change. I haven't tested running this on an Android device yet. Would merge once I am done testing.
I am able to require node-nnpack on the device without any missing dependency errors. Merging this change now so I can import this module as a subtree in silk-core.
|
gharchive/pull-request
| 2016-09-20T05:35:31 |
2025-04-01T06:40:23.997683
|
{
"authors": [
"jainanshul"
],
"repo": "silklabs/node-nnpack",
"url": "https://github.com/silklabs/node-nnpack/pull/1",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1649885198
|
Feature/text selection toolbar
Starter copy, bookmark, highlight, and note functionality
We merged this code in #170
|
gharchive/pull-request
| 2023-03-31T19:12:38 |
2025-04-01T06:40:24.003817
|
{
"authors": [
"chrisvire",
"saidbrandonsaid"
],
"repo": "sillsdev/appbuilder-pwa",
"url": "https://github.com/sillsdev/appbuilder-pwa/pull/167",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1588594515
|
Pin changes
There are a couple of problems that need to be resolved with the pins.
pins 19 and 23 on the RaspberryPiZero are switched.
Pins on the H1 and H2 connectors will have to be updated to match the EPS.
These issues are with respect to Release 1.0
Systems have chosen the replacement pins
H2-12 -> H2-22
H2-14 -> H2-24
Update:
Pins 19 and 23 from the RaspberryPiZero to pins 1 and 3 on the MAX3100 (U5), called BCM_11_SCLK and BCM_10_MOSI, are switched and need to be fixed.
|
gharchive/issue
| 2023-02-17T00:54:38 |
2025-04-01T06:40:24.023562
|
{
"authors": [
"Danny6151",
"edwardsnj"
],
"repo": "silver-sat/Payload_Board",
"url": "https://github.com/silver-sat/Payload_Board/issues/2",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
324395977
|
Update template styles
[ ] Change background color on all pages to white
[ ] Match fonts and link styles to mozilla.org
https://github.com/mozilla/kitsune/projects/3
|
gharchive/issue
| 2018-05-18T12:43:33 |
2025-04-01T06:40:24.025228
|
{
"authors": [
"sgarrity"
],
"repo": "silverorange/kitsune",
"url": "https://github.com/silverorange/kitsune/issues/4",
"license": "BSD-3-Clause",
"license_type": "permissive",
"license_source": "github-api"
}
|
200536583
|
AttributesHTML() not following exclude parameters
If you use something like CreditCardField, its template uses AttributesHTML with exclude parameters. As far as I can tell, those parameters don't make it to the function in FormField.php, so all the attributes are returned, which in the case of CreditCardField breaks the form.
Creating a duplicate function and calling it from the template sends only the first attribute.
Not too sure what's going on with it, but as far as I can tell it's a bug.
Tested it on two different SilverStripe installations, currently 3.4.1
Yeah there's a bug in the template. In order to pass parameters to a getter, we need to use the FULL method name. I.e. $getAttributesHTML not $AttributesHTML. You can substitute it in your code as such if you want to test it locally. :)
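For example, in a copy of the field's template the substitution looks like this (the exclude list shown is illustrative):
$AttributesHTML('type', 'name')     <%-- broken: the parameters never reach the getter --%>
$getAttributesHTML('type', 'name')  <%-- works: the full method name passes them through --%>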
This has been discussed repeatedly - "getters" should not accept parameters. This "bug" can't be fixed in a patch release because the API would need to change in a breaking way (remove getHTMLAttributes) and we're not going to accept sending params to getters via the template.
Thanks @tractorcow that worked perfectly.
@dhensby While I understand what you're saying with the getters, currently the CreditCardField doesn't work as intended and you have to have your own template to get it to work (it produces invalid HTML currently). Just changing $AttributesHTML to $getAttributesHTML as @tractorcow explains fixes the issue and only changes the template rather than breaking the API.
I know my issue raised was related to the "getters" issue however ultimately I just want CreditCardField to work and was trying to be helpful with my debugging findings. I can also do the change if it helps.
Sure - if we can fix a core issue here by just amending a core template we should do it.
Fixed with https://github.com/silverstripe/silverstripe-framework/pull/6504
|
gharchive/issue
| 2017-01-13T03:00:09 |
2025-04-01T06:40:24.034156
|
{
"authors": [
"Rudigern",
"dhensby",
"tractorcow"
],
"repo": "silverstripe/silverstripe-framework",
"url": "https://github.com/silverstripe/silverstripe-framework/issues/6497",
"license": "BSD-3-Clause",
"license_type": "permissive",
"license_source": "github-api"
}
|
265084188
|
ENHANCEMENT Allow extensions to intercept incorrect deletes on unpublish
The issue with the existing behaviour is that some of the queried tables are NOT part of the hierarchy, and the subsequent delete could unintentionally delete records outside of this object.
For instance, fluent uses a localisation table as an inner join. This existing behaviour deletes these locales, but using the ID of the record, not the ID of the localisation (which is a separate ID itself).
With the given extension points extensions are able to intercept these queries and rewrite the delete safely without violating database integrity.
For instance, this is a page that has three failover locales. When deleted, it performs these deletes:
"SiteTree_Live, ID = 3"
"SiteTree_Localised_Live, ID = 3"
"SiteTree_Localised_Live, ID = 3"
"SiteTree_Localised_Live, ID = 3"
The following three deletes should use RecordID = 3, not ID = 3.
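In SQL terms, the localisation deletes should end up looking like this sketch:
DELETE FROM "SiteTree_Localised_Live" WHERE "RecordID" = 3; -- correct: keyed by the owning record
-- instead of the current, incorrect
DELETE FROM "SiteTree_Localised_Live" WHERE "ID" = 3;       -- may hit an unrelated localisation row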
Seems reasonable, and it doesn't change anything if there are no extensions :)
|
gharchive/pull-request
| 2017-10-12T21:00:51 |
2025-04-01T06:40:24.036662
|
{
"authors": [
"flamerohr",
"tractorcow"
],
"repo": "silverstripe/silverstripe-framework",
"url": "https://github.com/silverstripe/silverstripe-framework/pull/7477",
"license": "BSD-3-Clause",
"license_type": "permissive",
"license_source": "github-api"
}
|
404797099
|
FIX: Block Manifest of the compatibility class Object and lean on the compatibility auto loader
On PHP 7.2, the initial dev/build of a SilverStripe 3.7.1 site can throw Fatal error: Cannot use 'Object' as class name as it is reserved. I experienced this on SilverStripe Platform, where it caused the deployment to fail and roll back.
This pull request simply adds a manifest exclude file in core/compat which causes the framework to lean on the auto loader defined in model/fieldtypes/compat/autoload.php instead.
Will this make it into the upcoming 3.7.3 release, or will it need to wait until 3.7.4? Trying to plan ahead with one of our projects that's moving to SilverStripe Platform on a stack currently configured for PHP 7.2; without this pull being merged, it would need to be downgraded to PHP 7.1.
|
gharchive/pull-request
| 2019-01-30T14:30:11 |
2025-04-01T06:40:24.038836
|
{
"authors": [
"UndefinedOffset"
],
"repo": "silverstripe/silverstripe-framework",
"url": "https://github.com/silverstripe/silverstripe-framework/pull/8772",
"license": "BSD-3-Clause",
"license_type": "permissive",
"license_source": "github-api"
}
|
162657123
|
style for Placeholder(title)
I need to style the placeholder on my site (with another color), but I can't change the style, because when I select an option it has the same color as the placeholder.
Style via the bs-placeholder class.
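For example, a minimal override, based on the class the plugin adds to the toggle button (specificity may need adjusting for a given theme):
.bootstrap-select > .dropdown-toggle.bs-placeholder {
  color: #999; /* placeholder-only color; selected options keep their normal color */
}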
|
gharchive/issue
| 2016-06-28T10:52:00 |
2025-04-01T06:40:24.058293
|
{
"authors": [
"caseyjhol",
"nov23"
],
"repo": "silviomoreto/bootstrap-select",
"url": "https://github.com/silviomoreto/bootstrap-select/issues/1432",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
212377860
|
replace=False preserves the mask in StackView when updating the frame number
Useful for exploring 3D stacks.
This will close issue #650
Looks good to me.
|
gharchive/pull-request
| 2017-03-07T09:38:51 |
2025-04-01T06:40:24.061991
|
{
"authors": [
"PiRK",
"alemirone",
"t20100"
],
"repo": "silx-kit/silx",
"url": "https://github.com/silx-kit/silx/pull/651",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1571645798
|
Easy way to determine if two rows have the same columns?
I have this method to help me determine whether the old row and the excluded row have the same fields. I am using the insert method with the DoUpdate.withExcluded parameter, whose where clause is (old, excluded) => isDifferent(old, excluded).
Is there any way to automate the process? I am comparing almost all same-named columns of old with excluded. As this approach is prone to logical errors that will be hard to detect, can this be done by looping through the same-named columns? Maybe comparing them with something like a reduce method, while excluding some fields that we don't want compared?
More explanation is available in issue https://github.com/simolus3/drift/issues/2299#issue-1571171981.
@override
Expression<bool> isDifferent(Users old, Users excluded) {
return (old.dateJoined.isExp(excluded.dateJoined) &
old.email.isExp(excluded.email) &
old.username.isExp(excluded.username) &
old.username.isExp(excluded.username) &
old.isGuest.isExp(excluded.isGuest) &
old.isVerified.isExp(excluded.isVerified) &
old.isDeletedLocally.isExp(excluded.isDeletedLocally))
.not();
}
You can always cast from a DSL table instance (Users) to the internal table object (TableInfo). This interface gives you full access to the columns defined in the table, which allows doing reflection over tables:
Expression<bool> isDifferent(Users old, Users excluded) {
final oldTbl = old as TableInfo;
final excludedTbl = excluded as TableInfo;
return oldTbl.$columns
.where((c) => true) // Skip columns you don't want to compare
.map((column) {
final excludedColumn = excludedTbl.columnsByName[column.name]!;
return column.equalsExp(excludedColumn);
})
.reduce((a, b) => a & b)
.not();
}
|
gharchive/issue
| 2023-02-05T22:42:53 |
2025-04-01T06:40:24.089414
|
{
"authors": [
"mateoKutnjak",
"simolus3"
],
"repo": "simolus3/drift",
"url": "https://github.com/simolus3/drift/issues/2302",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
2392802970
|
Migration error when the same table is altered twice
I have a table called Box.
The scenario:
I want to add a refferenceId column and add it as a unique key in version 2.
Then in version 3, I need to add another column called refferenceNumber and also add it as a unique key.
This is my migration strategy:
onUpgrade: (Migrator m, int from, int to) async {
await transaction(() async {
if (from < 2) {
await m.alterTable(
TableMigration(
box,
newColumns: [box.refferenceId],
),
);
}
if (from < 3) {
await m.alterTable(
TableMigration(
box,
newColumns: [box.refferenceNumber],
),
);
}
});
}
The case is:
When my app upgrades from version 1 to 2 and then again from version 2 to 3, the migration works fine.
But when my app upgrades directly from version 1 to 3, I get this error:
SqliteException(1): while executing, no such column: refference_number, SQL logic error (code 1): Causing statement: INSERT INTO tmp_for_copy_box ("id", "box_number", "type", "amount", "refference_number") SELECT "id", "box_number", "type", "amount","refference_number" FROM "box";, parameters:
So I assume this is because, when the app upgrades directly from version 1 to 3, the box table is altered twice during onUpgrade.
Please tell me, is this a bug or is there a mistake in my code?
Thank you
Indeed, the problem is that alterTable would get called twice when upgrading from 1 to 3 directly. The complicated solution is to account for this in your onUpgrade handler by always upgrading to the latest version in every branch, e.g.
if (from < 2) {
await m.alterTable( // 1 -> current
TableMigration(
box,
newColumns: [box.refferenceId, box.refferenceNumber],
),
);
} else if (from < 3) {
await m.alterTable( // 2 -> current
TableMigration(
box,
newColumns: [box.refferenceNumber],
),
);
}
Of course, writing migrations that just do a single step and then chaining them is much easier and scales better. However, it requires you to actually migrate towards an intermediate, outdated schema (2) in your migration logic. Since box is generated for the current table though, that's not directly possible.
To do this, drift provides tools that generate step-by-step migrations based on exported database schemas. To set this up, you'd have to export your old schemas once (you can probably checkout the revision where you've updated the schema and run dart run drift_dev schema dump on that tree). Then, drift can generate all intermediate versions of your schema into a compressed format which provides enough information to write migrations like this:
onUpgrade: stepByStep(
from1To2: (m, schema) async {
await m.alterTable(schema.box, newColumns: [schema.box.refferenceId]);
},
from2To3: (m, schema) async {
await m.alterTable(schema.box, newColumns: [schema.box.refferenceNumber]);
},
),
|
gharchive/issue
| 2024-07-05T15:25:33 |
2025-04-01T06:40:24.095856
|
{
"authors": [
"Guang-B",
"simolus3"
],
"repo": "simolus3/drift",
"url": "https://github.com/simolus3/drift/issues/3078",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
556689066
|
moor_generator >=2.0.1 is incompatible with json_serializable ^3.2.5
Please help: I can't start using moor because I get this error when I add moor_generator to pubspec.yaml. Thanks.
Running "flutter pub get" in do_not...
Because analyzer_plugin >=0.2.1 depends on analyzer >=0.35.3 <0.39.0 and analyzer_plugin >=0.1.0 <0.2.1 depends on analyzer >=0.35.3 <0.38.0, analyzer_plugin >=0.1.0 requires analyzer >=0.35.3 <0.39.0.
And because moor_generator >=2.0.1 depends on analyzer_plugin >=0.1.0 <0.3.0, moor_generator >=2.0.1 requires analyzer >=0.35.3 <0.39.0.
And because json_serializable 3.2.5 depends on analyzer ^0.39.0 and no versions of json_serializable match >3.2.5 <4.0.0, moor_generator >=2.0.1 is incompatible with json_serializable ^3.2.5.
So, because do_not depends on both json_serializable ^3.2.5 and moor_generator ^2.3.1, version solving failed.
pub get failed (1; So, because do_not depends on both json_serializable ^3.2.5 and moor_generator ^2.3.1, version solving failed.)
You can use json_serializable: ^3.2.3 until the next version, if using 3.2.5 is not important.
I just released moor and moor_generator version 2.4.0, which support the latest analyzer. You can remove the dependency_overrides section and just depend on ^2.4.0. Let me know if you run into any problems, thanks!
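A minimal pubspec sketch of that resolution (the dependency layout is an assumption, not taken from the reporter's project):
dependencies:
  moor: ^2.4.0
dev_dependencies:
  moor_generator: ^2.4.0
  json_serializable: ^3.2.5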
|
gharchive/issue
| 2020-01-29T07:33:03 |
2025-04-01T06:40:24.101159
|
{
"authors": [
"2math",
"RakaAlrian",
"simolus3"
],
"repo": "simolus3/moor",
"url": "https://github.com/simolus3/moor/issues/364",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1708088496
|
Support binding user-defined values to statements
StatementImplementation.execute() requires that all parameters are provided, but only understands a fixed set of types. It would be nice to add a Bindable interface that makes this extensible, e.g.:
+abstract class Bindable {
+ // Binds this value to a statement as parameter `i`.
+ void Bind(CommonPreparedStatement stmt, int i);
+}
... and in StatementImplementation._bindParam():
+ } else if (param is Bindable) {
+ param.Bind(this, i);
This requires that Bind() downcast to PreparedStatement, so maybe Bind() would instead take a Pointer, but you get the rough idea.
I'm not entirely opposed to this, but I'm not super convinced this feature should be part of the sqlite3 package either. In my mind, that package provides a direct wrapper around sqlite3 that looks like Dart. So we have an OOP API based on sound types, but we don't have stuff that sqlite3 doesn't support either (like creating CRUD statements automatically or a direct transaction API). Adding Bindable isn't much, but it also feels a lot like something that a higher-level package should do.
Do you have an example where it's considerably more convenient to use Bindable instead of just doing the translation manually? The way I see it, Bindable.Bind would just recursively call stmt.bind to set the translated value at the same index? Are you using a sqlite3 library for another language that has that feature?
Ah yeah, that's fair. I wasn't aware of that functionality in sqlite3, but I think we can add basic support for it. (Basic in the sense that it's going to be really low-level, I think even exposing the type pointer as a Dart string would be wrong given how much the document talks about how types must not be dynamic values)
Do you also need an API to read pointer values as arguments from user-defined Dart functions?
My use case is just to use an extension written in C from Dart, but I guess for completeness you would want to be able to consume/return such values from Dart extensions.
In my case there is a C API that knows how to generate such values, so all I need is to get a sqlite3_stmt* to pass into that API.
My use case is just to use an extension written in C from Dart, but I guess for completeness you would want to be able to consume/return such values from Dart extensions.
I agree that a complete solution should have that. But given how rarely the feature is used and how poorly it translates to Dart with dynamic strings, I think it might be better to not have support for pointer passing interfaces at all.
From what I can tell, there is no way to bind a user-defined value to a statement at all: you must pass all parameters
Yes, the intention here is to avoid bugs caused by parameters that haven't been bound.
To support your use case, I've followed a different approach in d3382c5310b63dd70ed7882301607581104279d5. It adds a StatementParameters class which explicitly controls the way parameters are bound. This validation can be completely bypassed by using StatementParameters.bindCustom:
stmt.executeWith(StatementParameters.bindCustom((stmt) {
stmt as PreparedStatement;
callMyNativeFunction(stmt.handle); // stmt.handle is a sqlite3_stmt*
}));
Thanks! I expect many users of this feature would commonly pass a mix of standard and custom types, but this certainly works and is easiest to implement.
Appreciate the quick turnaround. :)
Just noticed this was committed to the v2 branch and not main, is v2 considered suitable for production use?
Right, I've pushed the change on the v2 branch which will become the next stable release but has some features that are still in development.
I might have to make breaking changes before the final release. But apart from that, the FFI implementation on the v2 branch is working and I consider it suitable for production if you depend on a fixed version (e.g. sqlite3: 2.0.0-dev.0). If breaking changes happen you'd have to migrate eventually, but so do users who are currently on 1.x of this package.
The WASM implementation on the v2 branch is not stable yet.
Just as a single point of feedback:
I've tried working with this a little bit and think the original proposal may be slightly more usable -- otherwise to bind both custom and standard types in the same statement, you need to obtain a RawSqliteStatement in order to call sqlite3_bind_*.
I've added a CustomStatementParameter class in 5ddbf8c7bc7cc0fb21b2c580c9fceb5d8da7127a. It can be used to mix the two modes. So you can do something like execute([123, MyCustomValue()]) where MyCustomValue implements CustomStatementParameter and calls the C function with the statement handle. But there's no need to manually call sqlite3_bind for 123 anymore.
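A minimal sketch of that mixed usage (the applyTo member name and the native binding are assumptions, not verified against the package's API):
class MyCustomValue implements CustomStatementParameter {
  @override
  void applyTo(CommonPreparedStatement statement, int index) {
    statement as PreparedStatement;
    // bindMyNativeValue(statement.handle, index); // hypothetical C binding
  }
}
// Standard and custom parameters can then be mixed in one call:
// stmt.execute([123, MyCustomValue()]);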
|
gharchive/issue
| 2023-05-12T19:15:08 |
2025-04-01T06:40:24.110968
|
{
"authors": [
"pchx",
"simolus3"
],
"repo": "simolus3/sqlite3.dart",
"url": "https://github.com/simolus3/sqlite3.dart/issues/161",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1507957245
|
🛑 Victoris Webby is down
In 0f96bf3, Victoris Webby (https://$VW_SITE1/) was down:
HTTP code: 500
Response time: 7200 ms
Resolved: Victoris Webby is back up in d64a87e.
|
gharchive/issue
| 2022-12-22T14:10:11 |
2025-04-01T06:40:24.116973
|
{
"authors": [
"simon2871"
],
"repo": "simon2871/fs-websitemonitor",
"url": "https://github.com/simon2871/fs-websitemonitor/issues/39",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2053529851
|
🛑 Victoris Webby is down
In 09054c0, Victoris Webby (https://$VW_SITE1/) was down:
HTTP code: 0
Response time: 0 ms
Resolved: Victoris Webby is back up in 3501193 after 3 minutes.
|
gharchive/issue
| 2023-12-22T07:53:32 |
2025-04-01T06:40:24.119125
|
{
"authors": [
"simon2871"
],
"repo": "simon2871/fs-websitemonitor",
"url": "https://github.com/simon2871/fs-websitemonitor/issues/852",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
480190147
|
8 Report task file not found
Changed the folder used for the search.
Fixed with 'Build break task failing #10'
|
gharchive/pull-request
| 2019-08-13T14:29:44 |
2025-04-01T06:40:24.127558
|
{
"authors": [
"O-heu"
],
"repo": "simondel/sonar-buildbreaker-vsts",
"url": "https://github.com/simondel/sonar-buildbreaker-vsts/pull/9",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
391364605
|
replicable clusters based on k-means
Thanks for providing such a user-friendly package! The results of the feature clustering are not repeatable when I use k-means through fluff heatmap, although the results are almost the same each time I run the same command. I guess this is because of the k-means method. Is there a way to initialize the underlying random number generator?
There is not at the moment, but it's a good point. It's essential to have a replicable analysis. I'll add it.
Once version 3.0.3 is available, you can use the -S option to set the random seed for K-means clustering.
|
gharchive/issue
| 2018-12-15T10:16:04 |
2025-04-01T06:40:24.157592
|
{
"authors": [
"JMing-Li",
"simonvh"
],
"repo": "simonvh/fluff",
"url": "https://github.com/simonvh/fluff/issues/80",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
2126307770
|
After 1.0a9, datasette-edit-schema to use alter-table permission
See:
https://github.com/simonw/datasette-edit-schema/issues/56
This is done.
|
gharchive/issue
| 2024-02-09T01:04:19 |
2025-04-01T06:40:24.158941
|
{
"authors": [
"simonw"
],
"repo": "simonw/datasette",
"url": "https://github.com/simonw/datasette/issues/2264",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
2094732666
|
03 - Create Initial JS
This is where most of the work will be focused. Throughout this step, I will be updating the HTML and CSS to match the JS. This is still just an initial version. The goal is for most of the file to be working; however, it's fine if everything does not work at this stage.
Not everything is working yet. This initial push was much more basic than I had planned. The next JS issue has more code.
|
gharchive/issue
| 2024-01-22T20:54:05 |
2025-04-01T06:40:24.261583
|
{
"authors": [
"sinclairems"
],
"repo": "sinclairems/JavaScript-Quiz",
"url": "https://github.com/sinclairems/JavaScript-Quiz/issues/4",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
428586414
|
Change window title to Preference Pane title
Found this advice in the HIG today: https://developer.apple.com/design/human-interface-guidelines/macos/app-architecture/preferences/
So maybe after #6 is merged,
[ ] rename PreferencePane.toolbarTitle to simply title or preferencePaneTitle
[ ] use the title for the window title
This also makes #12 obsolete.
I have seen it, but I have intentionally ignored it. I personally don't think it makes much sense to repeat the tab title in the window title. It's already clear which tab it's on. I was kinda hoping no one would notice 😝 Also, a lot of Apple's apps and even macOS don't follow the HIG entirely.
But I guess we should do this to follow the HIG...
Update the window's title to reflect the currently visible preference pane. For example, if your preferences window has a General preference pane, the window’s title should be General when that pane is active.
This makes the window title unusably generic in the window list. It will just show "General", which does not make it clear that the window is a preference window...
We also need to take into account this:
If your window doesn’t have multiple preference panes, then its title should be App Name Preferences.
Some observations:
Finder.app shows "Finder Preferences" as the window title for all the panes.
Which is a violation of:
Update the window's title to reflect the currently visible preference pane.
Messages.app has the minimize button enabled.
Which is a violation of:
Disable the Minimize and Zoom buttons.
Mail.app has the minimize button enabled.
Which is a violation of:
Disable the Minimize and Zoom buttons.
iTunes.app doesn't apply the changes immediately.
Which is a violation of:
Apply preference changes immediately.
Dictionary.app has the zoom button enabled.
Which is a violation of:
Disable the Minimize and Zoom buttons.
Books.app uses the tab title and the word "Preferences" as the window title, for example, "General Preferences".
Which is a violation of:
For example, if your preferences window has a General preference pane, the window’s title should be General when that pane is active.
TextEdit.app uses the window title "Preferences".
Which is a violation of:
If your window doesn’t have multiple preference panes, then its title should be App Name Preferences.
FaceTime.app uses the window title "Preferences".
Which is a violation of:
If your window doesn’t have multiple preference panes, then its title should be App Name Preferences.
Keychain Access.app uses the window title "Preferences".
Which is a violation of:
If your window doesn’t have multiple preference panes, then its title should be App Name Preferences.
Script Editor.app has the minimize button enabled.
Which is a violation of:
Disable the Minimize and Zoom buttons.
Audio MIDI Setup.app has the minimize and zoom button enabled.
Which is a violation of:
Disable the Minimize and Zoom buttons.
Xcode.app shows the tab as the window title, but then modifies the "Window" menu to show "Preferences - General", admitting that the tab title is not clear enough of a window title.
Apple, as usual, has such a double standard. They expect third-party apps to follow their HIG, but they couldn't care less to follow it themselves.
I have filed a Radar: https://openradar.appspot.com/radar?id=5029274855669760
Wow, good detective work :) I wonder what their response is going to be.
I actually like how Xcode prepends "Preferences - " to the window title list best, just to add my 2¢.
I actually like how Xcode prepends "Preferences - " to the window title list best, just to add my 2¢.
Me too.
Should we rename toolbarItemTitle to preferencePaneTitle now that we'll use it for both the toolbar item and the window title? Kinda makes sense, since it's not really a "toolbar item" when using the segmented controls.
I didn't find a way to implement the nice addition of Preferences -- to the window list title. I am afraid this is a menu customization. We could offer a drop-in controller/service object that people tie to their window menu and that (1) removes the preference window from the regular window list, and (2) adds it as a custom entry above the regular list.
After all, Xcode's window list does look different from e.g. Finder's:
That's a strong indicator for me that they replaced the system default functionality.
Or we set the window title to "Preferences -- (preferencePaneTitle)" and replace the titlebar's label with a custom one that drops the prefix.
That actually makes more sense, as you would want the improved title in the window list and Mission Control too. But I think we should think about the problem a little more before implementing this, and it should obviously be opt-in.
Inspecting Xcode's windows, it actually does not use the "Preferences - " prefix in the actual window title. Folks in a Slack channel suggested the change is applied during menu validation.
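A minimal AppKit sketch of the drop-in idea floated above (all names are hypothetical and the exact Window-menu behavior is an assumption; untested):
import AppKit

final class PreferencesWindowMenuUpdater {
    func update(_ window: NSWindow, paneTitle: String) {
        // Keep the pane title as the window title...
        window.title = paneTitle
        // ...but opt out of automatic Window-menu management and add a
        // custom entry with a "Preferences - " prefix instead.
        window.isExcludedFromWindowsMenu = true
        NSApp.addWindowsItem(window, title: "Preferences - \(paneTitle)", filename: false)
    }
}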
|
gharchive/issue
| 2019-04-03T06:23:42 |
2025-04-01T06:40:24.284868
|
{
"authors": [
"DivineDominion",
"sindresorhus"
],
"repo": "sindresorhus/Preferences",
"url": "https://github.com/sindresorhus/Preferences/issues/15",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
638633786
|
Add Scientific Writing
By @writing-resources @ashwinvis and @maehr
Discussion can be found here https://github.com/sindresorhus/awesome/pull/1700
https://github.com/writing-resources/awesome-scientific-writing
Open source tools for painless academic writing.
This should be included, because academic writing is a hassle.
PRs:
https://github.com/sindresorhus/awesome/pull/1735
https://github.com/sindresorhus/awesome/pull/1635
https://github.com/sindresorhus/awesome/pull/1644
By submitting this pull request I confirm I've read and complied with the below requirements 🖖
Please read it multiple times. I spent a lot of time on these guidelines and most people miss a lot.
Requirements for your pull request
Don't waste my time. Do a good job, adhere to all the guidelines, and be responsive.
You have to review at least 2 other open pull requests.
Try to prioritize unreviewed PRs, but you can also add more comments to reviewed PRs. Go through the below list when reviewing. This requirement is meant to help make the Awesome project self-sustaining. Comment here which PRs you reviewed. You're expected to put a good effort into this and to be thorough. Look at previous PR reviews for inspiration.
You have read and understood the instructions for creating a list.
This pull request has a title in the format Add Name of List.
✅ Add Swift
✅ Add Software Architecture
❌ Update readme.md
❌ Add Awesome Swift
❌ Add swift
❌ Adding Swift
❌ Added Swift
Your entry here should include a short description about the project/theme of the list. It should not describe the list itself. The first character should be uppercase and the description should end in a dot. It should be an objective description and not a tagline or marketing blurb.
✅ - [iOS](…) - Mobile operating system for Apple phones and tablets.
✅ - [Framer](…) - Prototyping interactive UI designs.
❌ - [iOS](…) - Resources and tools for iOS development.
❌ - [Framer](…)
❌ - [Framer](…) - prototyping interactive UI designs
Your entry should be added at the bottom of the appropriate category.
The suggested Awesome list complies with the below requirements.
Requirements for your Awesome list
Has been around for at least 30 days. That means 30 days from either the first real commit or when it was open-sourced, whichever is most recent.
Don't open a Draft / WIP pull request while you work on the guidelines. A pull request should be 100% ready and should adhere to all the guidelines when you open it.
Run awesome-lint on your list and fix the reported issues. If there are false-positives or things that cannot/shouldn't be fixed, please report it.
The default branch should be named main, not master.
Includes a succinct description of the project/theme at the top of the readme. (Example)
✅ Mobile operating system for Apple phones and tablets.
✅ Prototyping interactive UI designs.
❌ Resources and tools for iOS development.
❌ Awesome Framer packages and tools.
It's the result of hard work and the best I could possibly produce.
If you have not put in considerable effort into your list, your pull request will be immediately closed.
The repo name of your list should be in lowercase slug format: awesome-name-of-list.
✅ awesome-swift
✅ awesome-web-typography
❌ awesome-Swift
❌ AwesomeWebTypography
The heading title of your list should be in title case format: # Awesome Name of List.
✅ # Awesome Swift
✅ # Awesome Web Typography
❌ # awesome-swift
❌ # AwesomeSwift
Non-generated Markdown file in a GitHub repo.
The repo should have awesome-list & awesome as GitHub topics. I encourage you to add more relevant topics.
Not a duplicate. Please search for existing submissions.
Only has awesome items. Awesome lists are curations of the best, not everything.
Does not contain items that are unmaintained, has archived repo, deprecated, or missing docs. If you really need to include such items, they should be in a separate Markdown file.
Includes a project logo/illustration whenever possible.
Either centered, fullwidth, or placed at the top-right of the readme. (Example)
The image should link to the project website or any relevant website.
The image should be high-DPI. Set it to maximum half the width of the original image.
Entries have a description, unless the title is descriptive enough by itself. It rarely is though.
Includes the Awesome badge.
Should be placed on the right side of the readme heading.
Can be placed centered if the list has a centered graphics header.
Should link back to this list.
Has a Table of Contents section.
Should be named Contents, not Table of Contents.
Should be the first section in the list.
Should only have one level of nested lists, preferably none.
Has an appropriate license.
We strongly recommend the CC0 license, but any Creative Commons license will work.
Tip: You can quickly add it to your repo by going to this URL: https://github.com/<user>/<repo>/community/license/new?branch=master&template=cc0-1.0 (replace <user> and <repo> accordingly).
A code license like MIT, BSD, Apache, GPL, etc, is not acceptable. Neither are WTFPL and Unlicense.
Place a file named license or LICENSE in the repo root with the license text.
Do not add the license name or text to the readme. GitHub already shows the license name at the top of the repo.
To verify that you've read all the guidelines, please comment on your pull request with just the word unicorn.
Has contribution guidelines.
The file should be named contributing.md. Casing is up to you.
Has consistent formatting and proper spelling/grammar.
The link and description are separated by a dash. Example: - [AVA](…) - JavaScript test runner.
The description starts with an uppercase character and ends with a period.
Consistent and correct naming. For example, Node.js, not NodeJS or node.js.
Doesn't include a Travis badge. You can still use Travis for list linting, but the badge has no value in the readme.
Doesn't include an Inspired by awesome-foo or Inspired by the Awesome project kinda link at the top of the readme. The Awesome badge is enough.
Go to the top and read it again.
Do not add the license name or text to the readme. GitHub already shows the license name at the top of the repo.
Do not add the license name or text to the readme. GitHub already shows the license name at the top of the repo.
thank you @sindresorhus we fixed it accordingly
The default branch should be named main, not master.
This is a very recent addition to the guidelines, so I don't know how strict @sindresorhus wants to be about it when it comes to previously opened PRs.
Doesn't hurt to change though 😃
@mourarthur Done
Tweet: https://twitter.com/awesome__re/status/1274373790714048513
|
gharchive/pull-request
| 2020-06-15T08:04:17 |
2025-04-01T06:40:24.315221
|
{
"authors": [
"ashwinvis",
"maehr",
"mourarthur",
"sindresorhus"
],
"repo": "sindresorhus/awesome",
"url": "https://github.com/sindresorhus/awesome/pull/1798",
"license": "CC0-1.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
659539372
|
Add Outsourcing
https://github.com/morenoh149/awesome-outsourcing
The business practice of hiring a party outside a company to perform services and create goods. It should be added because it provides valuable information if you are thinking about outsourcing software development. Where do you start, how does this process work, etc.
I reviewed #1817 , #1816 and #1813 .
I confirm I've read and complied with the awesome requirements
(I made a new PR because I accidentally deleted the first fork; see https://github.com/sindresorhus/awesome/pull/1825)
unicorn
|
gharchive/pull-request
| 2020-07-17T19:03:42 |
2025-04-01T06:40:24.318131
|
{
"authors": [
"morenoh149"
],
"repo": "sindresorhus/awesome",
"url": "https://github.com/sindresorhus/awesome/pull/1826",
"license": "CC0-1.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
307371940
|
New Yarn global directory on Windows
With the release of Yarn 1.5.1, Yarn's global installation directory on Windows has changed. The global module storage path now includes a "Data" segment, e.g. AppData/Local/Yarn/Data/global/node_modules.
Reference:
https://github.com/yarnpkg/yarn/pull/5336
This results in global-dirs reporting the incorrect global directory with Yarn 1.5.1 on Windows. Consequently, is-installed-globally fails to detect global installations with Yarn 1.5.1 on Windows.
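A minimal sketch of the needed fallback (not the package's actual implementation; the path layout comes from the report above):
const path = require('path');
const fs = require('fs');
const os = require('os');

// Prefer the Yarn >= 1.5.1 location on Windows, fall back to the old one.
function yarnGlobalPrefixWindows() {
  const base = process.env.LOCALAPPDATA || path.join(os.homedir(), 'AppData', 'Local');
  const withData = path.join(base, 'Yarn', 'Data', 'global');
  const legacy = path.join(base, 'Yarn', 'global');
  return fs.existsSync(withData) ? withData : legacy;
}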
@issuehunt has funded $30.00 to this issue.
Submit pull request via IssueHunt to receive this reward.
Want to contribute? Chip in to this issue via IssueHunt.
Checkout the IssueHunt Issue Explorer to see more funded issues.
Need help from developers? Add your repository on IssueHunt to raise funds.
@sindresorhus has rewarded $27.00 to @tiagodanin. See it on IssueHunt
:moneybag: Total deposit: $30.00
:tada: Repository reward(0%): $0.00
:wrench: Service fee(10%): $3.00
|
gharchive/issue
| 2018-03-21T18:42:28 |
2025-04-01T06:40:24.330902
|
{
"authors": [
"IssuehuntBot",
"ttamj"
],
"repo": "sindresorhus/global-dirs",
"url": "https://github.com/sindresorhus/global-dirs/issues/3",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
140773428
|
use travis-ci container-based infrastructure
Allows for faster builds. See docs for details.
No longer needed:
For repos we recognize on or after 2015-01-01, linux builds are sent to our container-based infrastructure.
|
gharchive/pull-request
| 2016-03-14T19:22:32 |
2025-04-01T06:40:24.344636
|
{
"authors": [
"Zertz",
"sindresorhus"
],
"repo": "sindresorhus/node-module-boilerplate",
"url": "https://github.com/sindresorhus/node-module-boilerplate/pull/10",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
339114062
|
TypeError: URLParser is not a constructor in Node 6.12.2
The normalization function (version 3.1.0) does not work with Node 6.12.2.
The problem seems to be at the line:
const URLParser = typeof URL === 'undefined' ? require('url').URL : URL;
My guess is that it needs to be:
const URLParser = typeof URL === 'undefined' ? require('url').parse : URL;
Here's a sample how to reproduce the issues:
$ nvm install 6.12.2
$ nvm exec 6.12.2 node
Running node v6.12.2 (npm v3.10.10)
> norm = require('normalize-url')
[Function]
> norm('http://example.com/some/path')
TypeError: URLParser is not a constructor
at module.exports (/Users/jdoe/workspace/node_modules/normalize-url/index.js:32:17)
at repl:1:1
at sigintHandlersWrap (vm.js:22:35)
at sigintHandlersWrap (vm.js:73:12)
at ContextifyScript.Script.runInThisContext (vm.js:21:12)
at REPLServer.defaultEval (repl.js:340:29)
at bound (domain.js:280:14)
at REPLServer.runBound [as eval] (domain.js:293:12)
at REPLServer.<anonymous> (repl.js:539:10)
at emitOne (events.js:101:20)
> require('./node_modules/normalize-url/package').version
'3.1.0'
You need the latest Node.js 6 version (Node.js 6.14).
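(For context: the WHATWG URL class was only added to the built-in url module in Node.js 6.13.0, so on 6.12.x neither the global URL nor require('url').URL exists. The guessed fix of require('url').parse would not work either, since parse returns a plain object rather than acting as a constructor.)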
|
gharchive/issue
| 2018-07-07T03:42:14 |
2025-04-01T06:40:24.346794
|
{
"authors": [
"gnicolae14",
"sindresorhus"
],
"repo": "sindresorhus/normalize-url",
"url": "https://github.com/sindresorhus/normalize-url/issues/69",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
935945146
|
fix contribution graph's time interval selection
Visiting a profile on GitHub and selecting a contribution year for the contribution graph sets the interval in the URL query to the current month, or to the last month of the given year.
e.g. https://github.com/kiprasmel
2020: selects https://github.com/kiprasmel?tab=overview&from=2020-12-01&to=2020-12-31 - last month of 2020
2021 (current year): selects https://github.com/kiprasmel?tab=overview&from=2021-07-01&to=2021-07-02 - current month
It seems the expected UX would be to select the whole year?
One would need to change the year selection's <a href attribute and replace the given start date's month with the first month, 01. I can try creating a PR if we agree on this fix.
It seems the expected UX would be to select the whole year?
Then it would need to load a year's worth of contributions, which can take a long time.
I don't think there is anything to "fix"; rather, you are asking for an "expand" button.
Maybe we can extend infinite-scroll to support the profile page.
Or maybe we can extend infinite-scroll to support the profile page.
We can't; it changes the URL on every load.
I tried, it doesn't work. If I set the requested dates it just loads January:
https://github.com/fregante?tab=overview&from=2019-01-01&to=2019-12-31
In short, GitHub expects you to click the year and then go backwards, starting from the "latest month" of that year ("December" for past years or "the current month" for this year).
|
gharchive/issue
| 2021-07-02T17:21:16 |
2025-04-01T06:40:24.355514
|
{
"authors": [
"fregante",
"kidonng",
"kiprasmel",
"yakov116"
],
"repo": "sindresorhus/refined-github",
"url": "https://github.com/sindresorhus/refined-github/issues/4536",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1303317931
|
UI Frameworks Not Working Due to useSyncExternalStore hook being undefined.
Hi,
I am attempting to use Chakra UI with single-spa, but I am unable to load Chakra UI, as the useSyncExternalStore hook seems to be undefined.
The issue was that the single-spa playground was using an older version of React and overriding the import.
|
gharchive/issue
| 2022-07-13T11:45:06 |
2025-04-01T06:40:24.367941
|
{
"authors": [
"efeyakinci"
],
"repo": "single-spa/single-spa-react",
"url": "https://github.com/single-spa/single-spa-react/issues/147",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2401279395
|
[Feature Request]: Add Hover Effect on Chatbot Icon
Is there an existing issue for this?
[X] I have searched the existing issues
Feature Description
Requesting the addition of a hover effect on the chatbot icon to enhance user experience and provide visual feedback when the icon is interacted with.
Use Case
Adding a hover effect will make the chatbot icon more interactive and engaging, providing users with a clear indication that the icon is clickable and interactive.
Benefits
Benefits:
Improved user engagement and interaction with the chatbot.
Enhanced visual feedback indicating the icon's interactivity.
A more polished and professional look for the chatbot feature.
Add ScreenShots
Priority
High
Record
[X] I have read the Contributing Guidelines
[X] I'm a GSSOC'24 contributor
[X] I'm a SSOC'24 contributor
[X] I want to work on this issue
This can be done in #464 only; there is no need for a separate issue to add a hover effect.
|
gharchive/issue
| 2024-07-10T17:08:49 |
2025-04-01T06:40:24.374784
|
{
"authors": [
"HarshadaGirase",
"singodiyashubham87"
],
"repo": "singodiyashubham87/Draw-it-out",
"url": "https://github.com/singodiyashubham87/Draw-it-out/issues/465",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1655748036
|
TadGan multivariate => ValueError: Axis 2 is outside the dimensions of X (1)
Orion version: 0.4.1
Python version: 3.7.16
Operating System: windows
Description
I am writing to seek your assistance with an issue I have encountered while using the orion-ml package. I am trying to use the package to perform multivariate anomaly detection on my dataset, which contains data sampled at 20 Hz for a period of 1 month, resulting in 72,000 data points per 1-hour window. The dataset consists of 30 variables, and I would like to perform anomaly detection on 1-hour windows of the data.
data format:
timestamp,0,1,2,3
1220000000,0.001714,-0.000164,0.002266,0.000921
1220000003,0.001731,-0.00018,0.002233,0.000516
1220000006,0.001731,-0.000213,0.002266,0.001423
1220000009,0.001714,-0.000213,0.00225,0.000872
.
.
.
What I Did
To do this, I am using the following code:
from orion import Orion
hyperparameters = {
"mlprimitives.custom.timeseries_preprocessing.rolling_window_sequences#1": {
'window_size': 72000,
},
'orion.primitives.tadgan.TadGAN#1': {
'epochs': 5,
'verbose': True,
'input_shape': [72000, 30],
}
}
orion = Orion(
pipeline='tadgan',
hyperparameters=hyperparameters
)
orion.fit(data)
However, when I run this code, I receive the following error message:
"ValueError: Axis 2 is outside the dimensions of X (1)."
I have tried to troubleshoot this error on my own, but I have been unable to identify the cause of the issue. I was hoping that you could help me understand what is causing this error and how I can resolve it.
Additionally, I have a few questions regarding the functionality of the orion-ml package that I hope you could help me with.
Hi @mohammadx0098! Thank you for using Orion!
The issue that you are seeing is because we need to adjust the interval hyperparameter. I noticed that the time between one timestamp and the next is 3 seconds, so I would set interval=3.
hyperparameters = {
"mlprimitives.custom.timeseries_preprocessing.time_segments_aggregate#1": {
'interval': 3
},
"mlprimitives.custom.timeseries_preprocessing.rolling_window_sequences#1": {
'window_size': 72000,
},
'orion.primitives.tadgan.TadGAN#1': {
'epochs': 5,
'verbose': True,
'input_shape': [72000, 30],
}
}
Let me know if this solves your issue
@sarahmish
thanks.
My problem is solved.
I wonder whether your framework can support multivariate signals,
like this dataset with 30 variables.
You mentioned before that your framework only produces output for one target column.
Does that mean I should run my code for each target column?
Great! Yes, precisely. Orion currently only supports detecting anomalies in one target column; therefore, if you would like to detect anomalies in multiple columns, you need to run a pipeline for each target column.
Great! Yes, precisely. Orion currently only supports detecting anomalies in one target column; therefore, if you would like to detect anomalies in multiple columns, you need to run a pipeline for each target column.
So I should add these lines for that?
target_column and target_shape
"mlprimitives.custom.timeseries_preprocessing.rolling_window_sequences#1": {
    'target_column': 0
},
'orion.primitives.tadgan.TadGAN#1': {
    'epochs': 5,
    'verbose': True,
    'input_shape': [100, 25],
    'target_shape': [100, 1],
}
Almost. You will need to go through the columns you have.
So in the first run
"mlprimitives.custom.timeseries_preprocessing.rolling_window_sequences#1": {
'target_column': 0
}
In the second run, you will change it to the second column
"mlprimitives.custom.timeseries_preprocessing.rolling_window_sequences#1": {
'target_column': 1
}
and so forth
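Putting the thread's advice together, a minimal sketch (hyperparameter values are taken from the discussion above; the loop and the use of detect are assumptions about how one might script this, and data is the DataFrame loaded earlier):
from orion import Orion

anomalies = {}
for target in range(30):  # one pipeline run per target column
    hyperparameters = {
        "mlprimitives.custom.timeseries_preprocessing.time_segments_aggregate#1": {
            'interval': 3,
        },
        "mlprimitives.custom.timeseries_preprocessing.rolling_window_sequences#1": {
            'window_size': 72000,
            'target_column': target,
        },
        'orion.primitives.tadgan.TadGAN#1': {
            'epochs': 5,
            'input_shape': [72000, 30],
            'target_shape': [72000, 1],
        },
    }
    orion = Orion(pipeline='tadgan', hyperparameters=hyperparameters)
    orion.fit(data)
    anomalies[target] = orion.detect(data)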
thanks :)
|
gharchive/issue
| 2023-04-05T14:33:14 |
2025-04-01T06:40:24.387338
|
{
"authors": [
"mohammadx0098",
"sarahmish"
],
"repo": "sintel-dev/Orion",
"url": "https://github.com/sintel-dev/Orion/issues/405",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
188795224
|
Please update changelog for v0.11
https://github.com/Sirupsen/logrus/blob/master/CHANGELOG.md wasn't updated.
Done!
|
gharchive/issue
| 2016-11-11T16:18:38 |
2025-04-01T06:40:24.408937
|
{
"authors": [
"AlekSi",
"sirupsen"
],
"repo": "sirupsen/logrus",
"url": "https://github.com/sirupsen/logrus/issues/443",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
408585954
|
missing training file while running TestCases scripts
File "CartPole_MCTSRS.py", line 61, in
data = joblib.load("Data/Train/itr_50.pkl")
File "/usr/local/anaconda3/lib/python3.6/site-packages/joblib/numpy_pickle.py", line 590, in load
with open(filename, 'rb') as f:
FileNotFoundError: [Errno 2] No such file or directory: 'Data/Train/itr_50.pkl'
Fixed.
|
gharchive/issue
| 2019-02-10T22:02:42 |
2025-04-01T06:40:24.412782
|
{
"authors": [
"divideby2",
"maxiaoba"
],
"repo": "sisl/AdaptiveStressTestingToolbox",
"url": "https://github.com/sisl/AdaptiveStressTestingToolbox/issues/9",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
110696819
|
"Hosted by"-string printed as b'Hosted by...'
So I was speedtesting my connection when I noticed something weird
Retrieving speedtest.net configuration...
Retrieving speedtest.net server list...
Testing from Ziggo (ip here)...
Selecting best server based on latency...
b'Hosted by SoftLayer Technologies, Inc. (Amsterdam) [32.92 km]: 15.53 ms'
Testing download speed........................................
Download: 68.76 Mbit/s
Testing upload speed..................................................
Upload: 12.13 Mbit/s
The "Hosted by"-string is printed as b'Hosted by...'.
b'Hosted by SoftLayer Technologies, Inc. (Amsterdam) [32.92 km]: 15.53 ms'
This has to do with the UTF-8 encoding as shown in this snippet:
>>> print(("test".encode('UTF-8')))
b'test'
I'm seeing this consistently too (Python 3.4.3 in Alpine Linux).
@maride any reason you didn't make 81fa7c5 into a PR? :smile: (does it not appropriately fix the issue?)
I also see the issue when I do --list:
$ speedtest-cli --list
Retrieving speedtest.net configuration...
Retrieving speedtest.net server list...
b'6412) T-Mobile (North Las Vegas, NV, United States) [7.52 km]\n1431) Switch (Las Vegas, NV, United States) [12.67 km]\n....'
A little less esoteric place I can reproduce easily is Python 3.5.1 from Debian Unstable (installing python3-pip and then doing pip3 install speedtest-cli).
Sorry if this is already obvious (I'll admit openly that my Python-fu is subpar), but it appears that this is caused by the fact that in Python 2, str.encode returns a "string" (https://docs.python.org/2/howto/unicode.html - "which returns an 8-bit string version of the Unicode string"), whereas Python 3's str.encode returns a "bytes object" (https://docs.python.org/3/library/stdtypes.html#str.encode - "Return an encoded version of the string as a bytes object").
Found more info on the topic: https://docs.python.org/3/howto/pyporting.html#text-versus-binary-data
I will try to make a patch for it.
I just wonder @sivel, why is it even encoded like that?
It will be fixed in a future version. It is already solved in another branch that I have.
Alright, thanks!
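A minimal illustration of the difference (plain Python 3, not speedtest-cli's actual code; decoding, or simply not encoding before printing, avoids the b'...' repr):
message = 'Hosted by ...'
print(message.encode('utf-8'))           # b'Hosted by ...'
print(message.encode('utf-8').decode())  # Hosted by ...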
|
gharchive/issue
| 2015-10-09T16:49:07 |
2025-04-01T06:40:24.460694
|
{
"authors": [
"sivel",
"tianon",
"tomvanderlee"
],
"repo": "sivel/speedtest-cli",
"url": "https://github.com/sivel/speedtest-cli/issues/192",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
284707682
|
Handle multiple failures in aggregate_failures block
RSpec 3.3 allows grouping multiple failures using aggregate_failures, but the formatter does not handle it. It just emits
<testcase classname="spec.sample_test" name="test with aggregate_failures with multiple failures" file="./spec/sample_test.rb" time="0.037076"><failure message="RSpec::Expectations::MultipleExpectationsNotMetError" type="RSpec::Expectations::MultipleExpectationsNotMetError">
./spec/sample_test.rb:4:in `block (2 levels) in <top (required)>'</failure></testcase>
while the RSpec output is
Failures:
1) test with aggregate_failures with multiple failures
Got 3 failures from failure aggregation block "multiple failures".
# ./spec/sample_test.rb:4:in `block (2 levels) in <top (required)>'
1.1) Failure/Error: expect(0).to eq 1
expected: 1
got: 0
(compared using ==)
# ./spec/sample_test.rb:5:in `block (3 levels) in <top (required)>'
1.2) Failure/Error: expect(2).to eq 3
expected: 3
got: 2
(compared using ==)
# ./spec/sample_test.rb:6:in `block (3 levels) in <top (required)>'
1.3) Failure/Error: expect(4).to eq 5
expected: 5
got: 4
(compared using ==)
# ./spec/sample_test.rb:7:in `block (3 levels) in <top (required)>'
Is there a way to handle it?
We have exactly the same issue; what's the status of this issue/PR?
This had been a minor annoyance for us for a while and as it happens I finally got around to looking into and implementing a fix yesterday. See PR #74 for my patch. I ended up leveraging the same fully_formatted_lines method as in PR #58.
|
gharchive/issue
| 2017-12-27T11:43:46 |
2025-04-01T06:40:24.495999
|
{
"authors": [
"TimAle",
"jasoncodes",
"xuantuan58"
],
"repo": "sj26/rspec_junit_formatter",
"url": "https://github.com/sj26/rspec_junit_formatter/issues/56",
"license": "mit",
"license_type": "permissive",
"license_source": "bigquery"
}
|
594796594
|
Bilateral filtering on depth maps is missing w.r.t. the original KinectFusion paper
Hi,
Is there any particular reason you aren't using bilateral filtering on the depth maps before processing them?
Every other paper, and even the original KinectFusion paper, uses bilateral filtering before passing the depth maps on for further processing.
I checked the spatial consistency for a single frame and found it promising, so I skipped the filter. Still, you can apply the filter to see whether there is any gain.
I checked the spatial consistency for a single frame and found it promising, so I skipped the filter. Still, you can apply the filter to see whether there is any gain.
It didn't help noticeably.
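For reference, a minimal sketch of such a pre-filter (OpenCV on the CPU, not this project's Metal pipeline; parameter values are placeholders):
import cv2
import numpy as np

# Edge-preserving smoothing of a single-channel float depth map before fusion.
depth = np.random.rand(480, 640).astype(np.float32)  # stand-in for a real frame
filtered = cv2.bilateralFilter(depth, d=5, sigmaColor=0.03, sigmaSpace=4.5)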
|
gharchive/issue
| 2020-04-06T04:39:09 |
2025-04-01T06:40:24.503442
|
{
"authors": [
"mali-tintash",
"sjy234sjy234"
],
"repo": "sjy234sjy234/KinectFusion-ios",
"url": "https://github.com/sjy234sjy234/KinectFusion-ios/issues/4",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1630905231
|
Add to Favorites enhancement
Hi, I'm the one who requested the enhancement on SF.
Thank you for the update, it's working great, but there is something I hope you can add.
Previously, in the favorites window, I could double-click these options to add to my favorites; the script does not do this.
I added an event for it. It should be okay now.
|
gharchive/issue
| 2023-03-19T11:51:50 |
2025-04-01T06:40:24.511851
|
{
"authors": [
"AlexanderHel",
"sk2589822"
],
"repo": "sk2589822/Exhentai-Enhancer",
"url": "https://github.com/sk2589822/Exhentai-Enhancer/issues/50",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2390572202
|
Thank You message added after submitting details
Description
Added a script to show the message, prevent the page from reloading, and reset the form.
Added a thank-you message div.
Fixes: #366
Type of change
[ ] Bug fix (non-breaking change which fixes an issue)
[x] New feature (non-breaking change which adds functionality)
[ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
[ ] This change requires a documentation update
Checklist:
[x] My code follows the style guidelines of this project
[x] I have performed a self-review of my own code
[x] I have commented my code, particularly in hard-to-understand areas
[x] I have made corresponding changes to the documentation
[x] My changes generate no new warnings
ATTACH SCREEN-SHOTS / DEPLOYMENT LINK
@sk66641 Please check it and merge!
Thank You !
|
gharchive/pull-request
| 2024-07-04T10:54:34 |
2025-04-01T06:40:24.516142
|
{
"authors": [
"zalabhavy"
],
"repo": "sk66641/Random-Disco-Light-Simulator",
"url": "https://github.com/sk66641/Random-Disco-Light-Simulator/pull/383",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
2406074994
|
[Feature]: UI enhancement
Description
Changed the UI of the warning modals.
The UI of all three warning modals has been changed.
Fixes: #406
Type of change
[ ] Bug fix (non-breaking change which fixes an issue)
[ ] New feature (non-breaking change which adds functionality)
[ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
[ ] This change requires a documentation update
Checklist:
[ ] My code follows the style guidelines of this project
[ ] I have performed a self-review of my own code
[ ] I have commented my code, particularly in hard-to-understand areas
[ ] I have made corresponding changes to the documentation
[ ] My changes generate no new warnings
ATTACH SCREEN-SHOTS / DEPLOYMENT LINK
Before
After
@sk66641 please review this pull request
|
gharchive/pull-request
| 2024-07-12T17:46:36 |
2025-04-01T06:40:24.521375
|
{
"authors": [
"aditya-bhaumik"
],
"repo": "sk66641/Random-Disco-Light-Simulator",
"url": "https://github.com/sk66641/Random-Disco-Light-Simulator/pull/417",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
1812073908
|
Allow more PopupSettings events
Describe the feature in detail (code, mocks, or screenshots encouraged)
Currently, PopupSettings is a bit limited in the events it offers. Would it be possible to add an event to PopupSettings such that the popup stays visible while the user hovers over it?
This would open up opportunities like creating a top-bar nav menu where hovering over one of the elements shows a popup with the subroutes. You wouldn't want the popup to go away when you try hovering over it and clicking one of the subroutes; instead, it should go away when the user hovers away from both the popup and the element that triggered it.
What type of pull request would this be?
New Feature
Provide relevant links or additional information.
No response
We've currently put a hold on new features for the popups.
After v2 there'll be a standalone skeleton-popup library that's going to improve ease of use and setup.
Should also allow for more options and configurability.
Was looking for these options as well (the one where the popup stays open when hovered).
@royce-mathew if you find a way to have the popup stay visible when hovered please let me know!
this is mainly just a simple workaround until we actually reach Skeleton V3
Just to clarify, the popup update will be between v2 and v3. We have a plan for introducing the new standalone features alongside the core features between major releases so folks can slowly migrate over. Then when v3 drops it'll remove the original feature from the core library - that'll be the breaking change it implements. Just so everyone is on the same page!
FYI, as we begin prepping for the new standalone popup package, we're consolidating all known issues for popups into this new thread:
https://github.com/skeletonlabs/skeleton/issues/1916
Your post will now be closed, but has been referenced in the post linked above. Please note that by doing this, your request is being folded into this larger effort. Please feel free to monitor the linked issue if you wish to track progress on this going forward.
|
gharchive/issue
| 2023-07-19T14:24:15 |
2025-04-01T06:40:24.560999
|
{
"authors": [
"LickABrick",
"Sarenor",
"endigo9740",
"royce-mathew"
],
"repo": "skeletonlabs/skeleton",
"url": "https://github.com/skeletonlabs/skeleton/issues/1778",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
615418891
|
Update scalafmt-core to 2.5.2
Updates org.scalameta:scalafmt-core from 2.5.1 to 2.5.2.
GitHub Release Notes - Version Diff
I'll automatically update this PR to resolve conflicts as long as you don't change it yourself.
If you'd like to skip this version, you can just close this PR. If you have any feedback, just mention me in the comments below.
Configure Scala Steward for your repository with a .scala-steward.conf file.
Have a fantastic day writing Scala!
Ignore future updates
Add this to your .scala-steward.conf file to ignore future updates of this dependency:
updates.ignore = [ { groupId = "org.scalameta", artifactId = "scalafmt-core" } ]
labels: library-update, semver-patch
Codecov Report
Merging #361 into master will not change coverage.
The diff coverage is n/a.
@@ Coverage Diff @@
## master #361 +/- ##
=======================================
Coverage 86.49% 86.49%
=======================================
Files 135 135
Lines 1474 1474
Branches 36 36
=======================================
Hits 1275 1275
Misses 199 199
Continue to review full report at Codecov.
Legend - Click here to learn more
Δ = absolute <relative> (impact), ø = not affected, ? = missing data
Powered by Codecov. Last update eb7041e...667d0f3. Read the comment docs.
|
gharchive/pull-request
| 2020-05-10T15:45:31 |
2025-04-01T06:40:24.613869
|
{
"authors": [
"codecov-io",
"scala-steward"
],
"repo": "sksamuel/scapegoat",
"url": "https://github.com/sksamuel/scapegoat/pull/361",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
1946906691
|
Improvements
Python's method resolution order makes it difficult to use the SniperLinkAccountAdapter with another adapter.
Instead, I split it out into a SniperLinkAccountAdapterMixin that only defines the add_message method, and refactored SniperLinkAccountAdapter for backwards compatibility.
I also used a real project in the example when I updated the README.
And then while I was updating the documentation, I made a few more tweaks to the Markdown.
Thank you for this project, sniper links are cool! If you can publish a new release once these changes are merged, I'd appreciate it.
@skulegirl Do you have any feedback on this?
|
gharchive/pull-request
| 2023-10-17T08:55:56 |
2025-04-01T06:40:24.615954
|
{
"authors": [
"blag"
],
"repo": "skulegirl/django-allauth-sniperlinks",
"url": "https://github.com/skulegirl/django-allauth-sniperlinks/pull/2",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
835672140
|
Type of intersection of two object types A and B is not equal to A & B
I am trying to build a generic type with @skunkteam/types that takes an existing (object) type and extends it with a number of fields (id in this example):
import { BaseObjectLikeTypeImpl, intersection, object, string, The, TypeImpl } from '@skunkteam/types';
export type ObjectType<ResultType> = TypeImpl<BaseObjectLikeTypeImpl<ResultType>>;
export type Person = The<typeof Person>;
export const Person = object('Person', { name: string });
export type RestDocument<T> = T & { id: string };
export function RestDocument<T>(type: ObjectType<T>): ObjectType<RestDocument<T>> {
return intersection(`RestDocument<${type.name}>`, [object({ id: string }), type]);
}
Unfortunately, this throws a TypeScript error:
error TS2322: Type 'TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>' is not assignable to type 'TypeImpl<BaseObjectLikeTypeImpl<RestDocument<T>>>'.
Type 'TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>' is not assignable to type 'BaseObjectLikeTypeImpl<RestDocument<T>>'.
The types returned by 'and(...)' are incompatible between these types.
Type 'TypeImpl<IntersectionType<[TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>, any]>>' is not assignable to type 'TypeImpl<IntersectionType<[BaseObjectLikeTypeImpl<RestDocument<T>>, any]>>'.
Type 'TypeImpl<IntersectionType<[TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>, any]>>' is not assignable to type 'IntersectionType<[BaseObjectLikeTypeImpl<RestDocument<T>>, any]>'.
Types of property 'types' are incompatible.
Type '[TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>, any]' is not assignable to type '[BaseObjectLikeTypeImpl<RestDocument<T>>, any]'.
Type 'TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>' is not assignable to type 'BaseObjectLikeTypeImpl<RestDocument<T>>'.
The types returned by 'and(...)' are incompatible between these types.
Type 'TypeImpl<IntersectionType<[TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>, any]>>' is not assignable to type 'TypeImpl<IntersectionType<[BaseObjectLikeTypeImpl<RestDocument<T>>, any]>>'.
Type 'TypeImpl<IntersectionType<[TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>, any]>>' is not assignable to type 'IntersectionType<[BaseObjectLikeTypeImpl<RestDocument<T>>, any]>'.
Types of property 'types' are incompatible.
Type '[TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>, any]' is not assignable to type '[BaseObjectLikeTypeImpl<RestDocument<T>>, any]'.
Type 'TypeImpl<IntersectionType<[TypeImpl<InterfaceType<{ id: TypeImpl<BaseTypeImpl<string>>; }, TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>, TypeImpl<...>]>>' is not assignable to type 'BaseObjectLikeTypeImpl<RestDocument<T>>'.
The types returned by 'typeValidator(...)' are incompatible between these types.
Type 'Result<MergeIntersection<T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>' is not assignable to type 'Result<RestDocument<T>>'.
Type 'Success<MergeIntersection<T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>' is not assignable to type 'Result<RestDocument<T>>'.
Type 'Success<MergeIntersection<T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>>' is not assignable to type 'Success<RestDocument<T>>'.
Type 'MergeIntersection<T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>' is not assignable to type 'RestDocument<T>'.
Type '(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>) | { [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<...>; }' is not assignable to type 'RestDocument<T>'.
Type '{ [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>; }' is not assignable to type 'RestDocument<T>'.
Type '{ [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>; }' is not assignable to type 'T'.
'T' could be instantiated with an arbitrary type which could be unrelated to '{ [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>; }'.
Type 'MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>' is not assignable to type 'T[P]'.
Type '{ id: string; }' is not assignable to type 'RestDocument<T>'.
Type '{ id: string; }' is not assignable to type 'T'.
'T' could be instantiated with an arbitrary type which could be unrelated to '{ id: string; }'.
Type 'MergeIntersection<T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>' is not assignable to type 'T'.
'T' could be instantiated with an arbitrary type which could be unrelated to 'MergeIntersection<T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>>'.
Type '(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>) | { [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<...>; }' is not assignable to type 'T'.
'T' could be instantiated with an arbitrary type which could be unrelated to '(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>) | { [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<...>; }'.
Type '{ [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>; }' is not assignable to type 'T'.
'T' could be instantiated with an arbitrary type which could be unrelated to '{ [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)]: MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>; }'.
Type 'MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]>' is not assignable to type 'T[P]'.
Type '(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P] | { [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]]: MergeIntersection<...>; }' is not assignable to type 'T[P]'.
Type '{ [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P]]: MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[P][P]>; }' is not assignable to type 'T[P]'.
Type 'MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)["id" | keyof T]>' is not assignable to type 'T[P]'.
Type '(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)["id" | keyof T] | { [P in keyof (T & TypeOfProperties<Writable<{ id: TypeImpl<...>; }>>)["id" | keyof T]]: MergeIntersection<...>; }' is not assignable to type 'T[P]'.
Type '(T["id"] & string) | (T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[keyof T]' is not assignable to type 'T[P]'.
Type 'T["id"] & string' is not assignable to type 'T[P]'.
Type 'MergeIntersection<T["id"] & string> | MergeIntersection<(T & TypeOfProperties<Writable<{ id: TypeImpl<BaseTypeImpl<string>>; }>>)[keyof T]>' is not assignable to type 'T[P]'.
Type 'MergeIntersection<T["id"] & string>' is not assignable to type 'T[P]'.
Type '(T["id"] & string) | { [P in keyof (Record<string | number | symbol, unknown> & T["id"] & string)]: MergeIntersection<(Record<string | number | symbol, unknown> & T["id"] & string)[P]>; }' is not assignable to type 'T[P]'.
Type 'T["id"] & string' is not assignable to type 'T[P]'.
Type 'string' is not assignable to type 'T[P]'.
Type '{ id: string; }' is not assignable to type 'T'.
'T' could be instantiated with an arbitrary type which could be unrelated to '{ id: string; }'.
The following should work:
// Imports assume the public API of @skunkteam/types as used in this snippet:
import { intersection, object, string } from '@skunkteam/types';
import type { ObjectType, The, Writable } from '@skunkteam/types';

export type Person = The<typeof Person>;
export const Person = object('Person', { name: string });

// This is the trick here: vvvvvvvv
export type RestDocument<T> = Writable<T> & { id: string };
export function RestDocument<T>(type: ObjectType<T>): ObjectType<RestDocument<T>> {
    return intersection(`RestDocument<${type.name}>`, [object({ id: string }), type]);
}
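For illustration, usage might look like the following sketch (it assumes the callable-constructor style of @skunkteam/types; the exact inferred shape may vary by library version):
const PersonDocument = RestDocument(Person);
type PersonDocument = The<typeof PersonDocument>;
// PersonDocument should now be { name: string } & { id: string }
const doc = PersonDocument({ name: 'Ada Lovelace', id: '42' }); // validates at runtime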
Objects that are passed into object to configure the validator are converted to TypeScript types on the fly (at compile time). During this conversion we ensure that any readonly annotation on the properties is removed (using the Writable type that is exported from the @skunkteam/types lib). If we didn't do that, a readonly property in the config object would result in a readonly property in the resulting type, and that would be unreasonable, I think.
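To make that concrete, here is a minimal sketch of what such a Writable mapped type looks like in plain TypeScript (assuming the same semantics as the one exported from @skunkteam/types):
type Writable<T> = { -readonly [K in keyof T]: T[K] };
type Config = { readonly name: string };
type Plain = Writable<Config>; // resolves to { name: string }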
You have to forgive TypeScript for the error-message inception; we are stretching its limits here. Please try the suggested solution above and let me know.
Just came out of a call with @wvanderdeijl and we figured that this needs to be documented in the README. Maybe in the future we can think of a way to remove the mandatory use of Writable.
|
gharchive/issue
| 2021-03-19T08:00:28 |
2025-04-01T06:40:24.630158
|
{
"authors": [
"pavadeli",
"wvanderdeijl"
],
"repo": "skunkteam/types",
"url": "https://github.com/skunkteam/types/issues/25",
"license": "MIT",
"license_type": "permissive",
"license_source": "github-api"
}
|
621052438
|
Simplify adding new probes
It would be nice to have a more straightforward approach to adding new probes.
Maybe it is possible to copy how Telegraf handles it.
They add a new folder with the "plugin" plus an import in a file that lists all the plugins: https://github.com/influxdata/telegraf/pull/7418/files
They rely on the "init()" function of each package to register the plugin.
We thought about using this approach (instead of calling the Register function manually here: https://github.com/skydive-project/skydive/blob/master/agent/topology_probes.go#L48-L60). But, if I remember correctly, we decided it was more explicit to call the Register function than to rely on an unnamed import like import _ "github.com/skydive-project/skydive/topology/probes/docker".
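For reference, the side-effect-import idea looks roughly like this, sketched in TypeScript for brevity (Skydive itself is Go, and all names below are hypothetical):
// registry.ts — a tiny registry that probes add themselves to:
export const probes = new Map<string, () => unknown>();
export const register = (name: string, factory: () => unknown): void => { probes.set(name, factory); };

// probes/docker.ts — calling register() is this module's only side effect:
// import { register } from '../registry';
register('docker', () => createDockerProbe()); // createDockerProbe is assumed for the sketch

// all.ts — the bare import exists purely to trigger that side effect,
// mirroring Go's import _ "github.com/.../probes/docker":
import './probes/docker';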
|
gharchive/issue
| 2020-05-19T15:04:15 |
2025-04-01T06:40:24.649293
|
{
"authors": [
"adrianlzt",
"lebauce"
],
"repo": "skydive-project/skydive",
"url": "https://github.com/skydive-project/skydive/issues/2214",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
308711378
|
http: fix Basic Auth, as the server needs to reply with WWW-Authenticate on 401
run python-tests
rerun scale-tests
[ci skip skydive-compile-tests]
[ci skip] skydive-compile-tests
[ci skip]
[skip ci]
@safchain all done, you can check/merge
|
gharchive/pull-request
| 2018-03-26T19:50:42 |
2025-04-01T06:40:24.656254
|
{
"authors": [
"nplanel",
"safchain"
],
"repo": "skydive-project/skydive",
"url": "https://github.com/skydive-project/skydive/pull/905",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|
747566029
|
Custom Error Handling
Is there any way we can deserialize the JSON in errorBody?
Hi,
You can resolve it using the ApiErrorModelMapper.
The map function will receive the raw ApiResponse.Failure.Error<*> model, and you can transform it into your own error response. You can then handle the customized model via the map extension.
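The idea, sketched in TypeScript for illustration only (Sandwich itself is Kotlin, and these type names are made up for the sketch):
// The raw failure exposes the HTTP status code and the unparsed body string.
interface RawFailure { statusCode: number; errorBody: string }
// Your own domain error model, deserialized from the JSON body.
interface MyApiError { code: string; message: string }
// The mapper runs once per failure, so call sites only ever deal with MyApiError.
const mapError = (failure: RawFailure): MyApiError => JSON.parse(failure.errorBody) as MyApiError;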
Thanks 👍
|
gharchive/issue
| 2020-11-20T15:11:10 |
2025-04-01T06:40:24.658059
|
{
"authors": [
"TrueKage",
"skydoves"
],
"repo": "skydoves/Sandwich",
"url": "https://github.com/skydoves/Sandwich/issues/8",
"license": "Apache-2.0",
"license_type": "permissive",
"license_source": "github-api"
}
|
854666432
|
Allow binding of the "Tab" key
I have muscle memory of navigating browser tabs using "Ctrl-Tab" and "Ctrl-Shift-Tab". Unfortunately, lagrange does not allow me to create this particular binding.
Version 1.3
Fixed for v1.3.2.
|
gharchive/issue
| 2021-04-09T16:13:56 |
2025-04-01T06:40:24.664936
|
{
"authors": [
"benthor",
"skyjake"
],
"repo": "skyjake/lagrange",
"url": "https://github.com/skyjake/lagrange/issues/244",
"license": "BSD-2-Clause",
"license_type": "permissive",
"license_source": "github-api"
}
|
787344800
|
[feature] get signatures info using CLI tool
Describe your idea:
@skylot first of all, thank you very much for Jadx. This makes the android RE experience so much easier.
About the feature request: I see that jadx-gui has an APK signature view which shows the details of all versions of signatures used. However, I don't see an equivalent option for the jadx CLI. It would be nice to get the signature info written to a file containing all the details (say, signatures.json) via a switch like --cert-info.
Additional context: I'm working on https://github.com/Surendrajat/APKLab and I'm using the jadx CLI to decompile the APK. I'd very much like to avoid adding yet another jar file just to parse certificates, because jadx can already do that... just not in the CLI yet.
@Surendrajat this can be done.
By the way, are you going to parse that file? Jadx uses the info provided by the apksig library (check the ApkSignature class), so maybe it would be easier for you to just use that lib.
@skylot thanks for the reply.
No. I am planning to keep the JSON(?) file as output in the project root for interested people to see.
Yes, I see that jadx uses apksig, but I'm doing it in TypeScript, not Java, and am trying to avoid adding yet another jar :)
But for now, I think I can parse the little info uber-apk-signer spits out and save that until it's done in jadx.
@skylot Hi, does jadx now have a CLI command to get an APK's signature?
By the way, I have a question about how jadx gets the signature of an APK. I have used tools such as apksigner.jar, keytool.jar, etc. to test some APKs, and their result is "No Jar Signatures". However, although jadx also reports "No Jar Signatures" for these APKs, it still shows valid signature info for them. How does this work?
@99zikke Jadx uses the apksig library, which is also used by apksigner.
Usually "JAR signature" is the name of the APK v1 signature. An APK can have a v2 and/or v3 signature, with or without a v1 signature. Each signature version is independent of the other versions and can be present or not. Does that resolve your confusion?
I got it. Thank you very much!
|
gharchive/issue
| 2021-01-16T04:25:31 |
2025-04-01T06:40:24.678080
|
{
"authors": [
"99zikke",
"Surendrajat",
"jpstotz",
"skylot"
],
"repo": "skylot/jadx",
"url": "https://github.com/skylot/jadx/issues/1083",
"license": "apache-2.0",
"license_type": "permissive",
"license_source": "bigquery"
}
|