file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---
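Each row below is a raw fill-in-the-middle (FIM) sample: a source file split into a prefix, a middle (the span to be predicted), and a suffix, with fim_type recording how the hole was chosen (the four classes appearing here are random_line_split, identifier_name, conditional_block, and identifier_body). As a minimal Go sketch (hypothetical, not this dataset's actual loader), concatenating the three text columns round-trips to the original file:

package main

import "fmt"

// fimRow mirrors the columns above; the struct and the sample values are
// illustrative only, not an actual row of this dataset.
type fimRow struct {
	fileName string
	prefix   string
	middle   string
	suffix   string
	fimType  string
}

func main() {
	row := fimRow{
		fileName: "add.go",
		prefix:   "func add(a, b int) int {\n\treturn ",
		middle:   "a + b",
		suffix:   "\n}\n",
		fimType:  "random_line_split",
	}
	// prefix + middle + suffix reconstructs the original file content.
	fmt.Printf("%s (%s):\n%s%s%s", row.fileName, row.fimType, row.prefix, row.middle, row.suffix)
}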
cluster_feeder.go
|
v1lister.NewPodLister(indexer)
stopCh := make(chan struct{})
go controller.Run(stopCh)
return podLister
}
// NewPodListerAndOOMObserver creates a pair of pod lister and OOM observer.
func NewPodListerAndOOMObserver(kubeClient kube_client.Interface, namespace string) (v1lister.PodLister, oom.Observer) {
oomObserver := oom.NewObserver()
podLister := newPodClients(kubeClient, oomObserver, namespace)
WatchEvictionEventsWithRetries(kubeClient, oomObserver, namespace)
return podLister, oomObserver
}
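// exampleBootstrap is a hypothetical caller of the constructor above, shown
// for illustration only: the in-cluster config handling assumes the standard
// k8s.io/client-go/rest package, which is not imported by this file. An
// empty namespace watches pods in all namespaces.
func exampleBootstrap() (v1lister.PodLister, oom.Observer) {
	config, err := rest.InClusterConfig()
	if err != nil {
		klog.Fatalf("Cannot build in-cluster config: %v", err)
	}
	kubeClient := kube_client.NewForConfigOrDie(config)
	return NewPodListerAndOOMObserver(kubeClient, "")
}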
type clusterStateFeeder struct {
coreClient corev1.CoreV1Interface
specClient spec.SpecClient
metricsClient metrics.MetricsClient
oomChan <-chan oom.OomInfo
vpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter
vpaLister vpa_lister.VerticalPodAutoscalerLister
clusterState *model.ClusterState
selectorFetcher target.VpaTargetSelectorFetcher
memorySaveMode bool
controllerFetcher controllerfetcher.ControllerFetcher
recommenderName string
}
func (feeder *clusterStateFeeder) InitFromHistoryProvider(historyProvider history.HistoryProvider) {
klog.V(3).Info("Initializing VPA from history provider")
clusterHistory, err := historyProvider.GetClusterHistory()
if err != nil {
klog.Errorf("Cannot get cluster history: %v", err)
}
for podID, podHistory := range clusterHistory {
klog.V(4).Infof("Adding pod %v with labels %v", podID, podHistory.LastLabels)
feeder.clusterState.AddOrUpdatePod(podID, podHistory.LastLabels, apiv1.PodUnknown)
for containerName, sampleList := range podHistory.Samples {
containerID := model.ContainerID{
PodID: podID,
ContainerName: containerName,
}
if err = feeder.clusterState.AddOrUpdateContainer(containerID, nil); err != nil {
klog.Warningf("Failed to add container %+v. Reason: %+v", containerID, err)
}
klog.V(4).Infof("Adding %d samples for container %v", len(sampleList), containerID)
for _, sample := range sampleList {
if err := feeder.clusterState.AddSample(
&model.ContainerUsageSampleWithKey{
ContainerUsageSample: sample,
Container: containerID,
}); err != nil {
klog.Warningf("Error adding metric sample for container %v: %v", containerID, err)
}
}
}
}
}
func (feeder *clusterStateFeeder) setVpaCheckpoint(checkpoint *vpa_types.VerticalPodAutoscalerCheckpoint) error {
vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName}
vpa, exists := feeder.clusterState.Vpas[vpaID]
if !exists {
return fmt.Errorf("cannot load checkpoint to missing VPA object %+v", vpaID)
}
cs := model.NewAggregateContainerState()
err := cs.LoadFromCheckpoint(&checkpoint.Status)
if err != nil {
return fmt.Errorf("cannot load checkpoint for VPA %+v. Reason: %v", vpa.ID, err)
}
vpa.ContainersInitialAggregateState[checkpoint.Spec.ContainerName] = cs
return nil
}
func (feeder *clusterStateFeeder) InitFromCheckpoints() {
klog.V(3).Info("Initializing VPA from checkpoints")
feeder.LoadVPAs()
namespaces := make(map[string]bool)
for _, v := range feeder.clusterState.Vpas {
namespaces[v.ID.Namespace] = true
}
for namespace := range namespaces {
klog.V(3).Infof("Fetching checkpoints from namespace %s", namespace)
checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{})
if err != nil {
klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err)
}
for _, checkpoint := range checkpointList.Items {
klog.V(3).Infof("Loading VPA %s/%s checkpoint for %s", checkpoint.ObjectMeta.Namespace, checkpoint.Spec.VPAObjectName, checkpoint.Spec.ContainerName)
err = feeder.setVpaCheckpoint(&checkpoint)
if err != nil {
klog.Errorf("Error while loading checkpoint. Reason: %+v", err)
}
}
}
}
func (feeder *clusterStateFeeder) GarbageCollectCheckpoints() {
klog.V(3).Info("Starting garbage collection of checkpoints")
feeder.LoadVPAs()
namespaceList, err := feeder.coreClient.Namespaces().List(context.TODO(), metav1.ListOptions{})
if err != nil {
klog.Errorf("Cannot list namespaces. Reason: %+v", err)
return
}
for _, namespaceItem := range namespaceList.Items {
namespace := namespaceItem.Name
checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{})
if err != nil {
klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err)
}
for _, checkpoint := range checkpointList.Items {
vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName}
_, exists := feeder.clusterState.Vpas[vpaID]
if !exists {
err = feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).Delete(context.TODO(), checkpoint.Name, metav1.DeleteOptions{})
if err == nil {
klog.V(3).Infof("Orphaned VPA checkpoint cleanup - deleting %v/%v.", namespace, checkpoint.Name)
} else {
klog.Errorf("Cannot delete VPA checkpoint %v/%v. Reason: %+v", namespace, checkpoint.Name, err)
}
}
}
}
}
func implicitDefaultRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector) bool {
return len(selectors) == 0
}
func selectsRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector, name *string) bool {
for _, s := range selectors {
if s.Name == *name {
return true
}
}
return false
}
// Filter out VPA objects whose recommender selection does not match this recommender's name.
func filterVPAs(feeder *clusterStateFeeder, allVpaCRDs []*vpa_types.VerticalPodAutoscaler) []*vpa_types.VerticalPodAutoscaler {
klog.V(3).Infof("Start selecting the vpaCRDs.")
var vpaCRDs []*vpa_types.VerticalPodAutoscaler
for _, vpaCRD := range allVpaCRDs {
if feeder.recommenderName == DefaultRecommenderName {
if !implicitDefaultRecommender(vpaCRD.Spec.Recommenders) && !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) {
klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName)
continue
}
} else {
if implicitDefaultRecommender(vpaCRD.Spec.Recommenders) {
klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as %v recommender doesn't process CRDs implicitly destined to %v recommender", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName, DefaultRecommenderName)
continue
}
if !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) {
klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName)
continue
}
}
vpaCRDs = append(vpaCRDs, vpaCRD)
}
return vpaCRDs
}
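// exampleRecommenderSelection illustrates the two helpers used above with
// invented values: a VPA listing no recommenders implicitly targets the
// default recommender, while listing recommenders restricts it to the named
// ones. This sketch is not part of the original file.
func exampleRecommenderSelection() {
	selectors := []*vpa_types.VerticalPodAutoscalerRecommenderSelector{
		{Name: "performance"}, // invented recommender name
	}
	name := "performance"
	klog.Infof("%v", implicitDefaultRecommender(nil))       // true: no recommenders listed
	klog.Infof("%v", implicitDefaultRecommender(selectors)) // false: one is listed
	klog.Infof("%v", selectsRecommender(selectors, &name))  // true: "performance" appears
}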
// LoadVPAs fetches VPA objects and loads them into the cluster state.
func (feeder *clusterStateFeeder) LoadVPAs() {
// List VPA API objects.
allVpaCRDs, err := feeder.vpaLister.List(labels.Everything())
if err != nil {
klog.Errorf("Cannot list VPAs. Reason: %+v", err)
return
}
// Filter out VPAs whose recommender selectors do not match this recommender's name.
vpaCRDs := filterVPAs(feeder, allVpaCRDs)
klog.V(3).Infof("Fetched %d VPAs.", len(vpaCRDs))
// Add or update existing VPAs in the model.
vpaKeys := make(map[model.VpaID]bool)
for _, vpaCRD := range vpaCRDs {
vpaID := model.VpaID{
Namespace: vpaCRD.Namespace,
|
VpaName: vpaCRD.Name,
|
random_line_split
|
|
cluster_feeder.go
|
clusterState: m.ClusterState,
specClient: spec.NewSpecClient(m.PodLister),
selectorFetcher: m.SelectorFetcher,
memorySaveMode: m.MemorySaveMode,
controllerFetcher: m.ControllerFetcher,
recommenderName: m.RecommenderName,
}
}
// WatchEvictionEventsWithRetries watches new Events with reason=Evicted and passes them to the observer.
func WatchEvictionEventsWithRetries(kubeClient kube_client.Interface, observer oom.Observer, namespace string) {
go func() {
options := metav1.ListOptions{
FieldSelector: "reason=Evicted",
}
watchEvictionEventsOnce := func() {
watchInterface, err := kubeClient.CoreV1().Events(namespace).Watch(context.TODO(), options)
if err != nil {
klog.Errorf("Cannot initialize watching events. Reason %v", err)
return
}
watchEvictionEvents(watchInterface.ResultChan(), observer)
}
for {
watchEvictionEventsOnce()
// Wait between attempts; retrying too often puts unnecessary load on the API server.
waitTime := wait.Jitter(evictionWatchRetryWait, evictionWatchJitterFactor)
klog.V(1).Infof("An attempt to watch eviction events finished. Waiting %v before the next one.", waitTime)
time.Sleep(waitTime)
}
}()
}
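// exampleEvictionBackoff illustrates the jittered delay used above. The
// constant values are assumptions for this sketch; the real ones are defined
// elsewhere in the package. wait.Jitter(d, f) returns a duration in
// [d, d+f*d), spreading retries out instead of synchronizing them.
func exampleEvictionBackoff() time.Duration {
	const retryWait = 10 * time.Second // assumed value
	const jitterFactor = 0.5           // assumed value
	return wait.Jitter(retryWait, jitterFactor) // 10s <= result < 15s
}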
func watchEvictionEvents(evictedEventChan <-chan watch.Event, observer oom.Observer) {
for {
evictedEvent, ok := <-evictedEventChan
if !ok {
klog.V(3).Infof("Eviction event chan closed")
return
}
if evictedEvent.Type == watch.Added {
evictedEvent, ok := evictedEvent.Object.(*apiv1.Event)
if !ok {
continue
}
observer.OnEvent(evictedEvent)
}
}
}
// Creates clients watching pods: a PodLister listing only non-terminated pods.
func newPodClients(kubeClient kube_client.Interface, resourceEventHandler cache.ResourceEventHandler, namespace string) v1lister.PodLister {
// We are interested in pods which are Running or Unknown (if the pod is
// running but hits transient errors, we don't want to delete it from
// our model).
// We don't want to watch Pending pods because they haven't generated any
// usage yet.
// Succeeded and Failed pods don't generate any usage anymore, but we
// don't necessarily want to delete them immediately.
selector := fields.ParseSelectorOrDie("status.phase!=" + string(apiv1.PodPending))
podListWatch := cache.NewListWatchFromClient(kubeClient.CoreV1().RESTClient(), "pods", namespace, selector)
indexer, controller := cache.NewIndexerInformer(
podListWatch,
&apiv1.Pod{},
time.Hour,
resourceEventHandler,
cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc},
)
podLister := v1lister.NewPodLister(indexer)
stopCh := make(chan struct{})
go controller.Run(stopCh)
return podLister
}
// NewPodListerAndOOMObserver creates a pair of pod lister and OOM observer.
func NewPodListerAndOOMObserver(kubeClient kube_client.Interface, namespace string) (v1lister.PodLister, oom.Observer) {
oomObserver := oom.NewObserver()
podLister := newPodClients(kubeClient, oomObserver, namespace)
WatchEvictionEventsWithRetries(kubeClient, oomObserver, namespace)
return podLister, oomObserver
}
type clusterStateFeeder struct {
coreClient corev1.CoreV1Interface
specClient spec.SpecClient
metricsClient metrics.MetricsClient
oomChan <-chan oom.OomInfo
vpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter
vpaLister vpa_lister.VerticalPodAutoscalerLister
clusterState *model.ClusterState
selectorFetcher target.VpaTargetSelectorFetcher
memorySaveMode bool
controllerFetcher controllerfetcher.ControllerFetcher
recommenderName string
}
func (feeder *clusterStateFeeder) InitFromHistoryProvider(historyProvider history.HistoryProvider) {
klog.V(3).Info("Initializing VPA from history provider")
clusterHistory, err := historyProvider.GetClusterHistory()
if err != nil {
klog.Errorf("Cannot get cluster history: %v", err)
}
for podID, podHistory := range clusterHistory {
klog.V(4).Infof("Adding pod %v with labels %v", podID, podHistory.LastLabels)
feeder.clusterState.AddOrUpdatePod(podID, podHistory.LastLabels, apiv1.PodUnknown)
for containerName, sampleList := range podHistory.Samples {
containerID := model.ContainerID{
PodID: podID,
ContainerName: containerName,
}
if err = feeder.clusterState.AddOrUpdateContainer(containerID, nil); err != nil {
klog.Warningf("Failed to add container %+v. Reason: %+v", containerID, err)
}
klog.V(4).Infof("Adding %d samples for container %v", len(sampleList), containerID)
for _, sample := range sampleList {
if err := feeder.clusterState.AddSample(
&model.ContainerUsageSampleWithKey{
ContainerUsageSample: sample,
Container: containerID,
}); err != nil {
klog.Warningf("Error adding metric sample for container %v: %v", containerID, err)
}
}
}
}
}
func (feeder *clusterStateFeeder)
|
(checkpoint *vpa_types.VerticalPodAutoscalerCheckpoint) error {
vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName}
vpa, exists := feeder.clusterState.Vpas[vpaID]
if !exists {
return fmt.Errorf("cannot load checkpoint to missing VPA object %+v", vpaID)
}
cs := model.NewAggregateContainerState()
err := cs.LoadFromCheckpoint(&checkpoint.Status)
if err != nil {
return fmt.Errorf("cannot load checkpoint for VPA %+v. Reason: %v", vpa.ID, err)
}
vpa.ContainersInitialAggregateState[checkpoint.Spec.ContainerName] = cs
return nil
}
func (feeder *clusterStateFeeder) InitFromCheckpoints() {
klog.V(3).Info("Initializing VPA from checkpoints")
feeder.LoadVPAs()
namespaces := make(map[string]bool)
for _, v := range feeder.clusterState.Vpas {
namespaces[v.ID.Namespace] = true
}
for namespace := range namespaces {
klog.V(3).Infof("Fetching checkpoints from namespace %s", namespace)
checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{})
if err != nil {
klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err)
}
for _, checkpoint := range checkpointList.Items {
klog.V(3).Infof("Loading VPA %s/%s checkpoint for %s", checkpoint.ObjectMeta.Namespace, checkpoint.Spec.VPAObjectName, checkpoint.Spec.ContainerName)
err = feeder.setVpaCheckpoint(&checkpoint)
if err != nil {
klog.Errorf("Error while loading checkpoint. Reason: %+v", err)
}
}
}
}
func (feeder *clusterStateFeeder) GarbageCollectCheckpoints() {
klog.V(3).Info("Starting garbage collection of checkpoints")
feeder.LoadVPAs()
namespaceList, err := feeder.coreClient.Namespaces().List(context.TODO(), metav1.ListOptions{})
if err != nil {
klog.Errorf("Cannot list namespaces. Reason: %+v", err)
return
}
for _, namespaceItem := range namespaceList.Items {
namespace := namespaceItem.Name
checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{})
if err != nil {
klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err)
}
for _, checkpoint := range checkpointList.Items {
vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName}
_, exists := feeder.clusterState.Vpas[vpaID]
if !exists {
err = feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).Delete(context.TODO(), checkpoint.Name, metav1.DeleteOptions{})
if err == nil {
klog.V(3).Infof("Orphaned VPA checkpoint cleanup - deleting %v/%v.", namespace, checkpoint.Name)
} else {
klog.Errorf("Cannot delete VPA checkpoint %v/%v. Reason: %+v", namespace, checkpoint.Name, err)
}
}
}
}
}
func implicitDefaultRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector) bool {
return len(selectors) == 0
}
func selectsRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector, name *
|
setVpaCheckpoint
|
identifier_name
|
buffers_handler.ts
|
PeriodBuffer$ observable once all further Buffers have been
// cleared.
createNextPeriodBuffer$.complete();
// emit destruction signal to the next Buffer first
destroyNextBuffers$.next();
destroyNextBuffers$.complete(); // we do not need it anymore
}),
share() // share side-effects
);
/**
* Will emit when the current buffer should be destroyed.
* @type {Observable}
*/
const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$);
const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe(
mergeMap((
evt : IPeriodBufferEvent
) : Observable<IMultiplePeriodBuffersEvent> => {
const { type } = evt;
if (type === "full-buffer") {
/**
* The Period coming just after the current one.
* @type {Period|undefined}
*/
const nextPeriod = manifest.getPeriodAfter(basePeriod);
if (nextPeriod == null) {
// no more Period: emit the completion event
return observableOf(EVENTS.bufferComplete(bufferType));
} else {
// current buffer is full, create the next one if not already created
createNextPeriodBuffer$.next(nextPeriod);
}
} else if (type === "active-buffer") {
// current buffer is active, destroy next buffer if created
destroyNextBuffers$.next();
}
return observableOf(evt);
}),
share()
);
/**
* Buffer for the current Period.
* @type {Observable}
*/
const currentBuffer$ : Observable<IMultiplePeriodBuffersEvent> =
observableConcat(
observableOf(EVENTS.periodBufferReady(bufferType, basePeriod, adaptation$)),
periodBuffer$.pipe(takeUntil(killCurrentBuffer$)),
observableOf(EVENTS.periodBufferCleared(bufferType, basePeriod))
.pipe(tap(() => {
log.info("destroying buffer for", bufferType, basePeriod);
}))
);
return observableMerge(
currentBuffer$,
nextPeriodBuffer$,
destroyAll$.pipe(ignoreElements())
);
}
/**
* Create single PeriodBuffer Observable:
* - Lazily create (or reuse) a SourceBuffer for the given type.
* - Create a Buffer linked to an Adaptation each time it changes, to
* download and append the corresponding Segments in the SourceBuffer.
* - Announce when the Buffer is full or is awaiting new Segments through
* events
*
* /!\ This Observable has multiple side-effects (creation of SourceBuffers,
* downloading and appending of Segments etc.) on subscription.
*
* @param {string} bufferType
* @param {Period} period - The period concerned
* @param {Observable} adaptation$ - Emit the chosen adaptation.
* Emit null to deactivate a type of adaptation
* @returns {Observable}
*/
function createPeriodBuffer(
bufferType : IBufferType,
period: Period,
adaptation$ : Observable<Adaptation|null>
) : Observable<IPeriodBufferEvent> {
return adaptation$.pipe(switchMap((adaptation) => {
if (adaptation == null) {
log.info(`set no ${bufferType} Adaptation`, period);
let cleanBuffer$ : Observable<null>;
if (sourceBufferManager.has(bufferType)) {
log.info(`clearing previous ${bufferType} SourceBuffer`);
const _queuedSourceBuffer = sourceBufferManager.get(bufferType);
cleanBuffer$ = _queuedSourceBuffer
.removeBuffer({ start: period.start, end: period.end || Infinity })
.pipe(mapTo(null));
} else {
cleanBuffer$ = observableOf(null);
}
return observableConcat(
cleanBuffer$.pipe(mapTo(EVENTS.adaptationChange(bufferType, null, period))),
createFakeBuffer(clock$, wantedBufferAhead$, bufferType, { manifest, period })
);
}
log.info(`updating ${bufferType} adaptation`, adaptation, period);
// 1 - create or reuse the SourceBuffer
let queuedSourceBuffer : QueuedSourceBuffer<any>;
if (sourceBufferManager.has(bufferType)) {
log.info("reusing a previous SourceBuffer for the type", bufferType);
queuedSourceBuffer = sourceBufferManager.get(bufferType);
} else {
const codec = getFirstDeclaredMimeType(adaptation);
const sourceBufferOptions = bufferType === "text" ?
options.textTrackOptions : undefined;
queuedSourceBuffer = sourceBufferManager
.createSourceBuffer(bufferType, codec, sourceBufferOptions);
}
// 2 - create or reuse the associated BufferGarbageCollector and
// SegmentBookkeeper
const bufferGarbageCollector$ = garbageCollectors.get(queuedSourceBuffer);
const segmentBookkeeper = segmentBookkeepers.get(queuedSourceBuffer);
// TODO Clean previous QueuedSourceBuffer for previous content in the period
// // 3 - Clean possible content from a precedent adaptation in this period
// // (take the clock into account to avoid removing "now" for native sourceBuffers)
// // like:
// return clock$.pluck("currentTime").take(1).mergeMap(currentTime => {
// })
// 3 - create the pipeline
const pipelineOptions = getPipelineOptions(
bufferType, options.maxRetry, options.maxRetryOffline);
const pipeline = segmentPipelinesManager
.createPipeline(bufferType, pipelineOptions);
// 4 - create the Buffer
const adaptationBuffer$ = bufferManager.createBuffer(
clock$,
queuedSourceBuffer,
segmentBookkeeper,
pipeline,
wantedBufferAhead$,
{ manifest, period, adaptation }
).pipe(catchError<IAdaptationBufferEvent<any>, never>((error : Error) => {
// non-native buffers should not impact the stability of the
// player, i.e. if a text buffer sends an error, we want to
// continue streaming without any subtitles
if (!SourceBufferManager.isNative(bufferType)) {
log.error("custom buffer: ", bufferType,
"has crashed. Aborting it.", error);
sourceBufferManager.disposeSourceBuffer(bufferType);
errorStream.next(error);
return createFakeBuffer(
clock$, wantedBufferAhead$, bufferType, { manifest, period });
}
log.error(
"native buffer: ", bufferType, "has crashed. Stopping playback.", error);
throw error; // else, throw
}));
// 5 - Return the buffer and send right events
return observableConcat(
observableOf(EVENTS.adaptationChange(bufferType, adaptation, period)),
observableMerge(adaptationBuffer$, bufferGarbageCollector$)
);
}));
}
}
/**
* @param {string} bufferType
* @param {number} retry
* @param {number} offlineRetry
* @returns {Object} - Options to give to the Pipeline
*/
function getPipelineOptions(
bufferType : string,
retry? : number,
offlineRetry? : number
) : IPipelineOptions<any, any> {
const cache = arrayIncludes(["audio", "video"], bufferType) ?
new InitializationSegmentCache<any>() : undefined;
let maxRetry : number;
let maxRetryOffline : number;
if (bufferType === "image") {
maxRetry = 0; // Deactivate BIF fetching if it fails
} else {
maxRetry = retry != null ?
retry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_ERROR;
}
maxRetryOffline = offlineRetry != null ?
offlineRetry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_OFFLINE;
return {
cache,
maxRetry,
maxRetryOffline,
};
}
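/**
 * Minimal sketch (not part of the original file) exercising the helper
 * above; the argument values are invented for illustration.
 */
function examplePipelineOptions() : void {
  const imageOptions = getPipelineOptions("image");       // maxRetry === 0: failed BIF fetches are not retried
  const audioOptions = getPipelineOptions("audio", 4, 2); // cache + explicit retry counts
  log.info("pipeline options", imageOptions, audioOptions);
}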
/**
* Returns an Observable which emits true once all the given buffers are
* _complete_, and false otherwise.
*
* A PeriodBuffer for a given type is considered _complete_ when both of these
* conditions are true:
* - it is the last PeriodBuffer in the content for the given type
* - it has finished downloading segments (it is _full_)
*
* Simply put, a _complete_ PeriodBuffer for a given type means that every
* segment needed for this Buffer has been downloaded.
*
* When the returned Observable emits true, every Buffer is finished.
* @param {...Observable} buffers
* @returns {Observable}
*/
function buffersAreComplete(
...buffers : Array<Observable<IMultiplePeriodBuffersEvent>>
) : Observable<boolean> {
/**
* Array of Observables linked to the Array of Buffers which emit:
* - true when the corresponding buffer is considered _complete_.
* - false when the corresponding buffer is considered _active_.
* @type {Array.<Observable>}
*/
const isCompleteArray : Array<Observable<boolean>> = buffers
.map((buffer) => {
return buffer.pipe(
filter((evt) => {
return evt.type === "complete-buffer" || evt.type === "active-buffer";
}),
map((evt) => evt.type === "complete-buffer"),
startWith(false),
distinctUntilChanged()
);
});
return observableCombineLatest(...isCompleteArray)
.pipe(
map((areComplete) => areComplete.every((isComplete) => isComplete)),
distinctUntilChanged()
);
}
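/**
 * Minimal sketch (not part of the original file) of the same "all complete"
 * reduction, with plain boolean streams standing in for the per-buffer
 * status Observables derived above.
 */
function exampleAllComplete() : Observable<boolean> {
  const statuses : Array<Observable<boolean>> = [
    observableOf(true),
    observableOf(false), // one buffer still active => overall false
  ];
  return observableCombineLatest(...statuses)
    .pipe(
      map((flags) => flags.every((isComplete) => isComplete)),
      distinctUntilChanged()
    );
}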
/**
* Get mimetype string of the first representation declared in the given
* adaptation.
* @param {Adaptation} adaptation
* @returns {string}
*/
function g
|
etFirstDeclaredMimeType(
|
identifier_name
|
|
buffers_handler.ts
|
*/
export default function BuffersHandler(
content : { manifest : Manifest; period : Period },
clock$ : Observable<IBufferClockTick>,
wantedBufferAhead$ : Observable<number>,
bufferManager : BufferManager,
sourceBufferManager : SourceBufferManager,
segmentPipelinesManager : SegmentPipelinesManager<any>,
segmentBookkeepers : WeakMapMemory<QueuedSourceBuffer<any>, SegmentBookkeeper>,
garbageCollectors : WeakMapMemory<QueuedSourceBuffer<any>, Observable<never>>,
options: {
maxRetry? : number;
maxRetryOffline? : number;
textTrackOptions? : ITextTrackSourceBufferOptions;
},
errorStream : Subject<Error | ICustomError>
) : Observable<IBufferHandlerEvent> {
const manifest = content.manifest;
const firstPeriod = content.period;
// Initialize all native source buffers from the first period at the same
// time.
// We cannot lazily create native sourcebuffers since the spec does not
// allow adding them during playback.
//
// From https://w3c.github.io/media-source/#methods
// For example, a user agent may throw a QuotaExceededError
// exception if the media element has reached the HAVE_METADATA
// readyState. This can occur if the user agent's media engine
// does not support adding more tracks during playback.
createNativeSourceBuffersForPeriod(sourceBufferManager, firstPeriod);
const addPeriodBuffer$ = new Subject<IPeriodBufferInfos>();
const removePeriodBuffer$ = new Subject<IPeriodBufferInfos>();
const bufferTypes = getBufferTypes();
/**
* Every PeriodBuffers for every possible types
* @type {Array.<Observable>}
*/
const buffersArray = bufferTypes
.map((bufferType) => {
return manageEveryBuffers(bufferType, firstPeriod)
.pipe(
tap((evt) => {
if (evt.type === "periodBufferReady") {
addPeriodBuffer$.next(evt.value);
} else if (evt.type === "periodBufferCleared") {
removePeriodBuffer$.next(evt.value);
}
}),
share()
);
});
/**
* Emits the active Period every time it changes
* @type {Observable}
*/
const activePeriod$ : Observable<Period> =
ActivePeriodEmitter(bufferTypes, addPeriodBuffer$, removePeriodBuffer$)
.pipe(filter((period) : period is Period => !!period));
/**
* Emits the activePeriodChanged events every time the active Period changes.
* @type {Observable}
*/
const activePeriodChanged$ = activePeriod$
.pipe(
tap((period : Period) => {
log.info("new active period", period);
}),
map(period => EVENTS.activePeriodChanged(period))
);
/**
* Emits an "end-of-stream" event once every PeriodBuffer is complete.
* @type {Observable}
*/
const streamHasEnded$ = buffersAreComplete(...buffersArray)
.pipe(map((areComplete) =>
areComplete ? EVENTS.endOfStream() : EVENTS.resumeStream()
));
return observableMerge(
activePeriodChanged$,
...buffersArray,
streamHasEnded$
);
/**
* Manage creation and removal of Buffers for every Period.
*
* Works by creating consecutive buffers through the
* manageConsecutivePeriodBuffers function, and restarting it when the clock
* goes out of the bounds of these buffers.
* @param {string} bufferType - e.g. "audio" or "video"
* @param {Period} basePeriod - Initial Period downloaded.
* @returns {Observable}
*/
function manageEveryBuffers(
bufferType : IBufferType,
basePeriod : Period
) : Observable<IMultiplePeriodBuffersEvent> {
/**
* Keep a PeriodList for cases such as seeking ahead of or before the
* buffers already created.
* When that happens, interrupt the previous buffers and create new ones
* from the new initial Period.
* @type {ConsecutivePeriodList}
*/
const periodList = new SortedList<Period>((a, b) => a.start - b.start);
/**
* Returns true if the given time is either:
* - less than the start of the chronologically first Period
* - more than the end of the chronologically last Period
* @param {number} time
* @returns {boolean}
*/
function isOutOfPeriodList(time : number) : boolean {
const head = periodList.head();
const last = periodList.last();
if (head == null || last == null) { // if no period
return true;
}
return head.start > time ||
(last.end || Infinity) < time;
}
/**
* Destroy the current set of consecutive buffers.
* Used when the clock goes out of the bounds of those, e.g. when the user
* seeks.
* We can then re-create consecutive buffers, from the new point in time.
* @type {Subject}
*/
const destroyCurrentBuffers = new Subject<void>();
const restartBuffers$ = clock$.pipe(
filter(({ currentTime, wantedTimeOffset }) => {
if (!manifest.getPeriodForTime(wantedTimeOffset + currentTime)) {
// TODO Manage out-of-manifest situations
return false;
}
return isOutOfPeriodList(wantedTimeOffset + currentTime);
}),
take(1),
tap(({ currentTime, wantedTimeOffset }) => {
log.info("Current position out of the bounds of the active periods," +
"re-creating buffers.", bufferType, currentTime + wantedTimeOffset);
destroyCurrentBuffers.next();
}),
mergeMap(({ currentTime, wantedTimeOffset }) => {
const newInitialPeriod = manifest
.getPeriodForTime(currentTime + wantedTimeOffset);
if (newInitialPeriod == null) {
throw new MediaError("MEDIA_TIME_NOT_FOUND", null, true);
} else {
// Note: For this to work, manageEveryBuffers should always emit the
// "periodBufferReady" event for the new InitialPeriod synchronously
return manageEveryBuffers(bufferType, newInitialPeriod);
}
})
);
const currentBuffers$ = manageConsecutivePeriodBuffers(
bufferType,
basePeriod,
destroyCurrentBuffers
).pipe(
tap((message) => {
if (message.type === "periodBufferReady") {
periodList.add(message.value.period);
} else if (message.type === "periodBufferCleared") {
periodList.removeFirst(message.value.period);
}
}),
share() // as always, with side-effects
);
return observableMerge(currentBuffers$, restartBuffers$);
}
/**
* Manage creation and removal of Buffers for consecutive Periods.
*
* This function is called recursively for each successive Period as needed.
*
* This function does not guarantee creation/destruction of the right Buffers
* when the user seeks or rewinds in the content.
* It only manages regular playback, another layer should be used to manage
* those cases.
*
* You can know about buffers creation and destruction respectively through
* the "periodBufferReady" and "periodBufferCleared" events.
*
* The "periodBufferReady" related to the given period should be sent synchronously
* on subscription.
* Further "periodBufferReady" for further Periods should be sent each time the
* Buffer for the previous Period is full.
*
* Buffers for each Period are cleared ("periodBufferCleared" event) either:
* - when it has finished playing (currentTime is after it)
* - when one of the older Buffers becomes active again, in which case the
* Buffers coming after will be cleared from the newest to the oldest.
* - when the destroy$ observable emits, in which case every created Buffer
* here will be cleared from the newest to the oldest.
*
* TODO The code here can surely be greatly simplified.
* @param {string} bufferType - e.g. "audio" or "video"
* @param {Period} basePeriod - Initial Period downloaded.
* @param {Observable} destroy$ - Emit when/if all created Buffer from this
* point should be destroyed.
* @returns {Observable}
|
function manageConsecutivePeriodBuffers(
bufferType : IBufferType,
basePeriod : Period,
destroy$ : Observable<void>
) : Observable<IMultiplePeriodBuffersEvent> {
log.info("creating new Buffer for", bufferType, basePeriod);
/**
* Emits the chosen adaptation for the current type.
* @type {ReplaySubject}
*/
const adaptation$ = new ReplaySubject<Adaptation|null>(1);
/**
* Emits the Period of the next Period Buffer when it can be created.
* @type {Subject}
*/
const createNextPeriodBuffer$ = new Subject<Period>();
/**
* Emits when the Buffers for the next Periods should be destroyed, if
* created.
* @type {Subject}
*/
const destroyNextBuffers$ = new Subject<void>();
/**
* Emits when the current position goes over the end of the current buffer.
* @type {Subject}
*/
const endOfCurrentBuffer$ = clock$
.pipe(filter
|
*/
|
random_line_split
|
buffers_handler.ts
|
/**
* Destroys each created Buffer, from the newest to the oldest,
* once destroy$ emits.
* @type {Observable}
*/
const destroyAll$ = destroy$.pipe(
take(1),
tap(() => {
// first complete createNextPeriodBuffer$ to allow completion of the
// nextPeriodBuffer$ observable once all further Buffers have been
// cleared.
createNextPeriodBuffer$.complete();
// emit destruction signal to the next Buffer first
destroyNextBuffers$.next();
destroyNextBuffers$.complete(); // we do not need it anymore
}),
share() // share side-effects
);
/**
* Will emit when the current buffer should be destroyed.
* @type {Observable}
*/
const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$);
const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe(
mergeMap((
evt : IPeriodBufferEvent
) : Observable<IMultiplePeriodBuffersEvent> => {
const { type } = evt;
if (type === "full-buffer") {
/**
* The Period coming just after the current one.
* @type {Period|undefined}
*/
const nextPeriod = manifest.getPeriodAfter(basePeriod);
if (nextPeriod == null) {
// no more Period: emit the completion event
return observableOf(EVENTS.bufferComplete(bufferType));
} else {
// current buffer is full, create the next one if not already created
createNextPeriodBuffer$.next(nextPeriod);
}
} else if (type === "active-buffer") {
// current buffer is active, destroy next buffer if created
destroyNextBuffers$.next();
}
return observableOf(evt);
}),
share()
);
/**
* Buffer for the current Period.
* @type {Observable}
*/
const currentBuffer$ : Observable<IMultiplePeriodBuffersEvent> =
observableConcat(
observableOf(EVENTS.periodBufferReady(bufferType, basePeriod, adaptation$)),
periodBuffer$.pipe(takeUntil(killCurrentBuffer$)),
observableOf(EVENTS.periodBufferCleared(bufferType, basePeriod))
.pipe(tap(() => {
log.info("destroying buffer for", bufferType, basePeriod);
}))
);
return observableMerge(
currentBuffer$,
nextPeriodBuffer$,
destroyAll$.pipe(ignoreElements())
);
}
/**
* Create single PeriodBuffer Observable:
* - Lazily create (or reuse) a SourceBuffer for the given type.
* - Create a Buffer linked to an Adaptation each time it changes, to
* download and append the corresponding Segments in the SourceBuffer.
* - Announce when the Buffer is full or is awaiting new Segments through
* events
*
* /!\ This Observable has multiple side-effects (creation of SourceBuffers,
* downloading and appending of Segments etc.) on subscription.
*
* @param {string} bufferType
* @param {Period} period - The period concerned
* @param {Observable} adaptation$ - Emit the chosen adaptation.
* Emit null to deactivate a type of adaptation
* @returns {Observable}
*/
function createPeriodBuffer(
bufferType : IBufferType,
period: Period,
adaptation$ : Observable<Adaptation|null>
) : Observable<IPeriodBufferEvent> {
return adaptation$.pipe(switchMap((adaptation) => {
if (adaptation == null) {
log.info(`set no ${bufferType} Adaptation`, period);
let cleanBuffer$ : Observable<null>;
if (sourceBufferManager.has(bufferType)) {
log.info(`clearing previous ${bufferType} SourceBuffer`);
const _queuedSourceBuffer = sourceBufferManager.get(bufferType);
cleanBuffer$ = _queuedSourceBuffer
.removeBuffer({ start: period.start, end: period.end || Infinity })
.pipe(mapTo(null));
} else {
cleanBuffer$ = observableOf(null);
}
return observableConcat(
cleanBuffer$.pipe(mapTo(EVENTS.adaptationChange(bufferType, null, period))),
createFakeBuffer(clock$, wantedBufferAhead$, bufferType, { manifest, period })
);
}
log.info(`updating ${bufferType} adaptation`, adaptation, period);
// 1 - create or reuse the SourceBuffer
let queuedSourceBuffer : QueuedSourceBuffer<any>;
if (sourceBufferManager.has(bufferType)) {
log.info("reusing a previous SourceBuffer for the type", bufferType);
queuedSourceBuffer = sourceBufferManager.get(bufferType);
} else {
const codec = getFirstDeclaredMimeType(adaptation);
const sourceBufferOptions = bufferType === "text" ?
options.textTrackOptions : undefined;
queuedSourceBuffer = sourceBufferManager
.createSourceBuffer(bufferType, codec, sourceBufferOptions);
}
// 2 - create or reuse the associated BufferGarbageCollector and
// SegmentBookkeeper
const bufferGarbageCollector$ = garbageCollectors.get(queuedSourceBuffer);
const segmentBookkeeper = segmentBookkeepers.get(queuedSourceBuffer);
// TODO Clean previous QueuedSourceBuffer for previous content in the period
// // 3 - Clean possible content from a precedent adaptation in this period
// // (take the clock into account to avoid removing "now" for native sourceBuffers)
// // like:
// return clock$.pluck("currentTime").take(1).mergeMap(currentTime => {
// })
// 3 - create the pipeline
const pipelineOptions = getPipelineOptions(
bufferType, options.maxRetry, options.maxRetryOffline);
const pipeline = segmentPipelinesManager
.createPipeline(bufferType, pipelineOptions);
// 4 - create the Buffer
const adaptationBuffer$ = bufferManager.createBuffer(
clock$,
queuedSourceBuffer,
segmentBookkeeper,
pipeline,
wantedBufferAhead$,
{ manifest, period, adaptation }
).pipe(catchError<IAdaptationBufferEvent<any>, never>((error : Error) => {
// non-native buffers should not impact the stability of the
// player, i.e. if a text buffer sends an error, we want to
// continue streaming without any subtitles
if (!SourceBufferManager.isNative(bufferType)) {
log.error("custom buffer: ", bufferType,
"has crashed. Aborting it.", error);
sourceBufferManager.disposeSourceBuffer(bufferType);
errorStream.next(error);
return createFakeBuffer(
clock$, wantedBufferAhead$, bufferType, { manifest, period });
}
log.error(
"native buffer: ", bufferType, "has crashed. Stopping playback.", error);
throw error; // else, throw
}));
// 5 - Return the buffer and send right events
return observableConcat(
observableOf(EVENTS.adaptationChange(bufferType, adaptation, period)),
observableMerge(adaptationBuffer$, bufferGarbageCollector$)
);
}));
}
}
/**
* @param {string} bufferType
* @param {number} retry
* @param {number} offlineRetry
* @returns {Object} - Options to give to the Pipeline
*/
function getPipelineOptions(
bufferType : string,
retry? : number,
offlineRetry? : number
) : IPipelineOptions<any, any> {
const cache = arrayIncludes(["audio", "video"], bufferType) ?
new InitializationSegmentCache<any>() : undefined;
let maxRetry : number;
let maxRetryOffline : number;
if (bufferType === "image") {
maxRetry = 0; // Deactivate BIF fetching if it fails
} else {
maxRetry = retry != null ?
retry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_ERROR;
}
maxRetryOffline = offlineRetry != null ?
offlineRetry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_OFFLINE;
return {
cache,
maxRetry,
maxRetryOffline,
};
}
/**
* Returns an Observable which emits true once all the given buffers are
* _complete_, and false otherwise.
*
* A PeriodBuffer for a given type is considered _complete_ when both of these
* conditions are true:
* - it is the last PeriodBuffer in the content for the given type
* - it has finished downloading segments (it is _full_)
*
* Simply put, a _complete_ PeriodBuffer for a given type means that every
* segment needed for this Buffer has been downloaded.
*
* When the returned Observable emits true, every Buffer is finished.
* @param {...Observable} buffers
* @returns {Observable}
*/
function buffersAreComplete(
...buffers : Array<Observable<IMultiplePeriodBuffersEvent>>
) : Observable<boolean> {
|
/**
* Array of Observables linked to the Array of Buffers which emit:
* - true when the corresponding buffer is considered _complete_.
* - false when the corresponding buffer is considered _active_.
* @type {Array.<Observable>}
*/
const isCompleteArray : Array<Observable<boolean>> = buffers
.map((buffer) => {
return buffer.pipe(
filter((evt) => {
return evt.type === "complete-buffer" || evt.type === "active-buffer";
}),
map((evt) => evt.type === "complete-buffer"),
startWith(false),
distinctUntilChanged()
);
});
return observableCombineLatest(...isCompleteArray)
|
identifier_body
|
|
buffers_handler.ts
|
.value);
} else if (evt.type === "periodBufferCleared") {
removePeriodBuffer$.next(evt.value);
}
}),
share()
);
});
/**
* Emits the active Period every time it changes
* @type {Observable}
*/
const activePeriod$ : Observable<Period> =
ActivePeriodEmitter(bufferTypes, addPeriodBuffer$, removePeriodBuffer$)
.pipe(filter((period) : period is Period => !!period));
/**
* Emits the activePeriodChanged events every time the active Period changes.
* @type {Observable}
*/
const activePeriodChanged$ = activePeriod$
.pipe(
tap((period : Period) => {
log.info("new active period", period);
}),
map(period => EVENTS.activePeriodChanged(period))
);
/**
* Emits an "end-of-stream" event once every PeriodBuffer is complete.
* @type {Observable}
*/
const streamHasEnded$ = buffersAreComplete(...buffersArray)
.pipe(map((areComplete) =>
areComplete ? EVENTS.endOfStream() : EVENTS.resumeStream()
));
return observableMerge(
activePeriodChanged$,
...buffersArray,
streamHasEnded$
);
/**
* Manage creation and removal of Buffers for every Period.
*
* Works by creating consecutive buffers through the
* manageConsecutivePeriodBuffers function, and restarting it when the clock
* goes out of the bounds of these buffers.
* @param {string} bufferType - e.g. "audio" or "video"
* @param {Period} basePeriod - Initial Period downloaded.
* @returns {Observable}
*/
function manageEveryBuffers(
bufferType : IBufferType,
basePeriod : Period
) : Observable<IMultiplePeriodBuffersEvent> {
/**
* Keep a PeriodList for cases such as seeking ahead of or before the
* buffers already created.
* When that happens, interrupt the previous buffers and create new ones
* from the new initial Period.
* @type {ConsecutivePeriodList}
*/
const periodList = new SortedList<Period>((a, b) => a.start - b.start);
/**
* Returns true if the given time is either:
* - less than the start of the chronologically first Period
* - more than the end of the chronologically last Period
* @param {number} time
* @returns {boolean}
*/
function isOutOfPeriodList(time : number) : boolean {
const head = periodList.head();
const last = periodList.last();
if (head == null || last == null) { // if no period
return true;
}
return head.start > time ||
(last.end || Infinity) < time;
}
/**
* Destroy the current set of consecutive buffers.
* Used when the clock goes out of the bounds of those, e.g. when the user
* seeks.
* We can then re-create consecutive buffers, from the new point in time.
* @type {Subject}
*/
const destroyCurrentBuffers = new Subject<void>();
const restartBuffers$ = clock$.pipe(
filter(({ currentTime, wantedTimeOffset }) => {
if (!manifest.getPeriodForTime(wantedTimeOffset + currentTime)) {
// TODO Manage out-of-manifest situations
return false;
}
return isOutOfPeriodList(wantedTimeOffset + currentTime);
}),
take(1),
tap(({ currentTime, wantedTimeOffset }) => {
log.info("Current position out of the bounds of the active periods," +
"re-creating buffers.", bufferType, currentTime + wantedTimeOffset);
destroyCurrentBuffers.next();
}),
mergeMap(({ currentTime, wantedTimeOffset }) => {
const newInitialPeriod = manifest
.getPeriodForTime(currentTime + wantedTimeOffset);
if (newInitialPeriod == null) {
throw new MediaError("MEDIA_TIME_NOT_FOUND", null, true);
} else {
// Note: For this to work, manageEveryBuffers should always emit the
// "periodBufferReady" event for the new InitialPeriod synchronously
return manageEveryBuffers(bufferType, newInitialPeriod);
}
})
);
const currentBuffers$ = manageConsecutivePeriodBuffers(
bufferType,
basePeriod,
destroyCurrentBuffers
).pipe(
tap((message) => {
if (message.type === "periodBufferReady") {
periodList.add(message.value.period);
} else if (message.type === "periodBufferCleared") {
periodList.removeFirst(message.value.period);
}
}),
share() // as always, with side-effects
);
return observableMerge(currentBuffers$, restartBuffers$);
}
/**
* Manage creation and removal of Buffers for consecutive Periods.
*
* This function is called recursively for each successive Period as needed.
*
* This function does not guarantee creation/destruction of the right Buffers
* when the user seeks or rewinds in the content.
* It only manages regular playback, another layer should be used to manage
* those cases.
*
* You can know about buffers creation and destruction respectively through
* the "periodBufferReady" and "periodBufferCleared" events.
*
* The "periodBufferReady" related to the given period should be sent synchronously
* on subscription.
* Further "periodBufferReady" for further Periods should be sent each time the
* Buffer for the previous Period is full.
*
* Buffers for each Period are cleared ("periodBufferCleared" event) either:
* - when it has finished playing (currentTime is after it)
* - when one of the older Buffers becomes active again, in which case the
* Buffers coming after will be cleared from the newest to the oldest.
* - when the destroy$ observable emits, in which case every created Buffer
* here will be cleared from the newest to the oldest.
*
* TODO The code here can surely be greatly simplified.
* @param {string} bufferType - e.g. "audio" or "video"
* @param {Period} basePeriod - Initial Period downloaded.
* @param {Observable} destroy$ - Emit when/if all created Buffer from this
* point should be destroyed.
* @returns {Observable}
*/
function manageConsecutivePeriodBuffers(
bufferType : IBufferType,
basePeriod : Period,
destroy$ : Observable<void>
) : Observable<IMultiplePeriodBuffersEvent> {
log.info("creating new Buffer for", bufferType, basePeriod);
/**
* Emits the chosen adaptation for the current type.
* @type {ReplaySubject}
*/
const adaptation$ = new ReplaySubject<Adaptation|null>(1);
/**
* Emits the Period of the next Period Buffer when it can be created.
* @type {Subject}
*/
const createNextPeriodBuffer$ = new Subject<Period>();
/**
* Emits when the Buffers for the next Periods should be destroyed, if
* created.
* @type {Subject}
*/
const destroyNextBuffers$ = new Subject<void>();
/**
* Emits when the current position goes over the end of the current buffer.
* @type {Subject}
*/
const endOfCurrentBuffer$ = clock$
.pipe(filter(({ currentTime, wantedTimeOffset }) =>
!!basePeriod.end && (currentTime + wantedTimeOffset) >= basePeriod.end
));
/**
* Create Period Buffer for the next Period.
* @type {Observable}
*/
const nextPeriodBuffer$ = createNextPeriodBuffer$
.pipe(exhaustMap((nextPeriod) => {
return manageConsecutivePeriodBuffers(
bufferType, nextPeriod, destroyNextBuffers$);
}));
/**
* Destroys each created Buffer, from the newest to the oldest,
* once destroy$ emits.
* @type {Observable}
*/
const destroyAll$ = destroy$.pipe(
take(1),
tap(() => {
// first complete createNextPeriodBuffer$ to allow completion of the
// nextPeriodBuffer$ observable once all further Buffers have been
// cleared.
createNextPeriodBuffer$.complete();
// emit destruction signal to the next Buffer first
destroyNextBuffers$.next();
destroyNextBuffers$.complete(); // we do not need it anymore
}),
share() // share side-effects
);
/**
* Will emit when the current buffer should be destroyed.
* @type {Observable}
*/
const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$);
const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe(
mergeMap((
evt : IPeriodBufferEvent
) : Observable<IMultiplePeriodBuffersEvent> => {
const { type } = evt;
if (type === "full-buffer") {
/**
* The Period coming just after the current one.
* @type {Period|undefined}
*/
const nextPeriod = manifest.getPeriodAfter(basePeriod);
if (nextPeriod == null) {
// no more Period: emit the completion event
return observableOf(EVENTS.bufferComplete(bufferType));
} else {
|
// current buffer is full, create the next one if not already created
createNextPeriodBuffer$.next(nextPeriod);
}
|
conditional_block
|
|
BrowseButton.js
|
* red background to see how well they are positioned.
*/
debug : false,
/*
* Private constants:
*/
/**
* @property FLOAT_EL_WIDTH
* @type Number The width (in pixels) of floatEl. It should be less than the width of the IE "Browse" button
* (65 pixels), since IE doesn't let you resize it. We define this width so we can quickly center floatEl at
* the mouse cursor without having to make any function calls.
* @private
*/
FLOAT_EL_WIDTH : 60,
/**
* @property FLOAT_EL_HEIGHT
* @type Number The height (in pixels) of floatEl. It should be less than the height of the "Browse" button.
* We define this height so we can quickly center floatEl at the mouse cursor without having to make any
* function calls.
* @private
*/
FLOAT_EL_HEIGHT : 18,
/*
* Private properties:
*/
/**
* @property buttonCt
* @type Ext.Element Element that contains the actual Button DOM element. We store a reference to it, so we can
* easily grab its size for sizing the clipEl.
* @private
*/
buttonCt : null,
/**
* @property clipEl
* @type Ext.Element Element that contains the floatEl. This element is positioned to fill the area of Ext.Button
* and has overflow turned off. This keeps floatEl tight to the Ext.Button, and prevents it from masking
* surrounding elements.
* @private
*/
clipEl : null,
/**
* @property floatEl
* @type Ext.Element Element that contains the inputFileEl. This element is sized to be less than or equal to the
* size of the input file "Browse" button. It is then positioned wherever the user moves the cursor, so that
* their click always clicks the input file "Browse" button. Overflow is turned off to prevent inputFileEl from
* masking surrounding elements.
* @private
*/
floatEl : null,
/**
* @property inputFileEl
* @type Ext.Element Element for the hidden file input.
* @private
*/
inputFileEl : null,
/**
* @property originalHandler
* @type Function The handler originally defined for the Ext.Button during construction using the "handler" config
* option. We need to null out the "handler" property so that it is only called when a file is selected.
* @private
*/
originalHandler : null,
/**
* @property originalScope
* @type Object The scope originally defined for the Ext.Button during construction using the "scope" config option.
* While the "scope" property doesn't need to be nulled, to be consistent with originalHandler, we do.
* @private
*/
originalScope : null,
/*
* Protected Ext.Button overrides
*/
/**
* @see Ext.Button.initComponent
*/
initComponent : function() {
Ext.ux.form.BrowseButton.superclass.initComponent.call(this);
// Store references to the original handler and scope before nulling them.
// This is done so that this class can control when the handler is called.
// There are some cases where the hidden file input browse button doesn't completely cover the Ext.Button.
// The handler shouldn't be called in these cases. It should only be called if a new file is selected on the
// file system.
this.originalHandler = this.handler;
this.originalScope = this.scope;
this.handler = null;
this.scope = null;
},
/**
* @see Ext.Button.onRender
*/
onRender : function(ct, position) {
Ext.ux.form.BrowseButton.superclass.onRender.call(this, ct, position); // render the Ext.Button
this.buttonCt = this.el.child('.x-btn-center em');
this.buttonCt.position('relative'); // this is important!
var styleCfg = {
position : 'absolute',
overflow : 'hidden',
top : '0px', // default
left : '0px' // default
};
// browser specifics for better overlay tightness
if (Ext.isIE) {
Ext.apply(styleCfg, {
left : '-3px',
top : '-3px'
});
} else if (Ext.isGecko) {
Ext.apply(styleCfg, {
left : '-3px',
top : '-3px'
});
} else if (Ext.isSafari) {
Ext.apply(styleCfg, {
left : '-4px',
top : '-2px'
});
}
this.clipEl = this.buttonCt.createChild({
tag : 'div',
style : styleCfg
});
this.setClipSize();
this.clipEl.on({
'mousemove' : this.onButtonMouseMove,
'mouseover' : this.onButtonMouseMove,
scope : this
});
this.floatEl = this.clipEl.createChild({
tag : 'div',
style : {
position : 'absolute',
width : this.FLOAT_EL_WIDTH + 'px',
height : this.FLOAT_EL_HEIGHT + 'px',
overflow : 'hidden'
}
});
if (this.debug) {
this.clipEl.applyStyles({
'background-color' : 'green'
});
this.floatEl.applyStyles({
'background-color' : 'red'
});
} else {
// We don't set the clipEl to be transparent, because IE 6/7 occasionally loses mouse events for
// transparent elements.
// We have listeners on the clipEl that can't be lost as they're needed for realigning the input file
// element.
this.floatEl.setOpacity(0.0);
}
// Cover cases where someone tabs to the button:
// Listen to focus of the button so we can translate the focus to the input file el.
var buttonEl = this.el.child(this.buttonSelector);
buttonEl.on('focus', this.onButtonFocus, this);
// In IE, it's possible to tab to the text portion of the input file el.
// We want to listen to key events so that if a space is pressed, we "click" the input file el.
if (Ext.isIE) {
this.el.on('keydown', this.onButtonKeyDown, this);
}
this.createInputFile();
},
/*
* Private helper methods:
*/
/**
* Sets the size of clipEl so that it covers as much of the button as possible.
*
* @private
*/
setClipSize : function() {
if (this.clipEl) {
var width = this.buttonCt.getWidth();
var height = this.buttonCt.getHeight();
// The button container can have a width and height of zero when it's rendered in a hidden panel.
// This is most noticeable when using a card layout, as the items are all rendered but hidden,
// (unless deferredRender is set to true).
// In this case, the clip size can't be determined, so we attempt to set it later.
// This check repeats until the button container has a size.
if (width === 0 || height === 0) {
this.setClipSize.defer(100, this);
} else {
if (Ext.isIE) {
width = width + 5;
height = height + 5;
} else if (Ext.isGecko) {
width = width + 6;
height = height + 6;
} else if (Ext.isSafari)
|
this.clipEl.setSize(width, height);
}
}
},
/**
* Creates the input file element and adds it to inputFileCt. The created input file element is sized, positioned,
* and styled appropriately. Event handlers for the element are set up, and a tooltip is applied if defined in the
* original config.
*
* @private
*/
createInputFile : function() {
// When an input file gets detached and set as the child of a different DOM element,
// straggling <em> elements get left behind.
// I don't know why this happens but we delete any <em> elements we can find under the floatEl to prevent a
// memory leak.
this.floatEl.select('em').each(function(el) {
el.remove();
});
this.inputFileEl = this.floatEl.createChild({
tag : 'input',
type : 'file',
size : 1, // must be > 0. Its value doesn't really matter due to our masking div (inputFileCt).
name : this.inputFileName || Ext.id(this.el),
tabindex : this.tabIndex,
// Use the same pointer as an Ext.Button would use. This doesn't work in Firefox.
// This positioning right-aligns the input file to ensure that the "Browse" button is visible.
|
{
width = width + 6;
height = height + 6;
}
|
conditional_block
|
BrowseButton.js
|
* red background to see how well they are positioned.
*/
debug : false,
/*
* Private constants:
*/
/**
* @property FLOAT_EL_WIDTH
* @type Number The width (in pixels) of floatEl. It should be less than the width of the IE "Browse" button
* (65 pixels), since IE doesn't let you resize it. We define this width so we can quickly center floatEl at
* the mouse cursor without having to make any function calls.
* @private
*/
FLOAT_EL_WIDTH : 60,
/**
* @property FLOAT_EL_HEIGHT
* @type Number The height (in pixels) of floatEl. It should be less than the height of the "Browse" button.
* We define this height so we can quickly center floatEl at the mouse cursor without having to make any
* function calls.
* @private
*/
FLOAT_EL_HEIGHT : 18,
/*
* Private properties:
*/
/**
* @property buttonCt
* @type Ext.Element Element that contains the actual Button DOM element. We store a reference to it, so we can
* easily grab its size for sizing the clipEl.
* @private
*/
buttonCt : null,
/**
* @property clipEl
* @type Ext.Element Element that contains the floatEl. This element is positioned to fill the area of Ext.Button
* and has overflow turned off. This keeps floatEl tight to the Ext.Button, and prevents it from masking
* surrounding elements.
* @private
*/
clipEl : null,
/**
* @property floatEl
* @type Ext.Element Element that contains the inputFileEl. This element is sized to be less than or equal to the
* size of the input file "Browse" button. It is then positioned wherever the user moves the cursor, so that
* their click always clicks the input file "Browse" button. Overflow is turned off to prevent inputFileEl from
* masking surrounding elements.
* @private
*/
floatEl : null,
/**
* @property inputFileEl
* @type Ext.Element Element for the hidden file input.
* @private
*/
inputFileEl : null,
/**
* @property originalHandler
* @type Function The handler originally defined for the Ext.Button during construction using the "handler" config
* option. We need to null out the "handler" property so that it is only called when a file is selected.
* @private
*/
originalHandler : null,
/**
* @property originalScope
* @type Object The scope originally defined for the Ext.Button during construction using the "scope" config option.
* While the "scope" property doesn't need to be nulled, to be consistent with originalHandler, we do.
* @private
*/
originalScope : null,
/*
* Protected Ext.Button overrides
*/
/**
* @see Ext.Button.initComponent
*/
initComponent : function() {
Ext.ux.form.BrowseButton.superclass.initComponent.call(this);
// Store references to the original handler and scope before nulling them.
// This is done so that this class can control when the handler is called.
// There are some cases where the hidden file input browse button doesn't completely cover the Ext.Button.
// The handler shouldn't be called in these cases. It should only be called if a new file is selected on the
// file system.
this.originalHandler = this.handler;
this.originalScope = this.scope;
this.handler = null;
this.scope = null;
},
/**
* @see Ext.Button.onRender
*/
onRender : function(ct, position) {
Ext.ux.form.BrowseButton.superclass.onRender.call(this, ct, position); // render the Ext.Button
this.buttonCt = this.el.child('.x-btn-center em');
this.buttonCt.position('relative'); // this is important!
var styleCfg = {
position : 'absolute',
overflow : 'hidden',
top : '0px', // default
left : '0px' // default
};
// browser specifics for better overlay tightness
if (Ext.isIE) {
Ext.apply(styleCfg, {
left : '-3px',
top : '-3px'
});
} else if (Ext.isGecko) {
Ext.apply(styleCfg, {
left : '-3px',
top : '-3px'
});
} else if (Ext.isSafari) {
Ext.apply(styleCfg, {
left : '-4px',
top : '-2px'
});
}
this.clipEl = this.buttonCt.createChild({
tag : 'div',
style : styleCfg
});
this.setClipSize();
this.clipEl.on({
'mousemove' : this.onButtonMouseMove,
'mouseover' : this.onButtonMouseMove,
scope : this
});
this.floatEl = this.clipEl.createChild({
tag : 'div',
style : {
position : 'absolute',
width : this.FLOAT_EL_WIDTH + 'px',
height : this.FLOAT_EL_HEIGHT + 'px',
overflow : 'hidden'
}
});
if (this.debug) {
this.clipEl.applyStyles({
'background-color' : 'green'
});
this.floatEl.applyStyles({
'background-color' : 'red'
});
} else {
            // We don't set the clipEl to be transparent, because IE 6/7 occasionally loses mouse events for
            // transparent elements.
            // We have listeners on the clipEl that can't be lost, as they're needed for realigning the input file
// element.
this.floatEl.setOpacity(0.0);
}
// Cover cases where someone tabs to the button:
// Listen to focus of the button so we can translate the focus to the input file el.
var buttonEl = this.el.child(this.buttonSelector);
buttonEl.on('focus', this.onButtonFocus, this);
// In IE, it's possible to tab to the text portion of the input file el.
// We want to listen to keyevents so that if a space is pressed, we "click" the input file el.
if (Ext.isIE) {
this.el.on('keydown', this.onButtonKeyDown, this);
}
this.createInputFile();
},
/*
* Private helper methods:
*/
/**
 * Sets the size of clipEl so that it covers as much of the button as possible.
*
* @private
*/
setClipSize : function() {
if (this.clipEl) {
var width = this.buttonCt.getWidth();
var height = this.buttonCt.getHeight();
// The button container can have a width and height of zero when it's rendered in a hidden panel.
            // This is most noticeable when using a card layout, as the items are all rendered but hidden,
// (unless deferredRender is set to true).
// In this case, the clip size can't be determined, so we attempt to set it later.
// This check repeats until the button container has a size.
if (width === 0 || height === 0) {
this.setClipSize.defer(100, this);
} else {
if (Ext.isIE) {
width = width + 5;
height = height + 5;
} else if (Ext.isGecko) {
width = width + 6;
height = height + 6;
} else if (Ext.isSafari) {
width = width + 6;
height = height + 6;
}
this.clipEl.setSize(width, height);
}
}
},
/**
 * Creates the input file element and adds it to floatEl. The created input file element is sized, positioned,
 * and styled appropriately. Event handlers for the element are set up, and a tooltip is applied if defined in the
 * original config.
 *
 * @private
 */
createInputFile : function() {
// When an input file gets detached and set as the child of a different DOM element,
// straggling <em> elements get left behind.
// I don't know why this happens but we delete any <em> elements we can find under the floatEl to prevent a
// memory leak.
this.floatEl.select('em').each(function(el) {
el.remove();
});
this.inputFileEl = this.floatEl.createChild({
tag : 'input',
type : 'file',
            size : 1, // must be > 0. Its value doesn't really matter due to our masking div (floatEl).
name : this.inputFileName || Ext.id(this.el),
tabindex : this.tabIndex,
// Use the same pointer as an Ext.Button would use. This doesn't work in Firefox.
// This positioning right-aligns the input file to ensure that the "Browse" button is visible.
win_export.py
def export_csv(fname, fields, result, write_title=False):
import csv
try:
fp = file(fname, 'wb+')
writer = csv.writer(fp)
if write_title:
writer.writerow(fields)
for data in result:
row = []
for d in data:
if type(d)==types.StringType:
row.append(d.replace('\n',' ').replace('\t',' '))
else:
row.append(d)
writer.writerow(row)
fp.close()
common.message(str(len(result))+_(' record(s) saved !'))
return True
except IOError, (errno, strerror):
common.message(_("Operation failed !\nI/O error")+"(%s)" % (errno,))
return False
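# A hedged Python 3 sketch of the same CSV export; export_csv_py3 is an
# illustrative name, and the fields/result shapes are assumed identical to
# the Python 2 writer above.
def export_csv_py3(fname, fields, result, write_title=False):
    import csv
    with open(fname, 'w', newline='') as fp:
        writer = csv.writer(fp)
        if write_title:
            writer.writerow(fields)
        for data in result:
            # Flatten embedded newlines/tabs, as the writer above does.
            writer.writerow([d.replace('\n', ' ').replace('\t', ' ')
                             if isinstance(d, str) else d for d in data])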
def open_excel(fields, result):
if os.name == 'nt':
try:
from win32com.client import Dispatch
xlApp = Dispatch("Excel.Application")
xlApp.Workbooks.Add()
for col in range(len(fields)):
xlApp.ActiveSheet.Cells(1,col+1).Value = fields[col]
sht = xlApp.ActiveSheet
for a in result:
for b in range(len(a)):
if type(a[b]) == type(''):
a[b]=a[b].decode('utf-8','replace')
elif type(a[b]) == type([]):
if len(a[b])==2:
a[b] = a[b][1].decode('utf-8','replace')
else:
a[b] = ''
sht.Range(sht.Cells(2, 1), sht.Cells(len(result)+1, len(fields))).Value = result
xlApp.Visible = 1
except:
common.error(_('Error Opening Excel !'),'')
else:
common.message(_("Function only available for MS Office !\nSorry, OOo users :("))
def datas_read(ids, model, fields, fields_view, prefix='', context=None):
ctx = context.copy()
ctx.update(rpc.session.context)
datas = rpc.session.rpc_exec_auth('/object', 'execute', model, 'export_data', ids, fields, ctx)
return datas
class win_export(object):
def __init__(self, model, ids, fields, preload = [], parent=None, context=None):
self.glade = glade.XML(common.terp_path("openerp.glade"), 'win_save_as',
gettext.textdomain())
self.win = self.glade.get_widget('win_save_as')
self.ids = ids
self.model = model
self.fields_data = {}
if context is None:
context = {}
self.context = context
if parent is None:
parent = service.LocalService('gui.main').window
self.win.set_transient_for(parent)
self.win.set_icon(common.OPENERP_ICON)
self.parent = parent
self.view1 = gtk.TreeView()
self.view1.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
self.glade.get_widget('exp_vp1').add(self.view1)
self.view2 = gtk.TreeView()
self.view2.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
self.glade.get_widget('exp_vp2').add(self.view2)
self.view1.set_headers_visible(False)
self.view2.set_headers_visible(False)
cell = gtk.CellRendererText()
column = gtk.TreeViewColumn('Field name', cell, text=0, background=2)
self.view1.append_column(column)
cell = gtk.CellRendererText()
column = gtk.TreeViewColumn('Field name', cell, text=0)
self.view2.append_column(column)
self.model1 = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
self.model2 = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
for f in preload:
self.model2.set(self.model2.append(), 0, f[1], 1, f[0])
self.fields = {}
def model_populate(fields, prefix_node='', prefix=None, prefix_value='', level=2):
fields_order = fields.keys()
fields_order.sort(lambda x,y: -cmp(fields[x].get('string', ''), fields[y].get('string', '')))
for field in fields_order:
self.fields_data[prefix_node+field] = fields[field]
if prefix_node:
self.fields_data[prefix_node + field]['string'] = '%s%s' % (prefix_value, self.fields_data[prefix_node + field]['string'])
st_name = fields[field]['string'] or field
node = self.model1.insert(prefix, 0, [st_name, prefix_node+field, (fields[field].get('required', False) and '#ddddff') or 'white'])
self.fields[prefix_node+field] = (st_name, fields[field].get('relation', False))
if fields[field].get('relation', False) and level>0:
fields2 = rpc.session.rpc_exec_auth('/object', 'execute', fields[field]['relation'], 'fields_get', False, rpc.session.context)
fields2.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}})
model_populate(fields2, prefix_node+field+'/', node, st_name+'/', level-1)
fields.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}})
model_populate(fields)
self.view1.set_model(self.model1)
self.view2.set_model(self.model2)
self.view1.show_all()
self.view2.show_all()
self.wid_action = self.glade.get_widget('win_saveas_combo')
self.wid_write_field_names = self.glade.get_widget('add_field_names_cb')
self.wid_import_compatible = self.glade.get_widget('import_compatible')
        self.wid_action.set_active(os.name != 'nt')  # set_active() returns None, so its result is not kept
self.glade.signal_connect('on_but_unselect_all_clicked', self.sig_unsel_all)
self.glade.signal_connect('on_but_select_all_clicked', self.sig_sel_all)
self.glade.signal_connect('on_but_select_clicked', self.sig_sel)
self.glade.signal_connect('on_but_unselect_clicked', self.sig_unsel)
self.glade.signal_connect('on_but_predefined_clicked', self.add_predef)
self.glade.signal_connect('on_but_delpredefined_clicked', self.del_export_list_btn)
# Creating the predefined export view
self.pref_export = gtk.TreeView()
self.pref_export.append_column(gtk.TreeViewColumn('Export name', gtk.CellRendererText(), text=1))
self.pref_export.append_column(gtk.TreeViewColumn('Exported fields', gtk.CellRendererText(), text=2))
self.glade.get_widget('predefined_exports').add(self.pref_export)
self.pref_export.connect("row-activated", self.sel_predef)
self.pref_export.connect('key_press_event', self.del_export_list_key)
# Fill the predefined export tree view and show everything
self.fill_predefwin()
self.pref_export.show_all()
def del_export_list_key(self,widget, event, *args):
if event.keyval==gtk.keysyms.Delete:
self.del_selected_export_list()
def del_export_list_btn(self, widget=None):
self.del_selected_export_list()
    def del_selected_export_list(self):
        store, paths = self.pref_export.get_selection().get_selected_rows()
        for p in paths:
            export_fields = store.get_value(store[p[0]].iter, 0)
            export_name = store.get_value(store[p[0]].iter, 1)
            ir_export = rpc.RPCProxy('ir.exports')
            ir_export_line = rpc.RPCProxy('ir.exports.line')
            export_ids = ir_export.search([('name', '=', export_name)])
            for id in export_ids:
                fields = []
                line_ids = ir_export_line.search([('export_id', '=', id)])
                obj_line = ir_export_line.read(line_ids)
                for line in obj_line:
                    fields.append(line['name'])
                if fields == export_fields:
                    ir_export.unlink(id)
                    ir_export_line.unlink(line_ids)
                    store.remove(store.get_iter(p))
                    break
    def sig_sel_all(self, widget=None):
        self.model2.clear()
        # self.fields maps field path -> (display name, relation); only
        # non-relational fields are bulk-selectable.
        for field, (name, relation) in self.fields.items():
            if not relation:
                self.model2.set(self.model2.append(), 0, name, 1, field)
def sig_sel(self, widget=None):
sel = self.view1.get_selection()
sel.selected_foreach(self._sig_sel_add)
def _sig_sel_add(self, store, path, iter):
name, relation = self.fields[store.get_value(iter,1)]
#if relation:
# return
num = self.model2.append()
self.model2.set(num, 0, store.get_value(iter,0), 1, store.get_value(iter,1))
def sig_unsel(self, widget=None):
store, paths = self.view2.get_selection().get_selected_rows()
for p in paths:
store.remove(store.get_iter(p))
def sig_unsel_all(self, widget=None):
self.model2.clear()
def fill_predefwin(self):
self.predef_model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING, gobject.TYPE_STRING)
ir_export = rpc.RPCProxy('ir.exports')
ir_export_line = rpc.RPCProxy('ir.exports.line')
export_ids = ir_export.search([('resource', '=', self.model)])
for export in ir_export.read(export_ids):
fields = ir_export_line.read(
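# The recursive field-tree walk inside __init__ above, decoupled from GTK and
# RPC as a minimal standalone sketch. walk_fields is an illustrative name; a
# nested 'fields' dict stands in for the fields_get() RPC round-trip.
def walk_fields(fields, prefix='', level=2, out=None):
    out = [] if out is None else out
    for name, desc in sorted(fields.items(), key=lambda kv: kv[1].get('string', '')):
        out.append((prefix + name, desc.get('string', name)))
        # Recurse one level per relation, mirroring model_populate's level-1 calls.
        if desc.get('relation') and level > 0:
            walk_fields(desc.get('fields', {}), prefix + name + '/', level - 1, out)
    return out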
main.py
long}&formatted=0')
sun_times = response.json()
return sun_times
#COLLECTS USER LAT / LONG & ASKS IF THEY WANT TO SUBMIT EMAIL FOR ALERTS. SAVES DATA TO JSON IF YES.
def user_input():
user = {}
search = False
while search == False:
search_area = input("Type in your country name / ISO code. \nOr type 'manual', if you want to use your own coordinates\n")
if len(df.loc[(df['name'] == search_area.title())]) > 0:
matched_result = df.loc[(df['name'] == search_area.title())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
search = True
elif len(df.loc[(df['country'] == search_area.upper())]) > 0:
matched_result = df.loc[(df['country'] == search_area.upper())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print("=====================================================================================")
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
print("=====================================================================================")
search = True
elif search_area.lower() == 'manual':
user_latitude = input("Enter Latitude\n")
user_longitude = input("Enter longitude\n")
search = True
else:
print("Country not found in Database, check spelling or type 'manual' to enter location manually\n")
question = input("Do you want email alerts when the ISS is visible from your location?\n")
if question.lower() == "y" or question.lower() == "yes":
print("Email Alert / Future use case / Testing")
user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n")
new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude}
try:
with open("users.json", "r") as user_file:
# Reading old data
data = pd.read_json("users.json")
except FileNotFoundError:
with open("users.json", "w") as user_file:
default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'],
'latitude': [40.463667, 53.41291, 37.09024, 35.86166],
'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]}
default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude'])
updated_df = default_df.append(new_entry, ignore_index=True)
updated_df.to_json(r'users.json', indent=4)
else:
df_stored = pd.DataFrame(data)
updated_df = df_stored.append(new_entry, ignore_index=True)
updated_df.to_json(r'users.json',indent=4)
finally:
print("\nUser Lat, Long & Email Returned")
return (user_latitude,user_longitude,user_email)
else:
print("user lat & user long returned only")
return (user_latitude,user_longitude)
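# Note: pd.DataFrame.append (used above) was removed in pandas 2.0; on newer
# pandas the same update can be written with concat, e.g.:
# updated_df = pd.concat([df_stored, pd.DataFrame([new_entry])], ignore_index=True)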
user = user_input()
user_la = user[0]
user_lo = user[1]
def get_iss_location():
response = requests.get(url="http://api.open-notify.org/iss-now.json")
response.raise_for_status()
data = response.json()
latitude = float(data["iss_position"]["latitude"])
longitude = float(data["iss_position"]["longitude"])
return (latitude,longitude)
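# A hedged variant of the fetch above with a request timeout, so a hung
# open-notify call cannot stall the script (the 5-second value is an
# arbitrary assumption, not from this file):
def get_iss_location_safe(timeout=5):
    response = requests.get(url="http://api.open-notify.org/iss-now.json", timeout=timeout)
    response.raise_for_status()
    position = response.json()["iss_position"]
    return (float(position["latitude"]), float(position["longitude"]))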
iss_location = get_iss_location()
def find_user():
ISS = get_iss_location()
try:
json_stored = pd.read_json('users.json')
df_stored = pd.DataFrame(json_stored)
print(f" ISS location = {ISS}")
except FileNotFoundError:
print("File not Found")
return False
else:
print("df_stored")
print(df_stored)
condition = df_stored['latitude'].between(-45,45)
print(condition)
find_user()
latitudes = df['latitude'].to_list()
longitudes = df['longitude'].to_list()
nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)]
# Current limitation: the ISS must be near the center of a country for that country to match.
countries_nearby_list = nearby_countries['name'].to_list()
country_list = []
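# The +/-45 degree window above is a coarse bounding box around the ISS
# ground point. The same mask on a toy frame (toy_df and its values are
# illustrative only):
# toy_df = pd.DataFrame({'name': ['A', 'B'],
#                        'latitude': [10.0, 80.0], 'longitude': [20.0, 170.0]})
# toy_df[toy_df['latitude'].between(-35.0, 55.0) & toy_df['longitude'].between(-25.0, 65.0)]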
def nearby_countries():
for country in countries_nearby_list:
country_add = df.loc[(df['name'] == country)]
country_name = country_add['name'].item()
country_latitude = country_add['latitude'].item()
country_longitude = country_add['longitude'].item()
direction = []
if iss_lat < country_latitude:
direction.append("North")
else:
direction.append("South")
if iss_long < country_longitude:
direction.append("East")
else:
direction.append("West")
country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(),
"Direction": f"{' '.join(direction)} of ISS"}
country_list.append(country)
iss_lat = get_iss_location()[0]
iss_long = get_iss_location()[1]
def direction_NS(iss_lat, country_location):
if iss_lat > country_location['latitude']:
print(f"ISS is North of {country_location['name']}")
return "North"
else:
print("South")
return "South"
def direction_WE(iss_long, country_location):
    if iss_long > country_location['longitude']:
        print(f"ISS is East of {country_location['name']}")
        return "East"
    else:
        print("West")
        return "West"
#Check if there are countries nearby
if len(countries_nearby_list) != 0:
# print(countries_nearby_list)
print("\n=====================================================================================")
print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}")
nearby_countries()
print("=====================================================================================")
print(country_list)
else:
print("=====================================================================================")
print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE")
nearby_countries()
print("=====================================================================================")
print("\n=====================================================================================")
print("Current Location Data")
print("Times are in UTC")
print("=====================================================================================")
#Suntimes API
# All this function really does is return True when it is night at the
# submitted location. Most of the printing below reports local information
# from the API; it is here for learning/testing and potential future expansion.
def local_is_night(user_la, user_lo):
#GET CURRENT TIME
current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":",""))
sun_times = get_sun(user_la,user_lo)
# print(current_time)
#SUNRISE FORMATTING INTO DIGITS
sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":",""))
sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":",""))
#SUNSET IN LOCATION
if sunset > current_time:
print(f"Current time is {sunrise}")
time_until_sunset = sunset - current_time
print(time_until_sunset)
if len(str(time_until_sunset)) <= 2:
print(f"{time_until_sunset} Minutes left until sunset in Submitted Location")
else:
print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}")
print(f"Sunset time is {sunset}")
#SUNRISE IN LOCATION
if sunrise > current_time:
print("Time until Sunrise ( Morning ) ")
#TIME UNTIL SUNRISE TOMORROW
if sunrise < current_time and sunset < current_time:
sunrise_time_tomorrow = 2400 - current_time + sunrise
print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}")
#FORMATTING FOR TIME OUTPUT
if len(str(sunrise)) == 3:
print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}")
else:
print(f"Sunrise time is {str(sunrise)[:-2] + ':' +
|
random_line_split
|
|
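# A sketch of how is_iss_overhead could be completed: a common pattern is to
# call the ISS "overhead" within a few degrees of the user's position. The
# name is_iss_overhead_nearby and the 5-degree margin are assumptions, not
# from this file.
def is_iss_overhead_nearby(lat, long, margin=5.0):
    iss_la, iss_lo = get_iss_location()
    return abs(iss_la - float(lat)) <= margin and abs(iss_lo - float(long)) <= margin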
main.py
|
user_longitude = matched_result['longitude'].item()
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
search = True
elif len(df.loc[(df['country'] == search_area.upper())]) > 0:
matched_result = df.loc[(df['country'] == search_area.upper())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print("=====================================================================================")
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
print("=====================================================================================")
search = True
elif search_area.lower() == 'manual':
user_latitude = input("Enter Latitude\n")
user_longitude = input("Enter longitude\n")
search = True
else:
print("Country not found in Database, check spelling or type 'manual' to enter location manually\n")
question = input("Do you want email alerts when the ISS is visible from your location?\n")
if question.lower() == "y" or question.lower() == "yes":
print("Email Alert / Future use case / Testing")
user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n")
new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude}
try:
with open("users.json", "r") as user_file:
# Reading old data
data = pd.read_json("users.json")
except FileNotFoundError:
with open("users.json", "w") as user_file:
default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'],
'latitude': [40.463667, 53.41291, 37.09024, 35.86166],
'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]}
default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude'])
updated_df = default_df.append(new_entry, ignore_index=True)
updated_df.to_json(r'users.json', indent=4)
else:
df_stored = pd.DataFrame(data)
updated_df = df_stored.append(new_entry, ignore_index=True)
updated_df.to_json(r'users.json',indent=4)
finally:
print("\nUser Lat, Long & Email Returned")
return (user_latitude,user_longitude,user_email)
else:
print("user lat & user long returned only")
return (user_latitude,user_longitude)
user = user_input()
user_la = user[0]
user_lo = user[1]
def get_iss_location():
response = requests.get(url="http://api.open-notify.org/iss-now.json")
response.raise_for_status()
data = response.json()
latitude = float(data["iss_position"]["latitude"])
longitude = float(data["iss_position"]["longitude"])
return (latitude,longitude)
iss_location = get_iss_location()
def find_user():
ISS = get_iss_location()
try:
json_stored = pd.read_json('users.json')
df_stored = pd.DataFrame(json_stored)
print(f" ISS location = {ISS}")
except FileNotFoundError:
print("File not Found")
return False
else:
print("df_stored")
print(df_stored)
condition = df_stored['latitude'].between(-45,45)
print(condition)
find_user()
latitudes = df['latitude'].to_list()
longitudes = df['longitude'].to_list()
nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)]
# Current limited use is, must be near the center of the country.
countries_nearby_list = nearby_countries['name'].to_list()
country_list = []
def nearby_countries():
for country in countries_nearby_list:
country_add = df.loc[(df['name'] == country)]
country_name = country_add['name'].item()
country_latitude = country_add['latitude'].item()
country_longitude = country_add['longitude'].item()
direction = []
if iss_lat < country_latitude:
direction.append("North")
else:
direction.append("South")
if iss_long < country_longitude:
direction.append("East")
else:
direction.append("West")
country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(),
"Direction": f"{' '.join(direction)} of ISS"}
country_list.append(country)
iss_lat = get_iss_location()[0]
iss_long = get_iss_location()[1]
def direction_NS(iss_lat, country_location):
if iss_lat > country_location['latitude']:
print(f"ISS is North of {country_location['name']}")
return "North"
else:
print("South")
return "South"
def direction_WE(iss_long, country_location):
if iss_lat > country_location['latitude']:
print(f"ISS is North of {country_location['name']}")
return "North"
else:
print("South")
return "South"
#Check if there are countries nearby
if len(countries_nearby_list) != 0:
# print(countries_nearby_list)
print("\n=====================================================================================")
print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}")
nearby_countries()
print("=====================================================================================")
print(country_list)
else:
print("=====================================================================================")
print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE")
nearby_countries()
print("=====================================================================================")
print("\n=====================================================================================")
print("Current Location Data")
print("Times are in UTC")
print("=====================================================================================")
#Suntimes API
# All this code does, is return True if it's night time where the user currently is.
# Most of this code is for printing local information accessed from API / learning / testing features / keeping it there for potential future expansion.
# Essentially though, all it does is return True at night.
def local_is_night(user_la, user_lo):
#GET CURRENT TIME
current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":",""))
sun_times = get_sun(user_la,user_lo)
# print(current_time)
#SUNRISE FORMATTING INTO DIGITS
sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":",""))
sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":",""))
#SUNSET IN LOCATION
if sunset > current_time:
print(f"Current time is {sunrise}")
time_until_sunset = sunset - current_time
print(time_until_sunset)
if len(str(time_until_sunset)) <= 2:
print(f"{time_until_sunset} Minutes left until sunset in Submitted Location")
else:
print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}")
print(f"Sunset time is {sunset}")
#SUNRISE IN LOCATION
if sunrise > current_time:
print("Time until Sunrise ( Morning ) ")
#TIME UNTIL SUNRISE TOMORROW
if sunrise < current_time and sunset < current_time:
sunrise_time_tomorrow = 2400 - current_time + sunrise
print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}")
#FORMATTING FOR TIME OUTPUT
if len(str(sunrise)) == 3:
print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}")
else:
print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[2] + list(str(sunrise))[3]}")
#SUNSET TIME - ALWAYS 4 DIGITS LONG
print(f"Sunset time is {str(sunset)[:-2] + ':' + list(str(sunset))[2] + list(str(sunset))[3]}")
#IS DARK?
if sunset < current_time:
return True
else:
return False
if len(user) == 3:
# Can pass email here if needed
user_email = user[2]
local_is_night(user_la,user_lo)
else:
local_is_night(user_la,user_lo)
def is_iss_overhead():
|
print("Checking")
|
identifier_body
|
|
main.py
|
}&formatted=0')
sun_times = response.json()
return sun_times
#COLLECTS USER LAT / LONG & ASKS IF THEY WANT TO SUBMIT EMAIL FOR ALERTS. SAVES DATA TO JSON IF YES.
def user_input():
user = {}
search = False
while search == False:
search_area = input("Type in your country name / ISO code. \nOr type 'manual', if you want to use your own coordinates\n")
if len(df.loc[(df['name'] == search_area.title())]) > 0:
matched_result = df.loc[(df['name'] == search_area.title())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
search = True
elif len(df.loc[(df['country'] == search_area.upper())]) > 0:
matched_result = df.loc[(df['country'] == search_area.upper())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print("=====================================================================================")
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
print("=====================================================================================")
search = True
elif search_area.lower() == 'manual':
user_latitude = input("Enter Latitude\n")
user_longitude = input("Enter longitude\n")
search = True
else:
print("Country not found in Database, check spelling or type 'manual' to enter location manually\n")
question = input("Do you want email alerts when the ISS is visible from your location?\n")
if question.lower() == "y" or question.lower() == "yes":
print("Email Alert / Future use case / Testing")
user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n")
new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude}
try:
with open("users.json", "r") as user_file:
# Reading old data
data = pd.read_json("users.json")
except FileNotFoundError:
with open("users.json", "w") as user_file:
default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'],
'latitude': [40.463667, 53.41291, 37.09024, 35.86166],
'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]}
default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude'])
updated_df = default_df.append(new_entry, ignore_index=True)
updated_df.to_json(r'users.json', indent=4)
else:
df_stored = pd.DataFrame(data)
updated_df = df_stored.append(new_entry, ignore_index=True)
updated_df.to_json(r'users.json',indent=4)
finally:
print("\nUser Lat, Long & Email Returned")
return (user_latitude,user_longitude,user_email)
else:
print("user lat & user long returned only")
return (user_latitude,user_longitude)
user = user_input()
user_la = user[0]
user_lo = user[1]
def get_iss_location():
response = requests.get(url="http://api.open-notify.org/iss-now.json")
response.raise_for_status()
data = response.json()
latitude = float(data["iss_position"]["latitude"])
longitude = float(data["iss_position"]["longitude"])
return (latitude,longitude)
iss_location = get_iss_location()
def
|
():
ISS = get_iss_location()
try:
json_stored = pd.read_json('users.json')
df_stored = pd.DataFrame(json_stored)
print(f" ISS location = {ISS}")
except FileNotFoundError:
print("File not Found")
return False
else:
print("df_stored")
print(df_stored)
condition = df_stored['latitude'].between(-45,45)
print(condition)
find_user()
latitudes = df['latitude'].to_list()
longitudes = df['longitude'].to_list()
nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)]
# Current limited use is, must be near the center of the country.
countries_nearby_list = nearby_countries['name'].to_list()
country_list = []
def nearby_countries():
for country in countries_nearby_list:
country_add = df.loc[(df['name'] == country)]
country_name = country_add['name'].item()
country_latitude = country_add['latitude'].item()
country_longitude = country_add['longitude'].item()
direction = []
if iss_lat < country_latitude:
direction.append("North")
else:
direction.append("South")
if iss_long < country_longitude:
direction.append("East")
else:
direction.append("West")
country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(),
"Direction": f"{' '.join(direction)} of ISS"}
country_list.append(country)
iss_lat = get_iss_location()[0]
iss_long = get_iss_location()[1]
def direction_NS(iss_lat, country_location):
if iss_lat > country_location['latitude']:
print(f"ISS is North of {country_location['name']}")
return "North"
else:
print("South")
return "South"
def direction_WE(iss_long, country_location):
if iss_lat > country_location['latitude']:
print(f"ISS is North of {country_location['name']}")
return "North"
else:
print("South")
return "South"
#Check if there are countries nearby
if len(countries_nearby_list) != 0:
# print(countries_nearby_list)
print("\n=====================================================================================")
print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}")
nearby_countries()
print("=====================================================================================")
print(country_list)
else:
print("=====================================================================================")
print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE")
nearby_countries()
print("=====================================================================================")
print("\n=====================================================================================")
print("Current Location Data")
print("Times are in UTC")
print("=====================================================================================")
#Suntimes API
# All this code does, is return True if it's night time where the user currently is.
# Most of this code is for printing local information accessed from API / learning / testing features / keeping it there for potential future expansion.
# Essentially though, all it does is return True at night.
def local_is_night(user_la, user_lo):
#GET CURRENT TIME
current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":",""))
sun_times = get_sun(user_la,user_lo)
# print(current_time)
#SUNRISE FORMATTING INTO DIGITS
sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":",""))
sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":",""))
#SUNSET IN LOCATION
if sunset > current_time:
print(f"Current time is {sunrise}")
time_until_sunset = sunset - current_time
print(time_until_sunset)
if len(str(time_until_sunset)) <= 2:
print(f"{time_until_sunset} Minutes left until sunset in Submitted Location")
else:
print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}")
print(f"Sunset time is {sunset}")
#SUNRISE IN LOCATION
if sunrise > current_time:
print("Time until Sunrise ( Morning ) ")
#TIME UNTIL SUNRISE TOMORROW
if sunrise < current_time and sunset < current_time:
sunrise_time_tomorrow = 2400 - current_time + sunrise
print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}")
#FORMATTING FOR TIME OUTPUT
if len(str(sunrise)) == 3:
print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}")
else:
print(f"Sunrise time is {str(sunrise)[:-2] + ':' +
|
find_user
|
identifier_name
|
main.py
|
.loc[(df['name'] == search_area.title())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
search = True
elif len(df.loc[(df['country'] == search_area.upper())]) > 0:
matched_result = df.loc[(df['country'] == search_area.upper())]
user_latitude = matched_result['latitude'].item()
user_longitude = matched_result['longitude'].item()
print("=====================================================================================")
print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})")
print("=====================================================================================")
search = True
elif search_area.lower() == 'manual':
user_latitude = input("Enter Latitude\n")
user_longitude = input("Enter longitude\n")
search = True
else:
print("Country not found in Database, check spelling or type 'manual' to enter location manually\n")
question = input("Do you want email alerts when the ISS is visible from your location?\n")
if question.lower() == "y" or question.lower() == "yes":
print("Email Alert / Future use case / Testing")
user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n")
new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude}
try:
with open("users.json", "r") as user_file:
# Reading old data
data = pd.read_json("users.json")
except FileNotFoundError:
with open("users.json", "w") as user_file:
default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'],
'latitude': [40.463667, 53.41291, 37.09024, 35.86166],
'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]}
default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude'])
            updated_df = pd.concat([default_df, pd.DataFrame([new_entry])], ignore_index=True)
            updated_df.to_json(r'users.json', indent=4)
else:
        df_stored = pd.DataFrame(data)
        updated_df = pd.concat([df_stored, pd.DataFrame([new_entry])], ignore_index=True)
        updated_df.to_json(r'users.json', indent=4)
finally:
print("\nUser Lat, Long & Email Returned")
return (user_latitude,user_longitude,user_email)
else:
print("user lat & user long returned only")
return (user_latitude,user_longitude)
user = user_input()
user_la = user[0]
user_lo = user[1]
def get_iss_location():
response = requests.get(url="http://api.open-notify.org/iss-now.json")
response.raise_for_status()
data = response.json()
latitude = float(data["iss_position"]["latitude"])
longitude = float(data["iss_position"]["longitude"])
return (latitude,longitude)
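# Response-shape sketch (an assumption inferred from the parsing above, not verified here):
# the endpoint returns JSON like
#   {"iss_position": {"latitude": "12.3456", "longitude": "-65.4321"}, "message": "success", ...}
# which is why both coordinates arrive as strings and are cast with float().
# Example: lat, lon = get_iss_location()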
iss_location = get_iss_location()
def find_user():
ISS = get_iss_location()
try:
json_stored = pd.read_json('users.json')
df_stored = pd.DataFrame(json_stored)
print(f" ISS location = {ISS}")
except FileNotFoundError:
print("File not Found")
return False
else:
print("df_stored")
print(df_stored)
condition = df_stored['latitude'].between(-45,45)
print(condition)
find_user()
latitudes = df['latitude'].to_list()
longitudes = df['longitude'].to_list()
nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)]
# Current limitation: the ISS must be near the center of a country for it to count.
countries_nearby_list = nearby_countries['name'].to_list()
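# A minimal, self-contained sketch of the same +/-45 degree bounding-box filter
# on a toy table (toy values are illustrative, not the real country database):
#   toy = pd.DataFrame({'name': ['A', 'B'], 'latitude': [10.0, 80.0], 'longitude': [20.0, 170.0]})
#   iss = (0.0, 0.0)
#   near = toy[toy['longitude'].between(iss[1] - 45, iss[1] + 45)
#              & toy['latitude'].between(iss[0] - 45, iss[0] + 45)]
#   near['name'].to_list()  # -> ['A']  (B fails the longitude check)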
country_list = []
def nearby_countries():
for country in countries_nearby_list:
country_add = df.loc[(df['name'] == country)]
country_name = country_add['name'].item()
country_latitude = country_add['latitude'].item()
country_longitude = country_add['longitude'].item()
direction = []
if iss_lat < country_latitude:
direction.append("North")
else:
direction.append("South")
if iss_long < country_longitude:
direction.append("East")
else:
direction.append("West")
        country = {'Name': country_name, 'Latitude': country_latitude, 'Longitude': country_longitude,
                   "Direction": f"{' '.join(direction)} of ISS"}
country_list.append(country)
iss_lat = get_iss_location()[0]
iss_long = get_iss_location()[1]
def direction_NS(iss_lat, country_location):
    if iss_lat > country_location['latitude']:
        print(f"ISS is North of {country_location['name']}")
        return "North"
    else:
        print(f"ISS is South of {country_location['name']}")
        return "South"
def direction_WE(iss_long, country_location):
    if iss_long > country_location['longitude']:
        print(f"ISS is East of {country_location['name']}")
        return "East"
    else:
        print(f"ISS is West of {country_location['name']}")
        return "West"
#Check if there are countries nearby
if len(countries_nearby_list) != 0:
# print(countries_nearby_list)
print("\n=====================================================================================")
print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}")
nearby_countries()
print("=====================================================================================")
print(country_list)
else:
print("=====================================================================================")
print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE")
nearby_countries()
print("=====================================================================================")
print("\n=====================================================================================")
print("Current Location Data")
print("Times are in UTC")
print("=====================================================================================")
#Suntimes API
# This function returns True if it is currently night time at the user's location.
# Most of the code prints local information from the API (learning / testing features, kept for potential future expansion).
# Essentially, though, it just returns True at night.
def local_is_night(user_la, user_lo):
#GET CURRENT TIME
current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":",""))
sun_times = get_sun(user_la,user_lo)
# print(current_time)
#SUNRISE FORMATTING INTO DIGITS
sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":",""))
sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":",""))
#SUNSET IN LOCATION
if sunset > current_time:
print(f"Current time is {sunrise}")
time_until_sunset = sunset - current_time
print(time_until_sunset)
        if len(str(time_until_sunset)) <= 2:
            print(f"{time_until_sunset} Minutes left until sunset in Submitted Location")
        else:
            print(f"Time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + str(time_until_sunset)[-2:]}")
print(f"Sunset time is {sunset}")
#SUNRISE IN LOCATION
if sunrise > current_time:
print("Time until Sunrise ( Morning ) ")
#TIME UNTIL SUNRISE TOMORROW
if sunrise < current_time and sunset < current_time:
sunrise_time_tomorrow = 2400 - current_time + sunrise
print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}")
#FORMATTING FOR TIME OUTPUT
    print(f"Sunrise time is {str(sunrise)[:-2] + ':' + str(sunrise)[-2:]}")
#SUNSET TIME - ALWAYS 4 DIGITS LONG
print(f"Sunset time is {str(sunset)[:-2] + ':' + list(str(sunset))[2] + list(str(sunset))[3]}")
#IS DARK?
if sunset < current_time:
return True
else:
return False
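# Worked example of the HHMM integer encoding used above (illustrative values):
# "2022-01-01 18:45:30.123" -> time part "18:45" -> strip ":" -> int 1845.
# Comparing the resulting integers preserves same-day ordering, e.g. a sunset of
# 1845 against a current time of 1230 means the sun has not set yet (1845 > 1230).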
if len(user) == 3:
# Can pass email here if needed
|
user_email = user[2]
local_is_night(user_la,user_lo)
|
conditional_block
|
|
helpers.py
|
# Set as return variables
final_X = corrected_tf_filt_X
final_Y = corrected_tf_Y
else:
# Set unmodified values as return variables
final_X = filt_X
final_Y = Y
return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx
def filter_features(Y, N):
"""
Return top N varying image features.
"""
most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:]
filt_Y = Y[:, most_varying_feature_idx]
return filt_Y, most_varying_feature_idx
def filter_expression(X, tIDs, M, k):
"""
Return top M varying transcripts, with mean expression > k, along with their transcript names.
"""
k_threshold_idx = np.mean(X, axis=0) > k
M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:]
idx = np.array(list(range(X.shape[1])))
final_exp_idx = idx[k_threshold_idx][M_varying_idx]
filt_X = X[:, final_exp_idx]
filt_tIDs = tIDs[final_exp_idx]
return filt_X, filt_tIDs, final_exp_idx
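# Usage sketch for the two filters on synthetic data (shapes and names are assumptions):
#   rng = np.random.default_rng(0)
#   X = rng.gamma(2.0, 2.0, size=(50, 200))               # 50 samples x 200 transcripts
#   tIDs = np.array([f"T{i:05d}" for i in range(200)])    # hypothetical transcript names
#   filt_X, filt_tIDs, idx = filter_expression(X, tIDs, M=20, k=3.0)
#   filt_X.shape  # -> (50, 20): the 20 most variable transcripts with mean > 3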
def compute_pearsonR(Y, X, parallel=False, verbose=False):
"""
Perform pairwise associations between filt_features and filt_expression.
    Also computes p-values for one random shuffle.
"""
# Make sure all features are > 0
# X[X < 0] = 0
N = Y.shape[1]
M = X.shape[1]
if parallel:
print('Computing in parallel')
results = {}
shuffle = ['real', 'shuffle']
for sh in shuffle:
print ("Shuffle: {}".format(sh))
Y_copy = Y.copy()
shuf_idx = list(range(Y.shape[0]))
if sh != 'real':
np.random.shuffle(shuf_idx)
Y_copy = Y_copy[shuf_idx, :]
if parallel:
def perform_pearsonr(idx):
i, j = idx
R, pv = pearsonr(Y_copy[:, i], X[:, j])
# pbar.update(1)
return R, pv
indicies = []
for i in range(N):
for j in range(M):
idx = (i,j)
indicies.append(idx)
            import pathos
            pool = pathos.pools.ProcessPool(nodes=32)
            parallel_results = pool.map(perform_pearsonr, indicies)
            R_mat = np.array([x[0] for x in parallel_results]).reshape(N,M)
            pvs = np.array([x[1] for x in parallel_results]).reshape(N,M)
else:
pbar = tqdm(total=N*M)
R_mat = np.zeros((N, M))
pvs = np.zeros((N, M))
for i in range(N):
for j in range(M):
R, pv = pearsonr(Y_copy[:, i], X[:, j])
R_mat[i, j] = R
pvs[i, j] = pv
pbar.update(1)
pbar.close()
results['Rs_{}'.format(sh)] = R_mat
results['pvs_{}'.format(sh)] = pvs
return results['Rs_real'], results['pvs_real'], results['pvs_shuffle']
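# Usage sketch (serial path; shapes are assumptions): with Y of shape (n, N) image
# features and X of shape (n, M) expression values,
#   Rs, pvs, pvs_shuffle = compute_pearsonR(Y, X)
# returns an (N, M) correlation matrix plus real and shuffled p-value matrices;
# the shuffled p-values serve as an empirical null distribution.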
def create_tissue_boundary(ID, tissue, patchsize):
from openslide import open_slide
image_filepath = os.path.join(GTEx_directory, 'data', 'raw', tissue, ID + '.svs')
image_slide = open_slide(image_filepath)
toplevel = image_slide.level_count - 1
topdim = image_slide.level_dimensions[-1]
topdownsample = image_slide.level_downsamples[-1]
topdownsampleint = int(topdownsample)
toplevelslide = image_slide.read_region((0, 0), toplevel, topdim)
toplevelslide = np.array(toplevelslide)
toplevelslide = toplevelslide[:, :, 0:3]
slide = toplevelslide
blurredslide = cv2.GaussianBlur(slide, (51, 51), 0)
blurredslide = cv2.cvtColor(blurredslide, cv2.COLOR_BGR2GRAY)
T_otsu = mahotas.otsu(blurredslide)
mask = np.zeros_like(slide)
mask = mask[:, :, 0]
mask[blurredslide < T_otsu] = 255
downsampledpatchsize = patchsize / topdownsampleint
xlimit = int(topdim[1] / downsampledpatchsize)
ylimit = int(topdim[0] / downsampledpatchsize)
# Find downsampled coords
coords = []
for i in range(xlimit):
for j in range(ylimit):
x = int(downsampledpatchsize/2 + i*downsampledpatchsize)
y = int(downsampledpatchsize/2 + j*downsampledpatchsize)
coords.append((x, y))
# Find coords in downsampled mask
mask_coords = []
for c in coords:
x = c[0]
y = c[1]
if mask[x, y] > 0:
mask_coords.append(c)
slidemarkings = slide.copy()
for c in mask_coords:
x = c[0]
y = c[1]
slidemarkings[x-3:x+3, y-3:y+3] = [0, 0, 255]
return slide, mask, slidemarkings
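# Usage sketch (ID and tissue are placeholder values):
#   slide, mask, marked = create_tissue_boundary('GTEX-XXXX-0001', 'Lung', 256)
# mask is the Otsu foreground mask at the lowest slide resolution, and marked is
# the thumbnail with the retained patch centres drawn in blue.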
def top5_bottom5_image(tissue, model, patchsize, feature):
"""
Displays thumbnails of the top 5 and bottom 5 images that activate a
given image features at a specific patchsize
"""
from openslide import open_slide
features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, model, 'mean', patchsize)
sorted_idx = np.argsort(features[:,feature - 1])
donorIDs_ordered = donorIDs[sorted_idx]
tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue)
LungGTExIDs = os.listdir(tissue_filepath)
LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs]
ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]]
topIDs = ordered_GTExIDs[-5:]
bottomIDs = ordered_GTExIDs[:5]
top_five_images = []
bottom_five_images = []
for (k,ID) in enumerate(topIDs):
image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID)
slide = open_slide(image_filepath)
x = slide.get_thumbnail(size=(400,400))
top_five_images.append(x)
for (k,ID) in enumerate(bottomIDs):
image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID)
slide = open_slide(image_filepath)
x = slide.get_thumbnail(size=(400,400))
bottom_five_images.append(x)
return top_five_images, bottom_five_images
def estimate_lambda(pv):
"""estimate lambda form a set of PV"""
LOD2 = sp.median(st.chi2.isf(pv, 1))
null_median = st.chi2.median(1)
L = (LOD2 / null_median)
return L
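# Worked example (synthetic p-values): under the null, p ~ Uniform(0, 1), so
# st.chi2.isf(pv, 1) recovers chi2(1) draws, their median matches the chi2(1)
# median (~0.455), and lambda is ~1.0; values well above 1 indicate inflation.
#   pv = np.random.uniform(size=100000)
#   estimate_lambda(pv)  # -> approximately 1.0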
def display_tissue_feature_gradient(feature, tissue):
from openslide import open_slide
features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, 'retrained', 'mean', '256')
sorted_idx = np.argsort(features[:,feature - 1])
donorIDs_ordered = donorIDs[sorted_idx]
gradient_IDs = [donorIDs_ordered[20*i] for i in range(13)]
tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue)
LungGTExIDs = os.listdir(tissue_filepath)
LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs]
ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]]
thumbnails = []
pbar = tqdm(total=len(ordered_GTExIDs))
for (k,ID) in enumerate(ordered_GTExIDs):
image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID)
slide = open_slide(image_filepath)
thumbnail = slide.get_thumbnail(size=(400,400))
feature_value = features[:,feature - 1][sorted_idx[k]]
thumbnails.append((thumbnail, feature_value))
pbar.update(1)
return thumbnails
def get_gene_name(transcript):
transcript_id = transcript.decode('utf-8').split('.')[0]
    try:
        gene_name = data.gene_name_of_gene_id(transcript_id)
    except Exception:
        gene_name = transcript_id
return gene_name
def
|
normalize_feature
|
identifier_name
|
|
helpers.py
|
vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint
if cbook.iterable(value):
val = ma.asarray(value)
val = 2 * (val-0.5)
val[val>0] *= abs(vmax - midpoint)
val[val<0] *= abs(vmin - midpoint)
val += midpoint
return val
        else:
            val = 2 * (value - 0.5)
if val < 0:
return val*abs(vmin-midpoint) + midpoint
else:
return val*abs(vmax-midpoint) + midpoint
def extract_final_layer_data(t, m, a, ps, genotypes=False, shuffle=False):
with h5py.File(GTEx_directory +
'/data/h5py/aggregated_features.h5py', 'r') as f:
X = f[t]['ordered_expression'].value
tIDs = f[t]['transcriptIDs'].value
dIDs = f[t]['donorIDs'].value
tfs, ths, t_idx = \
get_technical_factors(t, dIDs)
size_group = f[t]['-1'][ps]
Y = size_group[m][a]['ordered_aggregated_features'].value
if shuffle:
idx = np.array(range(Y.shape[0]))
np.random.shuffle(idx)
Y = Y[idx,:]
Y[Y < 0] = 0
if genotypes:
G = f[t]['ordered_genotypes'].value
gIDs = f[t]['genotype_locations'].value
return Y, X, G, dIDs, tIDs, gIDs, \
tfs, ths, t_idx
else:
return Y, X, dIDs, tIDs, \
tfs, ths, t_idx
def extract_mid_layer_data(t, l, ca, m, a, ps):
with h5py.File(GTEx_directory +
'/data/h5py/aggregated_features.h5py', 'r') as f:
expression = f[t]['ordered_expression'].value
transcriptIDs = f[t]['transcriptIDs'].value
donorIDs = f[t]['donorIDs'].value
technical_factors, technical_headers, technical_idx = \
get_technical_factors(t, donorIDs)
size_group = f[t][l][ca][ps]
features = size_group[m][a]['ordered_aggregated_features'].value
features[features < 0] = 0
return features, expression, donorIDs, transcriptIDs, \
technical_factors, technical_headers, technical_idx
def get_technical_factors(tissue, donorIDs):
phenotype_filepath = '/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz'
with gzip.open(phenotype_filepath, 'rb') as f:
g = f.read().splitlines()
phenotype_array = [str(x, 'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')]
phenotype_array = phenotype_array[1:]
phenotype_df = pd.DataFrame(phenotype_array)
phenotype_df.columns = phenotype_df.iloc[0]
phenotype_df = phenotype_df[1:]
tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue]
donorIDs = [x.decode('utf-8') for x in donorIDs]
phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']]
phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs]
tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue]
tissue_df = tissue_df.iloc[phenotype_idx, :]
SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER'])
for d in SMCENTER_dummy.columns:
tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d]
clean_tissue_df = pd.DataFrame()
for col in tissue_df.columns:
clean_factor = pd.to_numeric(tissue_df[col], errors='coerce')
clean_tissue_df[col] = clean_factor
clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1)
technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0)
clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0)
technical_factors, technical_headers = \
np.array(clean_tissue_df), clean_tissue_df.columns
technical_headers = technical_headers[technical_factors.std(0) > 0]
technical_factors = technical_factors[:,technical_factors.std(0) > 0]
return technical_factors, technical_headers, technical_idx
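# Usage sketch (donorIDs as byte strings, matching the callers above):
#   tfs, ths, t_idx = get_technical_factors('Lung', donorIDs)
# tfs is a (samples x factors) numeric matrix with zero-variance columns dropped,
# ths the surviving column names, and t_idx a boolean mask of donors whose
# technical annotations are complete.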
def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False):
"""
Computes M most varying pvalues across all patch sizes.
- Filters to the top M most varying genes that have mean expression > k.
Optional:
- Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix.
- Performs TF correction - regresses out effect of five PCs from both the image features, and expression.
"""
# Filter expression
Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size)
filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k)
if pc_correction:
print ('Correcting with {} expression PCs'.format(pc_correction))
pca = PCA(n_components=pc_correction)
pca_predictors = pca.fit_transform(filt_X)
# Correct Y
lr = LinearRegression()
lr.fit(pca_predictors, Y)
predicted_Y = lr.predict(pca_predictors)
corrected_Y = Y - predicted_Y
# Correct X
projected_filt_X = np.dot(pca_predictors,pca.components_)
corrected_filt_X = filt_X - projected_filt_X
# Set as return variables
final_X = corrected_filt_X
final_Y = corrected_Y
elif tf_correction:
print('Correcting with all technical factors')
tf_Y = Y[t_idx,:]
tf_filt_X = filt_X[t_idx,:]
        tfs[:, list(ths).index('SMTSISCH')] = np.log2(tfs[:, list(ths).index('SMTSISCH')] + 1)
tf_predictors = tfs
#Correct Y
lr_Y = LinearRegression()
lr_Y.fit(tf_predictors, tf_Y)
tf_Y_predicted = lr_Y.predict(tf_predictors)
corrected_tf_Y = tf_Y - tf_Y_predicted
#Correct X
lr_X = LinearRegression()
lr_X.fit(tf_predictors, tf_filt_X)
tf_filt_X_predicted = lr_X.predict(tf_predictors)
corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted
# Set as return variables
final_X = corrected_tf_filt_X
final_Y = corrected_tf_Y
else:
# Set unmodified values as return variables
final_X = filt_X
final_Y = Y
return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx
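# Usage sketch (argument values are illustrative, mirroring calls elsewhere in this file):
#   final_Y, final_X, dIDs, tIDs_f, tfs, ths, t_idx = \
#       filter_and_correct_expression_and_image_features(
#           'Lung', 'retrained', 'mean', '256', M=2000, k=1.0, pc_correction=5)
# With pc_correction=5, the first five expression PCs are regressed out of the
# image features and their projection is subtracted from the filtered expression.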
def filter_features(Y, N):
"""
Return top N varying image features.
"""
most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:]
filt_Y = Y[:, most_varying_feature_idx]
return filt_Y, most_varying_feature_idx
def filter_expression(X, tIDs, M, k):
"""
Return top M varying transcripts, with mean expression > k, along with their transcript names.
"""
k_threshold_idx = np.mean(X, axis=0) > k
M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:]
idx = np.array(list(range(X.shape[1])))
final_exp_idx = idx[k_threshold_idx][M_varying_idx]
filt_X = X[:, final_exp_idx]
filt_tIDs = tIDs[final_exp_idx]
return filt_X, filt_tIDs, final_exp_idx
def compute_pearsonR(Y, X, parallel=False, verbose=False):
"""
Perform pairwise associations between filt_features and filt_expression.
    Also computes p-values for one random shuffle.
"""
# Make sure all features are > 0
# X[X < 0] = 0
N = Y.shape[1]
M = X.shape[1]
if parallel:
print('Computing in parallel')
results = {}
shuffle = ['real', 'shuffle']
for sh in shuffle:
print ("Shuffle: {}".format(sh))
Y_copy = Y.copy()
shuf_idx = list(range(Y.shape[0]))
if sh != 'real':
np.random.shuffle(shuf_idx
|
raise ValueError("Not invertible until scaled")
|
conditional_block
|
|
helpers.py
|
]['transcriptIDs'].value
donorIDs = f[t]['donorIDs'].value
technical_factors, technical_headers, technical_idx = \
get_technical_factors(t, donorIDs)
size_group = f[t][l][ca][ps]
features = size_group[m][a]['ordered_aggregated_features'].value
features[features < 0] = 0
return features, expression, donorIDs, transcriptIDs, \
technical_factors, technical_headers, technical_idx
def get_technical_factors(tissue, donorIDs):
phenotype_filepath = '/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz'
with gzip.open(phenotype_filepath, 'rb') as f:
g = f.read().splitlines()
phenotype_array = [str(x, 'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')]
phenotype_array = phenotype_array[1:]
phenotype_df = pd.DataFrame(phenotype_array)
phenotype_df.columns = phenotype_df.iloc[0]
phenotype_df = phenotype_df[1:]
tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue]
donorIDs = [x.decode('utf-8') for x in donorIDs]
phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']]
phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs]
tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue]
tissue_df = tissue_df.iloc[phenotype_idx, :]
SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER'])
for d in SMCENTER_dummy.columns:
tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d]
clean_tissue_df = pd.DataFrame()
for col in tissue_df.columns:
clean_factor = pd.to_numeric(tissue_df[col], errors='coerce')
clean_tissue_df[col] = clean_factor
clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1)
technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0)
clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0)
technical_factors, technical_headers = \
np.array(clean_tissue_df), clean_tissue_df.columns
technical_headers = technical_headers[technical_factors.std(0) > 0]
technical_factors = technical_factors[:,technical_factors.std(0) > 0]
return technical_factors, technical_headers, technical_idx
def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False):
"""
Computes M most varying pvalues across all patch sizes.
- Filters to the top M most varying genes that have mean expression > k.
Optional:
- Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix.
- Performs TF correction - regresses out effect of five PCs from both the image features, and expression.
"""
# Filter expression
Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size)
filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k)
if pc_correction:
print ('Correcting with {} expression PCs'.format(pc_correction))
pca = PCA(n_components=pc_correction)
pca_predictors = pca.fit_transform(filt_X)
# Correct Y
lr = LinearRegression()
lr.fit(pca_predictors, Y)
predicted_Y = lr.predict(pca_predictors)
corrected_Y = Y - predicted_Y
# Correct X
projected_filt_X = np.dot(pca_predictors,pca.components_)
corrected_filt_X = filt_X - projected_filt_X
# Set as return variables
final_X = corrected_filt_X
final_Y = corrected_Y
elif tf_correction:
print('Correcting with all technical factors')
tf_Y = Y[t_idx,:]
tf_filt_X = filt_X[t_idx,:]
        tfs[:, list(ths).index('SMTSISCH')] = np.log2(tfs[:, list(ths).index('SMTSISCH')] + 1)
tf_predictors = tfs
#Correct Y
lr_Y = LinearRegression()
lr_Y.fit(tf_predictors, tf_Y)
tf_Y_predicted = lr_Y.predict(tf_predictors)
corrected_tf_Y = tf_Y - tf_Y_predicted
#Correct X
lr_X = LinearRegression()
lr_X.fit(tf_predictors, tf_filt_X)
tf_filt_X_predicted = lr_X.predict(tf_predictors)
corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted
# Set as return variables
final_X = corrected_tf_filt_X
final_Y = corrected_tf_Y
else:
# Set unmodified values as return variables
final_X = filt_X
final_Y = Y
return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx
def filter_features(Y, N):
"""
Return top N varying image features.
"""
most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:]
filt_Y = Y[:, most_varying_feature_idx]
return filt_Y, most_varying_feature_idx
def filter_expression(X, tIDs, M, k):
"""
Return top M varying transcripts, with mean expression > k, along with their transcript names.
"""
k_threshold_idx = np.mean(X, axis=0) > k
M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:]
idx = np.array(list(range(X.shape[1])))
final_exp_idx = idx[k_threshold_idx][M_varying_idx]
filt_X = X[:, final_exp_idx]
filt_tIDs = tIDs[final_exp_idx]
return filt_X, filt_tIDs, final_exp_idx
def compute_pearsonR(Y, X, parallel=False, verbose=False):
"""
Perform pairwise associations between filt_features and filt_expression.
    Also computes p-values for one random shuffle.
"""
# Make sure all features are > 0
# X[X < 0] = 0
N = Y.shape[1]
M = X.shape[1]
if parallel:
print('Computing in parallel')
results = {}
shuffle = ['real', 'shuffle']
for sh in shuffle:
print ("Shuffle: {}".format(sh))
Y_copy = Y.copy()
shuf_idx = list(range(Y.shape[0]))
if sh != 'real':
np.random.shuffle(shuf_idx)
Y_copy = Y_copy[shuf_idx, :]
if parallel:
def perform_pearsonr(idx):
i, j = idx
R, pv = pearsonr(Y_copy[:, i], X[:, j])
# pbar.update(1)
return R, pv
indicies = []
for i in range(N):
for j in range(M):
idx = (i,j)
indicies.append(idx)
            import pathos
            pool = pathos.pools.ProcessPool(nodes=32)
            parallel_results = pool.map(perform_pearsonr, indicies)
            R_mat = np.array([x[0] for x in parallel_results]).reshape(N,M)
            pvs = np.array([x[1] for x in parallel_results]).reshape(N,M)
else:
pbar = tqdm(total=N*M)
R_mat = np.zeros((N, M))
pvs = np.zeros((N, M))
for i in range(N):
for j in range(M):
R, pv = pearsonr(Y_copy[:, i], X[:, j])
R_mat[i, j] = R
pvs[i, j] = pv
pbar.update(1)
pbar.close()
results['Rs_{}'.format(sh)] = R_mat
results['pvs_{}'.format(sh)] = pvs
return results['Rs_real'], results['pvs_real'], results['pvs_shuffle']
def create_tissue_boundary(ID, tissue, patchsize):
from openslide import open_slide
image_filepath = os.path.join(GTEx_directory, 'data', 'raw', tissue, ID + '.svs')
image_slide = open_slide(image_filepath)
toplevel = image_slide.level_count - 1
topdim = image_slide.level_dimensions[-1]
topdownsample = image_slide.level_downsamples[-1]
topdownsampleint = int(topdownsample)
toplevelslide = image_slide.read_region((0, 0), toplevel, topdim)
toplevelslide = np.array(toplevelslide)
toplevelslide = toplevelslide[:, :, 0:3]
slide = toplevelslide
|
random_line_split
|
||
helpers.py
|
-midpoint) + midpoint
else:
return val*abs(vmax-midpoint) + midpoint
def extract_final_layer_data(t, m, a, ps, genotypes=False, shuffle=False):
with h5py.File(GTEx_directory +
'/data/h5py/aggregated_features.h5py', 'r') as f:
X = f[t]['ordered_expression'].value
tIDs = f[t]['transcriptIDs'].value
dIDs = f[t]['donorIDs'].value
tfs, ths, t_idx = \
get_technical_factors(t, dIDs)
size_group = f[t]['-1'][ps]
Y = size_group[m][a]['ordered_aggregated_features'].value
if shuffle:
idx = np.array(range(Y.shape[0]))
np.random.shuffle(idx)
Y = Y[idx,:]
Y[Y < 0] = 0
if genotypes:
G = f[t]['ordered_genotypes'].value
gIDs = f[t]['genotype_locations'].value
return Y, X, G, dIDs, tIDs, gIDs, \
tfs, ths, t_idx
else:
return Y, X, dIDs, tIDs, \
tfs, ths, t_idx
def extract_mid_layer_data(t, l, ca, m, a, ps):
with h5py.File(GTEx_directory +
'/data/h5py/aggregated_features.h5py', 'r') as f:
expression = f[t]['ordered_expression'].value
transcriptIDs = f[t]['transcriptIDs'].value
donorIDs = f[t]['donorIDs'].value
technical_factors, technical_headers, technical_idx = \
get_technical_factors(t, donorIDs)
size_group = f[t][l][ca][ps]
features = size_group[m][a]['ordered_aggregated_features'].value
features[features < 0] = 0
return features, expression, donorIDs, transcriptIDs, \
technical_factors, technical_headers, technical_idx
def get_technical_factors(tissue, donorIDs):
phenotype_filepath = '/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz'
with gzip.open(phenotype_filepath, 'rb') as f:
g = f.read().splitlines()
phenotype_array = [str(x, 'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')]
phenotype_array = phenotype_array[1:]
phenotype_df = pd.DataFrame(phenotype_array)
phenotype_df.columns = phenotype_df.iloc[0]
phenotype_df = phenotype_df[1:]
tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue]
donorIDs = [x.decode('utf-8') for x in donorIDs]
phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']]
phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs]
tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue]
tissue_df = tissue_df.iloc[phenotype_idx, :]
SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER'])
for d in SMCENTER_dummy.columns:
tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d]
clean_tissue_df = pd.DataFrame()
for col in tissue_df.columns:
clean_factor = pd.to_numeric(tissue_df[col], errors='coerce')
clean_tissue_df[col] = clean_factor
clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1)
technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0)
clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0)
technical_factors, technical_headers = \
np.array(clean_tissue_df), clean_tissue_df.columns
technical_headers = technical_headers[technical_factors.std(0) > 0]
technical_factors = technical_factors[:,technical_factors.std(0) > 0]
return technical_factors, technical_headers, technical_idx
def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False):
"""
Computes M most varying pvalues across all patch sizes.
- Filters to the top M most varying genes that have mean expression > k.
Optional:
- Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix.
- Performs TF correction - regresses out effect of five PCs from both the image features, and expression.
"""
# Filter expression
Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size)
filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k)
if pc_correction:
print ('Correcting with {} expression PCs'.format(pc_correction))
pca = PCA(n_components=pc_correction)
pca_predictors = pca.fit_transform(filt_X)
# Correct Y
lr = LinearRegression()
lr.fit(pca_predictors, Y)
predicted_Y = lr.predict(pca_predictors)
corrected_Y = Y - predicted_Y
# Correct X
projected_filt_X = np.dot(pca_predictors,pca.components_)
corrected_filt_X = filt_X - projected_filt_X
# Set as return variables
final_X = corrected_filt_X
final_Y = corrected_Y
elif tf_correction:
print('Correcting with all technical factors')
tf_Y = Y[t_idx,:]
tf_filt_X = filt_X[t_idx,:]
        tfs[:, list(ths).index('SMTSISCH')] = np.log2(tfs[:, list(ths).index('SMTSISCH')] + 1)
tf_predictors = tfs
#Correct Y
lr_Y = LinearRegression()
lr_Y.fit(tf_predictors, tf_Y)
tf_Y_predicted = lr_Y.predict(tf_predictors)
corrected_tf_Y = tf_Y - tf_Y_predicted
#Correct X
lr_X = LinearRegression()
lr_X.fit(tf_predictors, tf_filt_X)
tf_filt_X_predicted = lr_X.predict(tf_predictors)
corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted
# Set as return variables
final_X = corrected_tf_filt_X
final_Y = corrected_tf_Y
else:
# Set unmodified values as return variables
final_X = filt_X
final_Y = Y
return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx
def filter_features(Y, N):
|
def filter_expression(X, tIDs, M, k):
"""
Return top M varying transcripts, with mean expression > k, along with their transcript names.
"""
k_threshold_idx = np.mean(X, axis=0) > k
M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:]
idx = np.array(list(range(X.shape[1])))
final_exp_idx = idx[k_threshold_idx][M_varying_idx]
filt_X = X[:, final_exp_idx]
filt_tIDs = tIDs[final_exp_idx]
return filt_X, filt_tIDs, final_exp_idx
def compute_pearsonR(Y, X, parallel=False, verbose=False):
"""
Perform pairwise associations between filt_features and filt_expression.
    Also computes p-values for one random shuffle.
"""
# Make sure all features are > 0
# X[X < 0] = 0
N = Y.shape[1]
M = X.shape[1]
if parallel:
print('Computing in parallel')
results = {}
shuffle = ['real', 'shuffle']
for sh in shuffle:
print ("Shuffle: {}".format(sh))
Y_copy = Y.copy()
shuf_idx = list(range(Y.shape[0]))
if sh != 'real':
np.random.shuffle(shuf_idx)
Y_copy = Y_copy[shuf_idx, :]
if parallel:
def perform_pearsonr(idx):
i, j = idx
R, pv = pearsonr(Y_copy[:, i], X[:, j])
# pbar.update(1)
return R, pv
indicies = []
for i in range(N):
for j in range(M):
idx = (i,j)
indicies.append(idx)
            import pathos
            pool = pathos.pools.ProcessPool(nodes=32)
            parallel_results = pool.map
|
"""
Return top N varying image features.
"""
most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:]
filt_Y = Y[:, most_varying_feature_idx]
return filt_Y, most_varying_feature_idx
|
identifier_body
|
object_ptr.rs
|
::convert::TryFrom;
use std::ffi::CString;
use std::ptr::NonNull;
use std::sync::atomic::AtomicI32;
use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index};
use tvm_sys::{ArgValue, RetValue};
use crate::errors::Error;
type Deleter = unsafe extern "C" fn(object: *mut Object) -> ();
#[derive(Debug)]
#[repr(C)]
pub struct Object {
pub type_index: u32,
// TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure.
// NB: in general we should not touch this in Rust.
pub(self) ref_count: AtomicI32,
pub fdeleter: Deleter,
}
unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) {
let typed_object: *mut T = std::mem::transmute(object);
T::typed_delete(typed_object);
}
fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool {
let mut is_derived = 0;
crate::check_call!(ffi::TVMObjectDerivedFrom(
child_type_index,
parent_type_index,
&mut is_derived
));
    is_derived != 0
}
impl Object {
fn new(type_index: u32, deleter: Deleter) -> Object {
Object {
type_index,
// Note: do not touch this field directly again, this is
// a critical section, we write a 1 to the atomic which will now
// be managed by the C++ atomics.
            // In the future we should probably use C-atomics.
ref_count: AtomicI32::new(0),
fdeleter: deleter,
}
}
fn get_type_index<T: IsObject>() -> u32 {
let type_key = T::TYPE_KEY;
let cstring = CString::new(type_key).expect("type key must not contain null characters");
if type_key == "Object" {
return 0;
} else {
let mut index = 0;
unsafe {
let index_ptr = std::mem::transmute(&mut index);
if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 {
                    panic!("{}", crate::get_last_error())
}
}
return index;
}
}
pub fn base_object<T: IsObject>() -> Object {
let index = Object::get_type_index::<T>();
Object::new(index, delete::<T>)
}
pub(self) fn inc_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectRetain(raw_ptr), 0);
}
}
pub(self) fn dec_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectFree(raw_ptr), 0);
}
}
}
pub unsafe trait IsObject {
const TYPE_KEY: &'static str;
fn as_object<'s>(&'s self) -> &'s Object;
unsafe extern "C" fn typed_delete(object: *mut Self) {
let object = Box::from_raw(object);
drop(object)
}
}
unsafe impl IsObject for Object {
const TYPE_KEY: &'static str = "Object";
fn as_object<'s>(&'s self) -> &'s Object {
self
}
}
#[repr(C)]
pub struct ObjectPtr<T: IsObject> {
pub ptr: NonNull<T>,
}
fn inc_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().inc_ref() }
}
fn dec_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().dec_ref() }
}
impl ObjectPtr<Object> {
fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> {
let non_null = NonNull::new(object_ptr);
non_null.map(|ptr| ObjectPtr { ptr })
}
}
impl<T: IsObject> Clone for ObjectPtr<T> {
fn clone(&self) -> Self {
inc_ref(self.ptr);
ObjectPtr { ptr: self.ptr }
}
}
impl<T: IsObject> Drop for ObjectPtr<T> {
fn drop(&mut self) {
dec_ref(self.ptr);
}
}
impl<T: IsObject> ObjectPtr<T> {
pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T
where
T: 'a,
{
unsafe { &mut *std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() }
}
pub fn new(object: T) -> ObjectPtr<T> {
let object_ptr = Box::new(object);
let object_ptr = Box::leak(object_ptr);
let ptr = NonNull::from(object_ptr);
inc_ref(ptr);
ObjectPtr { ptr }
}
pub fn count(&self) -> i32 {
// need to do atomic read in C++
        // ABI-compatible atomics are funky/hard.
self.as_object()
.ref_count
.load(std::sync::atomic::Ordering::SeqCst)
}
fn as_object<'s>(&'s self) -> &'s Object {
unsafe { self.ptr.as_ref().as_object() }
}
pub fn upcast(&self) -> ObjectPtr<Object> {
ObjectPtr {
ptr: self.ptr.cast(),
}
}
pub fn
|
<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> {
let child_index = Object::get_type_index::<U>();
let object_index = self.as_object().type_index;
let is_derived = if child_index == object_index {
true
} else {
// TODO(@jroesch): write tests
derived_from(object_index, child_index)
};
if is_derived {
Ok(ObjectPtr {
ptr: self.ptr.cast(),
})
} else {
Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY))
}
}
}
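// A minimal usage sketch (not part of the original file; it only uses the API
// defined above): wrap a bare `Object`, clone it to bump the shared ref count,
// then upcast and downcast between typed wrappers.
//
//     let ptr = ObjectPtr::new(Object::base_object::<Object>());
//     assert_eq!(ptr.count(), 1);
//     let other = ptr.clone();           // inc_ref: count becomes 2
//     let erased: ObjectPtr<Object> = other.upcast();
//     let back: ObjectPtr<Object> = erased.downcast().expect("same type index, always succeeds");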
impl<T: IsObject> std::ops::Deref for ObjectPtr<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { self.ptr.as_ref() }
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue {
fn from(object_ptr: ObjectPtr<T>) -> RetValue {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
RetValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> {
type Error = Error;
fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> {
match ret_value {
RetValue::ObjectHandle(handle) => {
let handle: *mut Object = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> {
fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
ArgValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
#[cfg(test)]
mod tests {
use super::{Object, ObjectPtr};
use anyhow::{ensure, Result};
use std::convert::TryInto;
use tvm_sys::{ArgValue, RetValue};
#[test]
fn test_new_object() -> anyhow::Result<()> {
let object =
|
downcast
|
identifier_name
|
object_ptr.rs
|
::convert::TryFrom;
use std::ffi::CString;
use std::ptr::NonNull;
use std::sync::atomic::AtomicI32;
use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index};
use tvm_sys::{ArgValue, RetValue};
use crate::errors::Error;
type Deleter = unsafe extern "C" fn(object: *mut Object) -> ();
#[derive(Debug)]
#[repr(C)]
pub struct Object {
pub type_index: u32,
// TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure.
// NB: in general we should not touch this in Rust.
pub(self) ref_count: AtomicI32,
pub fdeleter: Deleter,
}
unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) {
let typed_object: *mut T = std::mem::transmute(object);
T::typed_delete(typed_object);
}
fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool {
let mut is_derived = 0;
crate::check_call!(ffi::TVMObjectDerivedFrom(
child_type_index,
parent_type_index,
&mut is_derived
));
    is_derived != 0
}
impl Object {
fn new(type_index: u32, deleter: Deleter) -> Object {
Object {
type_index,
// Note: do not touch this field directly again, this is
// a critical section, we write a 1 to the atomic which will now
// be managed by the C++ atomics.
            // In the future we should probably use C-atomics.
ref_count: AtomicI32::new(0),
fdeleter: deleter,
}
}
fn get_type_index<T: IsObject>() -> u32 {
let type_key = T::TYPE_KEY;
let cstring = CString::new(type_key).expect("type key must not contain null characters");
if type_key == "Object" {
return 0;
} else {
let mut index = 0;
unsafe {
let index_ptr = std::mem::transmute(&mut index);
if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 {
                    panic!("{}", crate::get_last_error())
}
}
return index;
}
}
pub fn base_object<T: IsObject>() -> Object {
let index = Object::get_type_index::<T>();
Object::new(index, delete::<T>)
}
pub(self) fn inc_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectRetain(raw_ptr), 0);
}
}
pub(self) fn dec_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectFree(raw_ptr), 0);
}
}
}
pub unsafe trait IsObject {
const TYPE_KEY: &'static str;
fn as_object<'s>(&'s self) -> &'s Object;
unsafe extern "C" fn typed_delete(object: *mut Self) {
let object = Box::from_raw(object);
drop(object)
}
}
unsafe impl IsObject for Object {
const TYPE_KEY: &'static str = "Object";
fn as_object<'s>(&'s self) -> &'s Object {
self
}
}
#[repr(C)]
pub struct ObjectPtr<T: IsObject> {
pub ptr: NonNull<T>,
}
fn inc_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().inc_ref() }
}
fn dec_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().dec_ref() }
}
impl ObjectPtr<Object> {
fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> {
let non_null = NonNull::new(object_ptr);
non_null.map(|ptr| ObjectPtr { ptr })
}
}
impl<T: IsObject> Clone for ObjectPtr<T> {
fn clone(&self) -> Self {
inc_ref(self.ptr);
ObjectPtr { ptr: self.ptr }
}
}
impl<T: IsObject> Drop for ObjectPtr<T> {
fn drop(&mut self) {
dec_ref(self.ptr);
}
}
impl<T: IsObject> ObjectPtr<T> {
pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T
where
T: 'a,
{
unsafe { &mut *std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() }
}
pub fn new(object: T) -> ObjectPtr<T> {
let object_ptr = Box::new(object);
let object_ptr = Box::leak(object_ptr);
let ptr = NonNull::from(object_ptr);
inc_ref(ptr);
ObjectPtr { ptr }
}
pub fn count(&self) -> i32 {
// need to do atomic read in C++
        // ABI-compatible atomics are funky/hard.
self.as_object()
.ref_count
.load(std::sync::atomic::Ordering::SeqCst)
}
fn as_object<'s>(&'s self) -> &'s Object
|
pub fn upcast(&self) -> ObjectPtr<Object> {
ObjectPtr {
ptr: self.ptr.cast(),
}
}
pub fn downcast<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> {
let child_index = Object::get_type_index::<U>();
let object_index = self.as_object().type_index;
let is_derived = if child_index == object_index {
true
} else {
// TODO(@jroesch): write tests
derived_from(object_index, child_index)
};
if is_derived {
Ok(ObjectPtr {
ptr: self.ptr.cast(),
})
} else {
Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY))
}
}
}
impl<T: IsObject> std::ops::Deref for ObjectPtr<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { self.ptr.as_ref() }
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue {
fn from(object_ptr: ObjectPtr<T>) -> RetValue {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
RetValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> {
type Error = Error;
fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> {
match ret_value {
RetValue::ObjectHandle(handle) => {
let handle: *mut Object = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> {
fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
ArgValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
#[cfg(test)]
mod tests {
use super::{Object, ObjectPtr};
use anyhow::{ensure, Result};
use std::convert::TryInto;
use tvm_sys::{ArgValue, RetValue};
#[test]
fn test_new_object() -> anyhow::Result<()> {
let object
|
{
unsafe { self.ptr.as_ref().as_object() }
}
|
identifier_body
|
object_ptr.rs
|
std::convert::TryFrom;
use std::ffi::CString;
use std::ptr::NonNull;
use std::sync::atomic::AtomicI32;
use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index};
use tvm_sys::{ArgValue, RetValue};
use crate::errors::Error;
type Deleter = unsafe extern "C" fn(object: *mut Object) -> ();
#[derive(Debug)]
#[repr(C)]
pub struct Object {
pub type_index: u32,
// TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure.
// NB: in general we should not touch this in Rust.
pub(self) ref_count: AtomicI32,
pub fdeleter: Deleter,
}
unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) {
let typed_object: *mut T = std::mem::transmute(object);
T::typed_delete(typed_object);
}
fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool {
let mut is_derived = 0;
crate::check_call!(ffi::TVMObjectDerivedFrom(
child_type_index,
parent_type_index,
&mut is_derived
));
    is_derived != 0
}
impl Object {
fn new(type_index: u32, deleter: Deleter) -> Object {
Object {
type_index,
// Note: do not touch this field directly again, this is
// a critical section, we write a 1 to the atomic which will now
// be managed by the C++ atomics.
            // In the future we should probably use C-atomics.
ref_count: AtomicI32::new(0),
fdeleter: deleter,
}
}
fn get_type_index<T: IsObject>() -> u32 {
let type_key = T::TYPE_KEY;
let cstring = CString::new(type_key).expect("type key must not contain null characters");
if type_key == "Object" {
return 0;
} else {
let mut index = 0;
unsafe {
let index_ptr = std::mem::transmute(&mut index);
if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 {
                    panic!("{}", crate::get_last_error())
}
}
return index;
}
}
pub fn base_object<T: IsObject>() -> Object {
let index = Object::get_type_index::<T>();
Object::new(index, delete::<T>)
}
pub(self) fn inc_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectRetain(raw_ptr), 0);
}
}
pub(self) fn dec_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectFree(raw_ptr), 0);
}
}
}
pub unsafe trait IsObject {
const TYPE_KEY: &'static str;
fn as_object<'s>(&'s self) -> &'s Object;
unsafe extern "C" fn typed_delete(object: *mut Self) {
let object = Box::from_raw(object);
drop(object)
}
}
unsafe impl IsObject for Object {
const TYPE_KEY: &'static str = "Object";
fn as_object<'s>(&'s self) -> &'s Object {
self
}
}
#[repr(C)]
pub struct ObjectPtr<T: IsObject> {
pub ptr: NonNull<T>,
}
fn inc_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().inc_ref() }
}
fn dec_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().dec_ref() }
}
impl ObjectPtr<Object> {
fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> {
let non_null = NonNull::new(object_ptr);
non_null.map(|ptr| ObjectPtr { ptr })
}
}
impl<T: IsObject> Clone for ObjectPtr<T> {
fn clone(&self) -> Self {
inc_ref(self.ptr);
ObjectPtr { ptr: self.ptr }
}
}
impl<T: IsObject> Drop for ObjectPtr<T> {
fn drop(&mut self) {
dec_ref(self.ptr);
}
}
impl<T: IsObject> ObjectPtr<T> {
pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T
where
T: 'a,
{
unsafe { &mut *std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() }
}
pub fn new(object: T) -> ObjectPtr<T> {
let object_ptr = Box::new(object);
let object_ptr = Box::leak(object_ptr);
let ptr = NonNull::from(object_ptr);
inc_ref(ptr);
ObjectPtr { ptr }
}
pub fn count(&self) -> i32 {
// need to do atomic read in C++
        // ABI-compatible atomics are funky/hard.
self.as_object()
.ref_count
.load(std::sync::atomic::Ordering::SeqCst)
}
fn as_object<'s>(&'s self) -> &'s Object {
unsafe { self.ptr.as_ref().as_object() }
}
pub fn upcast(&self) -> ObjectPtr<Object> {
ObjectPtr {
ptr: self.ptr.cast(),
}
}
pub fn downcast<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> {
let child_index = Object::get_type_index::<U>();
let object_index = self.as_object().type_index;
let is_derived = if child_index == object_index {
true
} else {
// TODO(@jroesch): write tests
derived_from(object_index, child_index)
};
if is_derived {
Ok(ObjectPtr {
ptr: self.ptr.cast(),
})
} else {
Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY))
}
}
}
impl<T: IsObject> std::ops::Deref for ObjectPtr<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { self.ptr.as_ref() }
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue {
fn from(object_ptr: ObjectPtr<T>) -> RetValue {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
RetValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> {
type Error = Error;
fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> {
match ret_value {
RetValue::ObjectHandle(handle) => {
let handle: *mut Object = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> {
fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
ArgValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
#[cfg(test)]
mod tests {
use super::{Object, ObjectPtr};
use anyhow::{ensure, Result};
use std::convert::TryInto;
use tvm_sys::{ArgValue, RetValue};
#[test]
fn test_new_object() -> anyhow::Result<()> {
|
let object =
|
random_line_split
|
|
object_ptr.rs
|
::convert::TryFrom;
use std::ffi::CString;
use std::ptr::NonNull;
use std::sync::atomic::AtomicI32;
use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index};
use tvm_sys::{ArgValue, RetValue};
use crate::errors::Error;
type Deleter = unsafe extern "C" fn(object: *mut Object) -> ();
#[derive(Debug)]
#[repr(C)]
pub struct Object {
pub type_index: u32,
// TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure.
// NB: in general we should not touch this in Rust.
pub(self) ref_count: AtomicI32,
pub fdeleter: Deleter,
}
unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) {
let typed_object: *mut T = std::mem::transmute(object);
T::typed_delete(typed_object);
}
fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool {
let mut is_derived = 0;
crate::check_call!(ffi::TVMObjectDerivedFrom(
child_type_index,
parent_type_index,
&mut is_derived
));
if is_derived == 0 {
false
} else
|
}
impl Object {
fn new(type_index: u32, deleter: Deleter) -> Object {
Object {
type_index,
// Note: do not touch this field directly again, this is
// a critical section, we write a 1 to the atomic which will now
// be managed by the C++ atomics.
            // In the future we should probably use C-atomics.
ref_count: AtomicI32::new(0),
fdeleter: deleter,
}
}
fn get_type_index<T: IsObject>() -> u32 {
let type_key = T::TYPE_KEY;
let cstring = CString::new(type_key).expect("type key must not contain null characters");
if type_key == "Object" {
return 0;
} else {
let mut index = 0;
unsafe {
let index_ptr = std::mem::transmute(&mut index);
if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 {
panic!(crate::get_last_error())
}
}
return index;
}
}
pub fn base_object<T: IsObject>() -> Object {
let index = Object::get_type_index::<T>();
Object::new(index, delete::<T>)
}
pub(self) fn inc_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectRetain(raw_ptr), 0);
}
}
pub(self) fn dec_ref(&self) {
unsafe {
let raw_ptr = std::mem::transmute(self);
assert_eq!(TVMObjectFree(raw_ptr), 0);
}
}
}
pub unsafe trait IsObject {
const TYPE_KEY: &'static str;
fn as_object<'s>(&'s self) -> &'s Object;
unsafe extern "C" fn typed_delete(object: *mut Self) {
let object = Box::from_raw(object);
drop(object)
}
}
unsafe impl IsObject for Object {
const TYPE_KEY: &'static str = "Object";
fn as_object<'s>(&'s self) -> &'s Object {
self
}
}
#[repr(C)]
pub struct ObjectPtr<T: IsObject> {
pub ptr: NonNull<T>,
}
fn inc_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().inc_ref() }
}
fn dec_ref<T: IsObject>(ptr: NonNull<T>) {
unsafe { ptr.as_ref().as_object().dec_ref() }
}
impl ObjectPtr<Object> {
fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> {
let non_null = NonNull::new(object_ptr);
non_null.map(|ptr| ObjectPtr { ptr })
}
}
impl<T: IsObject> Clone for ObjectPtr<T> {
fn clone(&self) -> Self {
inc_ref(self.ptr);
ObjectPtr { ptr: self.ptr }
}
}
impl<T: IsObject> Drop for ObjectPtr<T> {
fn drop(&mut self) {
dec_ref(self.ptr);
}
}
impl<T: IsObject> ObjectPtr<T> {
pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T
where
T: 'a,
{
unsafe { &mut *std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() }
}
pub fn new(object: T) -> ObjectPtr<T> {
let object_ptr = Box::new(object);
let object_ptr = Box::leak(object_ptr);
let ptr = NonNull::from(object_ptr);
inc_ref(ptr);
ObjectPtr { ptr }
}
pub fn count(&self) -> i32 {
// need to do atomic read in C++
        // ABI-compatible atomics are funky/hard.
self.as_object()
.ref_count
.load(std::sync::atomic::Ordering::SeqCst)
}
fn as_object<'s>(&'s self) -> &'s Object {
unsafe { self.ptr.as_ref().as_object() }
}
pub fn upcast(&self) -> ObjectPtr<Object> {
ObjectPtr {
ptr: self.ptr.cast(),
}
}
pub fn downcast<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> {
let child_index = Object::get_type_index::<U>();
let object_index = self.as_object().type_index;
let is_derived = if child_index == object_index {
true
} else {
// TODO(@jroesch): write tests
derived_from(object_index, child_index)
};
if is_derived {
Ok(ObjectPtr {
ptr: self.ptr.cast(),
})
} else {
Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY))
}
}
}
impl<T: IsObject> std::ops::Deref for ObjectPtr<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { self.ptr.as_ref() }
}
}
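// A minimal sketch (hypothetical helper, not in the original source) of the
// expected reference-count life cycle: `new` retains once, `clone` retains
// again, and each drop releases.
#[allow(dead_code)]
fn refcount_sketch() {
    let ptr: ObjectPtr<Object> = ObjectPtr::new(Object::base_object::<Object>());
    assert_eq!(ptr.count(), 1); // single owner after `new`
    let ptr2 = ptr.clone();
    assert_eq!(ptr.count(), 2); // `clone` bumped the count via TVMObjectRetain
    drop(ptr2);
    assert_eq!(ptr.count(), 1); // `Drop` released via TVMObjectFree
}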
impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue {
fn from(object_ptr: ObjectPtr<T>) -> RetValue {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
RetValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> {
type Error = Error;
fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> {
match ret_value {
RetValue::ObjectHandle(handle) => {
let handle: *mut Object = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> {
fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> {
let raw_object_ptr = ObjectPtr::leak(object_ptr);
let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) };
ArgValue::ObjectHandle(void_ptr)
}
}
impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> {
type Error = Error;
fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> {
match arg_value {
ArgValue::ObjectHandle(handle) => {
let handle = unsafe { std::mem::transmute(handle) };
let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?;
optr.downcast()
}
_ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")),
}
}
}
#[cfg(test)]
mod tests {
use super::{Object, ObjectPtr};
use anyhow::{ensure, Result};
use std::convert::TryInto;
use tvm_sys::{ArgValue, RetValue};
#[test]
fn test_new_object() -> anyhow::Result<()> {
let object
|
{
true
}
|
conditional_block
|
FtpContext.js
|
=> void} ResponseHandler
*/
/**
* FTPContext holds the control and data sockets of an FTP connection and provides a
* simplified way to interact with an FTP server, handle responses, errors and timeouts.
*
* It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP
* client as easy as possible. You won't usually instantiate this, but use `Client`.
*/
module.exports = class FTPContext {
/**
* Instantiate an FTP context.
*
* @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout.
* @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers.
*/
constructor(timeout = 0, encoding = "utf8") {
/**
* Timeout applied to all connections.
* @private
* @type {number}
*/
this._timeout = timeout;
/**
* Current task to be resolved or rejected.
* @private
* @type {(Task | undefined)}
*/
this._task = undefined;
/**
* Function that handles incoming messages and resolves or rejects a task.
* @private
* @type {(ResponseHandler | undefined)}
*/
this._handler = undefined;
/**
* A multiline response might be received as multiple chunks.
* @private
* @type {string}
*/
this._partialResponse = "";
/**
* The encoding used when reading from and writing to the control socket.
* @type {string}
*/
this.encoding = encoding;
/**
* Options for TLS connections.
* @type {import("tls").ConnectionOptions}
*/
this.tlsOptions = {};
/**
* IP version to prefer (4: IPv4, 6: IPv6).
* @type {(string | undefined)}
*/
this.ipFamily = undefined;
/**
* Log every communication detail.
* @type {boolean}
*/
this.verbose = false;
/**
* The control connection to the FTP server.
* @type {Socket}
*/
this.socket = new Socket();
/**
* The current data connection to the FTP server.
* @type {(Socket | undefined)}
*/
this.dataSocket = undefined;
}
/**
* Close the context by resetting its state.
*/
close() {
this._passToHandler({ error: { info: "User closed client during task." }});
this._reset();
}
/** @type {Socket} */
get socket() {
return this._socket;
}
/**
* Set the socket for the control connection. This will only close the current control socket
* if the new one is set to `undefined` because you're most likely to be upgrading an existing
* control connection that continues to be used.
*
* @type {Socket}
*/
set socket(socket) {
// No data socket should be open in any case where the control socket is set or upgraded.
this.dataSocket = undefined;
if (this._socket) {
this._removeSocketListeners(this._socket);
}
if (socket) {
// Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below.
socket.setTimeout(0);
socket.setKeepAlive(true);
socket.on("data", data => this._onControlSocketData(data));
this._setupErrorHandlers(socket, "control");
}
else {
this._closeSocket(this._socket);
}
this._socket = socket;
}
/** @type {(Socket | undefined)} */
get dataSocket() {
return this._dataSocket;
}
/**
* Set the socket for the data connection. This will automatically close the former data socket.
*
* @type {(Socket | undefined)}
**/
set dataSocket(socket) {
this._closeSocket(this._dataSocket);
if (socket) {
socket.setTimeout(this._timeout);
this._setupErrorHandlers(socket, "data");
}
this._dataSocket = socket;
}
/**
* Send an FTP command without waiting for or handling the result.
*
* @param {string} command
*/
send(command) {
// Don't log passwords.
const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`;
this.log(message);
this._socket.write(command + "\r\n", this.encoding);
}
/**
* Log message if set to be verbose.
*
* @param {string} message
*/
log(message) {
if (this.verbose) {
console.log(message);
}
}
/**
* Enable timeout on the control socket connection. Disabling it can be useful if
* a timeout should be caught by the current data connection instead of the
* control connection that sits idle during transfers anyway.
*
* @param {boolean} enabled
*/
enableControlTimeout(enabled) {
this.socket.setTimeout(enabled ? this._timeout : 0);
}
/**
* Return true if the control socket is using TLS. This does not mean that a session
* has already been negotiated.
*
* @returns {boolean}
*/
get hasTLS() {
//@ts-ignore: not every socket has the `encrypted` property.
return this._socket && this._socket.encrypted === true;
}
/**
* Send an FTP command and handle any response until the new task is resolved. This returns a Promise that
* will hold whatever the handler passed on when resolving/rejecting its task.
*
* @param {string} command
* @param {ResponseHandler} handler
* @returns {Promise<any>}
*/
handle(command, handler) {
if (this._handler !== undefined) {
this.close();
throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?");
}
// Only track control socket timeout during the lifecycle of a task associated with a handler.
// That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users.
this.enableControlTimeout(true);
return new Promise((resolvePromise, rejectPromise) => {
this._handler = handler;
this._task = {
// When resolving or rejecting we also want the handler
// to no longer receive any responses or errors.
resolve: (...args) => {
this._stopTrackingTask();
resolvePromise(...args);
},
reject: (...args) => {
this._stopTrackingTask();
rejectPromise(...args);
}
};
if (command !== undefined)
|
});
}
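// A minimal usage sketch (not part of this file): send a command and settle
// the task from inside the handler. The FEAT command and the 211 reply code
// are illustrative only.
//
//   const ctx = new FTPContext(5000);
//   ctx.socket.connect(21, "example.com");
//   const features = await ctx.handle("FEAT", (response, task) => {
//       if (response.error) {
//           task.reject(response.error);
//       }
//       else if (response.code === 211) {
//           task.resolve(response.message);
//       }
//   });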
/**
* Removes reference to current task and handler. This won't resolve or reject the task.
*/
_stopTrackingTask() {
// Disable timeout on control socket if there is no task active.
this.enableControlTimeout(false);
this._task = undefined;
this._handler = undefined;
}
/**
* Handle incoming data on the control socket.
*
* @private
* @param {Buffer} data
*/
_onControlSocketData(data) {
let response = data.toString(this.encoding).trim();
this.log(`< ${response}`);
// This response might complete an earlier partial response.
response = this._partialResponse + response;
const parsed = parseControlResponse(response);
// Remember any incomplete remainder.
this._partialResponse = parsed.rest;
// Each response group is passed along individually.
for (const message of parsed.messages) {
const code = parseInt(message.substr(0, 3), 10);
this._passToHandler({ code, message });
}
}
/**
* Send the current handler a response. This is usually a control socket response
* or a socket event, like an error or timeout.
*
* @private
* @param {Object} response
*/
_passToHandler(response) {
if (this._handler) {
this._handler(response, this._task);
}
}
/**
* Reset the state of this context.
*
* @private
*/
_reset() {
this.log("Closing connections.");
this._stopTrackingTask();
this._partialResponse = "";
this._closeSocket(this._socket);
this._closeSocket(this._dataSocket);
// Set a new socket instance to make reconnecting possible.
this.socket = new Socket();
}
/**
* Send an error to the current handler and close all connections.
*
* @private
* @param {*} error
*/
_closeWithError(error) {
this.log(error);
this._passToHandler({ error });
this._reset();
}
/**
* Close a socket.
*
* @private
* @param {(Socket | undefined)} socket
*/
_closeSocket(socket) {
if (socket) {
socket.destroy();
this._removeSocketListeners(socket);
}
}
/**
* Setup all error handlers for a socket.
*
* @private
* @param {Socket} socket
* @param {string} identifier
*/
_setupErrorHandlers
|
{
this.send(command);
}
|
conditional_block
|
FtpContext.js
|
=> void} ResponseHandler
*/
/**
* FTPContext holds the control and data sockets of an FTP connection and provides a
* simplified way to interact with an FTP server, handle responses, errors and timeouts.
*
* It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP
* client as easy as possible. You won't usually instantiate this, but use `Client`.
*/
module.exports = class FTPContext {
/**
* Instantiate an FTP context.
*
* @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout.
* @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers.
*/
constructor(timeout = 0, encoding = "utf8") {
/**
* Timeout applied to all connections.
* @private
* @type {number}
*/
this._timeout = timeout;
/**
* Current task to be resolved or rejected.
* @private
* @type {(Task | undefined)}
*/
this._task = undefined;
/**
* Function that handles incoming messages and resolves or rejects a task.
* @private
* @type {(ResponseHandler | undefined)}
*/
this._handler = undefined;
/**
* A multiline response might be received as multiple chunks.
* @private
* @type {string}
*/
this._partialResponse = "";
/**
* The encoding used when reading from and writing to the control socket.
* @type {string}
*/
this.encoding = encoding;
/**
* Options for TLS connections.
* @type {import("tls").ConnectionOptions}
*/
this.tlsOptions = {};
/**
* IP version to prefer (4: IPv4, 6: IPv6).
* @type {(string | undefined)}
*/
this.ipFamily = undefined;
/**
* Log every communication detail.
* @type {boolean}
*/
this.verbose = false;
/**
* The control connection to the FTP server.
* @type {Socket}
*/
this.socket = new Socket();
/**
* The current data connection to the FTP server.
* @type {(Socket | undefined)}
*/
this.dataSocket = undefined;
}
/**
* Close the context by resetting its state.
*/
close() {
this._passToHandler({ error: { info: "User closed client during task." }});
this._reset();
}
/** @type {Socket} */
get
|
() {
return this._socket;
}
/**
* Set the socket for the control connection. This will only close the current control socket
* if the new one is set to `undefined` because you're most likely to be upgrading an existing
* control connection that continues to be used.
*
* @type {Socket}
*/
set socket(socket) {
// No data socket should be open in any case where the control socket is set or upgraded.
this.dataSocket = undefined;
if (this._socket) {
this._removeSocketListeners(this._socket);
}
if (socket) {
// Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below.
socket.setTimeout(0);
socket.setKeepAlive(true);
socket.on("data", data => this._onControlSocketData(data));
this._setupErrorHandlers(socket, "control");
}
else {
this._closeSocket(this._socket);
}
this._socket = socket;
}
/** @type {(Socket | undefined)} */
get dataSocket() {
return this._dataSocket;
}
/**
* Set the socket for the data connection. This will automatically close the former data socket.
*
* @type {(Socket | undefined)}
**/
set dataSocket(socket) {
this._closeSocket(this._dataSocket);
if (socket) {
socket.setTimeout(this._timeout);
this._setupErrorHandlers(socket, "data");
}
this._dataSocket = socket;
}
/**
* Send an FTP command without waiting for or handling the result.
*
* @param {string} command
*/
send(command) {
// Don't log passwords.
const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`;
this.log(message);
this._socket.write(command + "\r\n", this.encoding);
}
/**
* Log message if set to be verbose.
*
* @param {string} message
*/
log(message) {
if (this.verbose) {
console.log(message);
}
}
/**
* Enable timeout on the control socket connection. Disabling it can be useful if
* a timeout should be caught by the current data connection instead of the
* control connection that sits idle during transfers anyway.
*
* @param {boolean} enabled
*/
enableControlTimeout(enabled) {
this.socket.setTimeout(enabled ? this._timeout : 0);
}
/**
* Return true if the control socket is using TLS. This does not mean that a session
* has already been negotiated.
*
* @returns {boolean}
*/
get hasTLS() {
//@ts-ignore: not every socket has the `encrypted` property.
return this._socket && this._socket.encrypted === true;
}
/**
* Send an FTP command and handle any response until the new task is resolved. This returns a Promise that
* will hold whatever the handler passed on when resolving/rejecting its task.
*
* @param {string} command
* @param {ResponseHandler} handler
* @returns {Promise<any>}
*/
handle(command, handler) {
if (this._handler !== undefined) {
this.close();
throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?");
}
// Only track control socket timeout during the lifecycle of a task associated with a handler.
// That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users.
this.enableControlTimeout(true);
return new Promise((resolvePromise, rejectPromise) => {
this._handler = handler;
this._task = {
// When resolving or rejecting we also want the handler
// to no longer receive any responses or errors.
resolve: (...args) => {
this._stopTrackingTask();
resolvePromise(...args);
},
reject: (...args) => {
this._stopTrackingTask();
rejectPromise(...args);
}
};
if (command !== undefined) {
this.send(command);
}
});
}
/**
* Removes reference to current task and handler. This won't resolve or reject the task.
*/
_stopTrackingTask() {
// Disable timeout on control socket if there is no task active.
this.enableControlTimeout(false);
this._task = undefined;
this._handler = undefined;
}
/**
* Handle incoming data on the control socket.
*
* @private
* @param {Buffer} data
*/
_onControlSocketData(data) {
let response = data.toString(this.encoding).trim();
this.log(`< ${response}`);
// This response might complete an earlier partial response.
response = this._partialResponse + response;
const parsed = parseControlResponse(response);
// Remember any incomplete remainder.
this._partialResponse = parsed.rest;
// Each response group is passed along individually.
for (const message of parsed.messages) {
const code = parseInt(message.substr(0, 3), 10);
this._passToHandler({ code, message });
}
}
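// Example (sketch): a reply may arrive split across chunks. If one chunk ends
// with an incomplete line such as "226 Transfer compl", parseControlResponse
// is expected to return it in `rest`; it is prepended to the next chunk, so
// the completed "226 Transfer complete" message is dispatched exactly once.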
/**
* Send the current handler a response. This is usually a control socket response
* or a socket event, like an error or timeout.
*
* @private
* @param {Object} response
*/
_passToHandler(response) {
if (this._handler) {
this._handler(response, this._task);
}
}
/**
* Reset the state of this context.
*
* @private
*/
_reset() {
this.log("Closing connections.");
this._stopTrackingTask();
this._partialResponse = "";
this._closeSocket(this._socket);
this._closeSocket(this._dataSocket);
// Set a new socket instance to make reconnecting possible.
this.socket = new Socket();
}
/**
* Send an error to the current handler and close all connections.
*
* @private
* @param {*} error
*/
_closeWithError(error) {
this.log(error);
this._passToHandler({ error });
this._reset();
}
/**
* Close a socket.
*
* @private
* @param {(Socket | undefined)} socket
*/
_closeSocket(socket) {
if (socket) {
socket.destroy();
this._removeSocketListeners(socket);
}
}
/**
* Setup all error handlers for a socket.
*
* @private
* @param {Socket} socket
* @param {string} identifier
*/
_setupErrorHandlers(socket
|
socket
|
identifier_name
|
FtpContext.js
|
=> void} ResponseHandler
*/
/**
* FTPContext holds the control and data sockets of an FTP connection and provides a
* simplified way to interact with an FTP server, handle responses, errors and timeouts.
*
* It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP
* client as easy as possible. You won't usually instantiate this, but use `Client`.
*/
module.exports = class FTPContext {
/**
* Instantiate an FTP context.
*
* @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout.
* @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers.
*/
constructor(timeout = 0, encoding = "utf8")
|
* A multiline response might be received as multiple chunks.
* @private
* @type {string}
*/
this._partialResponse = "";
/**
* The encoding used when reading from and writing to the control socket.
* @type {string}
*/
this.encoding = encoding;
/**
* Options for TLS connections.
* @type {import("tls").ConnectionOptions}
*/
this.tlsOptions = {};
/**
* IP version to prefer (4: IPv4, 6: IPv6).
* @type {(string | undefined)}
*/
this.ipFamily = undefined;
/**
* Log every communication detail.
* @type {boolean}
*/
this.verbose = false;
/**
* The control connection to the FTP server.
* @type {Socket}
*/
this.socket = new Socket();
/**
* The current data connection to the FTP server.
* @type {(Socket | undefined)}
*/
this.dataSocket = undefined;
}
/**
* Close the context by resetting its state.
*/
close() {
this._passToHandler({ error: { info: "User closed client during task." }});
this._reset();
}
/** @type {Socket} */
get socket() {
return this._socket;
}
/**
* Set the socket for the control connection. This will only close the current control socket
* if the new one is set to `undefined` because you're most likely to be upgrading an existing
* control connection that continues to be used.
*
* @type {Socket}
*/
set socket(socket) {
// No data socket should be open in any case where the control socket is set or upgraded.
this.dataSocket = undefined;
if (this._socket) {
this._removeSocketListeners(this._socket);
}
if (socket) {
// Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below.
socket.setTimeout(0);
socket.setKeepAlive(true);
socket.on("data", data => this._onControlSocketData(data));
this._setupErrorHandlers(socket, "control");
}
else {
this._closeSocket(this._socket);
}
this._socket = socket;
}
/** @type {(Socket | undefined)} */
get dataSocket() {
return this._dataSocket;
}
/**
* Set the socket for the data connection. This will automatically close the former data socket.
*
* @type {(Socket | undefined)}
**/
set dataSocket(socket) {
this._closeSocket(this._dataSocket);
if (socket) {
socket.setTimeout(this._timeout);
this._setupErrorHandlers(socket, "data");
}
this._dataSocket = socket;
}
/**
* Send an FTP command without waiting for or handling the result.
*
* @param {string} command
*/
send(command) {
// Don't log passwords.
const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`;
this.log(message);
this._socket.write(command + "\r\n", this.encoding);
}
/**
* Log message if set to be verbose.
*
* @param {string} message
*/
log(message) {
if (this.verbose) {
console.log(message);
}
}
/**
* Enable timeout on the control socket connection. Disabling it can be useful if
* a timeout should be caught by the current data connection instead of the
* control connection that sits idle during transfers anyway.
*
* @param {boolean} enabled
*/
enableControlTimeout(enabled) {
this.socket.setTimeout(enabled ? this._timeout : 0);
}
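// Usage sketch (hypothetical transfer routine, not part of this file):
// disable the control timeout during a transfer so a stall is reported by the
// data socket instead, then re-enable it afterwards:
//
//   this.enableControlTimeout(false);
//   await transferOverDataSocket(); // hypothetical helper
//   this.enableControlTimeout(true);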
/**
* Return true if the control socket is using TLS. This does not mean that a session
* has already been negotiated.
*
* @returns {boolean}
*/
get hasTLS() {
//@ts-ignore: not every socket has the `encrypted` property.
return this._socket && this._socket.encrypted === true;
}
/**
* Send an FTP command and handle any response until the new task is resolved. This returns a Promise that
* will hold whatever the handler passed on when resolving/rejecting its task.
*
* @param {string} command
* @param {ResponseHandler} handler
* @returns {Promise<any>}
*/
handle(command, handler) {
if (this._handler !== undefined) {
this.close();
throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?");
}
// Only track control socket timeout during the lifecycle of a task associated with a handler.
// That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users.
this.enableControlTimeout(true);
return new Promise((resolvePromise, rejectPromise) => {
this._handler = handler;
this._task = {
// When resolving or rejecting we also want the handler
// to no longer receive any responses or errors.
resolve: (...args) => {
this._stopTrackingTask();
resolvePromise(...args);
},
reject: (...args) => {
this._stopTrackingTask();
rejectPromise(...args);
}
};
if (command !== undefined) {
this.send(command);
}
});
}
/**
* Removes reference to current task and handler. This won't resolve or reject the task.
*/
_stopTrackingTask() {
// Disable timeout on control socket if there is no task active.
this.enableControlTimeout(false);
this._task = undefined;
this._handler = undefined;
}
/**
* Handle incoming data on the control socket.
*
* @private
* @param {Buffer} data
*/
_onControlSocketData(data) {
let response = data.toString(this.encoding).trim();
this.log(`< ${response}`);
// This response might complete an earlier partial response.
response = this._partialResponse + response;
const parsed = parseControlResponse(response);
// Remember any incomplete remainder.
this._partialResponse = parsed.rest;
// Each response group is passed along individually.
for (const message of parsed.messages) {
const code = parseInt(message.substr(0, 3), 10);
this._passToHandler({ code, message });
}
}
/**
* Send the current handler a response. This is usually a control socket response
* or a socket event, like an error or timeout.
*
* @private
* @param {Object} response
*/
_passToHandler(response) {
if (this._handler) {
this._handler(response, this._task);
}
}
/**
* Reset the state of this context.
*
* @private
*/
_reset() {
this.log("Closing connections.");
this._stopTrackingTask();
this._partialResponse = "";
this._closeSocket(this._socket);
this._closeSocket(this._dataSocket);
// Set a new socket instance to make reconnecting possible.
this.socket = new Socket();
}
/**
* Send an error to the current handler and close all connections.
*
* @private
* @param {*} error
*/
_closeWithError(error) {
this.log(error);
this._passToHandler({ error });
this._reset();
}
/**
* Close a socket.
*
* @private
* @param {(Socket | undefined)} socket
*/
_closeSocket(socket) {
if (socket) {
socket.destroy();
this._removeSocketListeners(socket);
}
}
/**
* Setup all error handlers for a socket.
*
* @private
* @param {Socket} socket
* @param {string} identifier
*/
_setupErrorHandlers(socket
|
{
/**
* Timeout applied to all connections.
* @private
* @type {number}
*/
this._timeout = timeout;
/**
* Current task to be resolved or rejected.
* @private
* @type {(Task | undefined)}
*/
this._task = undefined;
/**
* Function that handles incoming messages and resolves or rejects a task.
* @private
* @type {(ResponseHandler | undefined)}
*/
this._handler = undefined;
/**
|
identifier_body
|
FtpContext.js
|
) => void} ResponseHandler
*/
/**
* FTPContext holds the control and data sockets of an FTP connection and provides a
* simplified way to interact with an FTP server, handle responses, errors and timeouts.
*
* It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP
* client as easy as possible. You won't usually instantiate this, but use `Client`.
*/
module.exports = class FTPContext {
/**
* Instantiate an FTP context.
*
* @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout.
* @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers.
*/
constructor(timeout = 0, encoding = "utf8") {
/**
* Timeout applied to all connections.
* @private
* @type {number}
*/
this._timeout = timeout;
/**
* Current task to be resolved or rejected.
* @private
* @type {(Task | undefined)}
*/
this._task = undefined;
/**
* Function that handles incoming messages and resolves or rejects a task.
* @private
* @type {(ResponseHandler | undefined)}
*/
this._handler = undefined;
/**
* A multiline response might be received as multiple chunks.
* @private
* @type {string}
*/
this._partialResponse = "";
/**
* The encoding used when reading from and writing to the control socket.
* @type {string}
*/
this.encoding = encoding;
/**
* Options for TLS connections.
* @type {import("tls").ConnectionOptions}
*/
this.tlsOptions = {};
/**
* IP version to prefer (4: IPv4, 6: IPv6).
* @type {(string | undefined)}
*/
this.ipFamily = undefined;
/**
* Log every communication detail.
* @type {boolean}
*/
this.verbose = false;
/**
* The control connection to the FTP server.
* @type {Socket}
*/
this.socket = new Socket();
/**
* The current data connection to the FTP server.
* @type {(Socket | undefined)}
*/
this.dataSocket = undefined;
}
/**
* Close the context by resetting its state.
*/
close() {
this._passToHandler({ error: { info: "User closed client during task." }});
this._reset();
}
/** @type {Socket} */
get socket() {
return this._socket;
}
/**
* Set the socket for the control connection. This will only close the current control socket
* if the new one is set to `undefined` because you're most likely to be upgrading an existing
* control connection that continues to be used.
*
* @type {Socket}
*/
set socket(socket) {
// No data socket should be open in any case where the control socket is set or upgraded.
this.dataSocket = undefined;
if (this._socket) {
this._removeSocketListeners(this._socket);
}
if (socket) {
// Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below.
socket.setTimeout(0);
socket.setKeepAlive(true);
socket.on("data", data => this._onControlSocketData(data));
this._setupErrorHandlers(socket, "control");
}
else {
this._closeSocket(this._socket);
}
this._socket = socket;
}
/** @type {(Socket | undefined)} */
get dataSocket() {
|
}
/**
* Set the socket for the data connection. This will automatically close the former data socket.
*
* @type {(Socket | undefined)}
**/
set dataSocket(socket) {
this._closeSocket(this._dataSocket);
if (socket) {
socket.setTimeout(this._timeout);
this._setupErrorHandlers(socket, "data");
}
this._dataSocket = socket;
}
/**
* Send an FTP command without waiting for or handling the result.
*
* @param {string} command
*/
send(command) {
// Don't log passwords.
const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`;
this.log(message);
this._socket.write(command + "\r\n", this.encoding);
}
/**
* Log message if set to be verbose.
*
* @param {string} message
*/
log(message) {
if (this.verbose) {
console.log(message);
}
}
/**
* Enable timeout on the control socket connection. Disabling it can be useful if
* a timeout should be caught by the current data connection instead of the
* control connection that sits idle during transfers anyway.
*
* @param {boolean} enabled
*/
enableControlTimeout(enabled) {
this.socket.setTimeout(enabled ? this._timeout : 0);
}
/**
* Return true if the control socket is using TLS. This does not mean that a session
* has already been negotiated.
*
* @returns {boolean}
*/
get hasTLS() {
//@ts-ignore: not every socket has the `encrypted` property.
return this._socket && this._socket.encrypted === true;
}
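// Sketch (assumes Node's built-in `tls` module): upgrading the control socket,
// e.g. with tls.connect({ socket: this.socket, ...this.tlsOptions }), yields a
// TLSSocket whose `encrypted` property is true; assigning it back to
// `this.socket` makes hasTLS report the upgrade.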
/**
* Send an FTP command and handle any response until the new task is resolved. This returns a Promise that
* will hold whatever the handler passed on when resolving/rejecting its task.
*
* @param {string} command
* @param {ResponseHandler} handler
* @returns {Promise<any>}
*/
handle(command, handler) {
if (this._handler !== undefined) {
this.close();
throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?");
}
// Only track control socket timeout during the lifecycle of a task associated with a handler.
// That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users.
this.enableControlTimeout(true);
return new Promise((resolvePromise, rejectPromise) => {
this._handler = handler;
this._task = {
// When resolving or rejecting we also want the handler
// to no longer receive any responses or errors.
resolve: (...args) => {
this._stopTrackingTask();
resolvePromise(...args);
},
reject: (...args) => {
this._stopTrackingTask();
rejectPromise(...args);
}
};
if (command !== undefined) {
this.send(command);
}
});
}
/**
* Removes reference to current task and handler. This won't resolve or reject the task.
*/
_stopTrackingTask() {
// Disable timeout on control socket if there is no task active.
this.enableControlTimeout(false);
this._task = undefined;
this._handler = undefined;
}
/**
* Handle incoming data on the control socket.
*
* @private
* @param {Buffer} data
*/
_onControlSocketData(data) {
let response = data.toString(this.encoding).trim();
this.log(`< ${response}`);
// This response might complete an earlier partial response.
response = this._partialResponse + response;
const parsed = parseControlResponse(response);
// Remember any incomplete remainder.
this._partialResponse = parsed.rest;
// Each response group is passed along individually.
for (const message of parsed.messages) {
const code = parseInt(message.substr(0, 3), 10);
this._passToHandler({ code, message });
}
}
/**
* Send the current handler a response. This is usually a control socket response
* or a socket event, like an error or timeout.
*
* @private
* @param {Object} response
*/
_passToHandler(response) {
if (this._handler) {
this._handler(response, this._task);
}
}
/**
* Reset the state of this context.
*
* @private
*/
_reset() {
this.log("Closing connections.");
this._stopTrackingTask();
this._partialResponse = "";
this._closeSocket(this._socket);
this._closeSocket(this._dataSocket);
// Set a new socket instance to make reconnecting possible.
this.socket = new Socket();
}
/**
* Send an error to the current handler and close all connections.
*
* @private
* @param {*} error
*/
_closeWithError(error) {
this.log(error);
this._passToHandler({ error });
this._reset();
}
/**
* Close a socket.
*
* @private
* @param {(Socket | undefined)} socket
*/
_closeSocket(socket) {
if (socket) {
socket.destroy();
this._removeSocketListeners(socket);
}
}
/**
* Setup all error handlers for a socket.
*
* @private
* @param {Socket} socket
* @param {string} identifier
*/
_setupErrorHandlers(socket
|
return this._dataSocket;
|
random_line_split
|
custom_fingers.py
|
for Known node:
# path_len is the path length from the source node,
# ident is the identity value of the Known node.
# lindex is the list index of the Known node.
Knode = namedtuple('Knode', ['path_len', 'ident','lindex'])
def rand_ident():
"""
Generate random identity in the range [0,MAX_IDENT)
"""
return random.randrange(MAX_IDENT)
def dist_ident(x,y):
"""
Directed distance from x to y (according to ident):
"""
return (y - x) % MAX_IDENT
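# Example (sketch, MAX_IDENT == 16 for illustration): the distance is
# directional and wraps around the identity ring:
#   dist_ident(3, 5) == 2
#   dist_ident(5, 3) == 14   # the long way around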
|
def remove_knodes_duplicates(knodes):
"""
Go over a list of knodes and remove knodes that show up more than once.
If a node ident shows up more than once, we keep the entry with the shorter path.
"""
if len(knodes) == 0:
return knodes
knodes.sort(key=lambda kn:(kn.ident,kn.path_len))
# Resulting array
cur_ident = knodes[0].ident
res = [knodes[0]]
for kn in knodes[1:]:
if kn.ident != cur_ident:
cur_ident = kn.ident
res.append(kn)
return res
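# Example (sketch): duplicates resolve in favour of the shorter path. Knode
# fields are (path_len, ident, lindex), so:
#   kns = [Knode(3, 5, 0), Knode(1, 5, 2), Knode(2, 7, 1)]
#   remove_knodes_duplicates(kns) == [Knode(1, 5, 2), Knode(2, 7, 1)]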
# A node:
class Node():
def __init__(self,fk,ident=None):
"""
Initialize a node.
"""
# If ident value is not specified, we randomize one:
if ident is None:
self.ident = rand_ident()
else:
self.ident = ident
# Argument related to amount of known best finger candidates.
self.fk = fk
# Initialize list of known nodes:
self.neighbours = []
self.best_finger_succ = [list() for f in range(IDENT_BITS)]
self.best_finger_pred = [list() for f in range(IDENT_BITS)]
def get_finger_succ_loc(self,f):
"""
Get the exact location of successor finger f.
"""
return (self.ident + 2**f) % MAX_IDENT
def get_finger_pred_loc(self,f):
"""
Get the exact location of predecessor finger f.
"""
return (self.ident - 2**f) % MAX_IDENT
def set_neighbours(self,knodes):
"""
set knodes to be the neighbours of this Node.
"""
self.neighbours = []
for kn in knodes:
# Make sure we don't have ourselves as a neighbour:
if kn.ident == self.ident:
continue
# A neighbour has a path length 1:
self.neighbours.append(\
kn._replace(path_len=1))
# Update known nodes:
self.add_known_nodes(0,self.neighbours)
def add_known_best_finger_succ(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
successor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes)
self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len))
def add_known_best_finger_pred(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
predecessor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes)
self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len))
def add_known_nodes(self,source_path_len,knodes):
"""
Add a set of known nodes to this node's finger candidate lists.
Take the change of path_len into account.
"""
# Update the path lengths:
updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\
for kn in knodes]
# Make sure the node self.ident is not inside:
updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\
updated_knodes))
for f in SUCC_FINGERS:
self.add_known_best_finger_succ(f,updated_knodes)
for f in PRED_FINGERS:
self.add_known_best_finger_pred(f,updated_knodes)
def get_known(self):
"""
Return a list of all known nodes.
Items in the list are unique.
"""
pool = set()
# Add neighbours:
pool.update(self.neighbours)
# Add fingers:
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_close(self):
"""
Return a list of the closest known nodes.
Close in the virtual sense, to self.ident,
and to the possible fingers on the Chord DHT.
"""
pool = set()
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_best_succ_finger(self,f):
"""
Get the best successor for finger f.
"""
return min(self.best_finger_succ[f],\
key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident))
def get_best_pred_finger(self,f):
"""
Get the best predecessor for finger f.
"""
return min(self.best_finger_pred[f],\
key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f)))
# Simulation for a mesh network with Virtual DHT abilities:
class VirtualDHT():
def __init__(self,n,fk,nei):
# Amount of nodes:
self.num_nodes = n
# Half the amount of neighbours per node:
self.nei = nei
# Known finger nodes parameter:
self.fk = fk
# Generate nodes and neighbours links:
self.gen_nodes()
self.rand_neighbours()
def gen_nodes(self):
"""
Generate n nodes with random identity numbers.
"""
self.nodes = []
for i in range(self.num_nodes):
self.nodes.append(Node(self.fk))
def make_knode(self,i,path_len=0):
"""
Given an index i of a node in self.nodes,
create a Knode tuple. Optionally set path_len.
"""
return Knode(path_len=path_len,\
ident=self.nodes[i].ident,\
lindex=i)
def rand_neighbours(self):
"""
Randomize immediate neighbours links between the nodes.
"""
# Initialize neighbours sets as empty sets:
nodes_nei = [set() for _ in range(self.num_nodes)]
for i,nd in enumerate(self.nodes):
# Sample a set of indices (which represent a set of nodes).
# Those nodes will be nd's neighbours:
nodes_nei[i].update(\
random.sample(range(self.num_nodes),self.nei))
# Remove myself:
nodes_nei[i].discard(i)
# To make the graph undirected, we add i to be neighbour of all
# i's neighbours:
for j in nodes_nei[i]:
nodes_nei[j].add(i)
for i,nd in enumerate(self.nodes):
# Initialize a list of neighbours:
nd.set_neighbours(map(self.make_knode,list(nodes_nei[i])))
def iter_node(self,i):
"""
Ask all known nodes for better known nodes.
i is the index of the node in self.nodes.
"""
nd = self.nodes[i]
for kn in nd.get_close():
# for kn in nd.get_known():
# for kn in nd.neighbours:
kn_node = self.nodes[kn.lindex]
nd.add_known_nodes(kn.path_len,kn_node.get_close())
def iter_all(self):
"""
Perform a full iteration, where all nodes ask other nodes for better
nodes.
"""
for i in range(self.num_nodes):
self.iter_node(i)
def converge(self,max_iters=0x10):
"""
"converge" the DHT by iterating until nothing changes.
"""
for i in range(max_iters):
self.iter_all()
print(".",end="",flush=True)
if self.verify():
print("\nReached correct succ and pred + fingers.")
return
print("\nmax_iters acheived.")
def verify_succ_pred_fingers(self):
"""
Verify the succ and pred fingers found for all nodes.
"""
# Get all nodes (as Knodes), and sort them according to ident:
lnodes = list(map(self.make_knode,range(self.num_nodes)))
lnodes.sort(key=lambda ln:ln.ident)
idents = [ln.ident for ln in lnodes]
for i,ln in enumerate(lnodes):
nd = self.nodes[ln.lindex]
for f in SUCC_FINGERS:
ind = bisect.bisect_left(\
idents,nd.get_finger_succ_loc(f))
f_succ = lnodes[(ind) % self.num_nodes]
if nd.get_best_succ_finger(f).ident != f_succ.ident:
return False
for f in PRED_FINGERS:
ind = bisect.bisect_right(\
idents,nd.get_finger_pred_loc(f))
f_pred = lnodes[(ind-1) % self.num_nodes]
if nd
|
random_line_split
|
|
custom_fingers.py
|
[0].ident
res = [knodes[0]]
for kn in knodes[1:]:
if kn.ident != cur_ident:
cur_ident = kn.ident
res.append(kn)
return res
# A node:
class Node():
def __init__(self,fk,ident=None):
"""
Initialize a node.
"""
# If ident value is not specified, we randomize one:
if ident is None:
self.ident = rand_ident()
else:
self.ident = ident
# Argument related to amount of known best finger candidates.
self.fk = fk
# Initialize list of known nodes:
self.neighbours = []
self.best_finger_succ = [list() for f in range(IDENT_BITS)]
self.best_finger_pred = [list() for f in range(IDENT_BITS)]
def get_finger_succ_loc(self,f):
"""
Get the exact location of successor finger f.
"""
return (self.ident + 2**f) % MAX_IDENT
def get_finger_pred_loc(self,f):
"""
Get the exact location of predecessor finger f.
"""
return (self.ident - 2**f) % MAX_IDENT
def set_neighbours(self,knodes):
"""
set knodes to be the neighbours of this Node.
"""
self.neighbours = []
for kn in knodes:
# Make sure we don't have ourselves as a neighbour:
if kn.ident == self.ident:
continue
# A neighbour has a path length 1:
self.neighbours.append(\
kn._replace(path_len=1))
# Update known nodes:
self.add_known_nodes(0,self.neighbours)
def add_known_best_finger_succ(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
successor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes)
self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len))
def add_known_best_finger_pred(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
predecessor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes)
self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len))
def add_known_nodes(self,source_path_len,knodes):
"""
Add a set of known nodes to this node's finger candidate lists.
Take the change of path_len into account.
"""
# Update the path lengths:
updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\
for kn in knodes]
# Make sure the node self.ident is not inside:
updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\
updated_knodes))
for f in SUCC_FINGERS:
self.add_known_best_finger_succ(f,updated_knodes)
for f in PRED_FINGERS:
self.add_known_best_finger_pred(f,updated_knodes)
def get_known(self):
"""
Return a list of all known nodes.
Items in the list are unique.
"""
pool = set()
# Add neighbours:
pool.update(self.neighbours)
# Add fingers:
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_close(self):
"""
Return a list of the closest known nodes.
Close in the virtual sense, to self.ident,
and to the possible fingers on the Chord DHT.
"""
pool = set()
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_best_succ_finger(self,f):
"""
Get the best successor for finger f.
"""
return min(self.best_finger_succ[f],\
key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident))
def get_best_pred_finger(self,f):
"""
Get the best predecessor for finger f.
"""
return min(self.best_finger_pred[f],\
key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f)))
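# Example (sketch, MAX_IDENT == 16 for illustration): a node with ident 6 has
# successor finger locations (6 + 2**f) % 16 -> 7, 8, 10, 14 for f in 0..3,
# and predecessor locations (6 - 2**f) % 16 -> 5, 4, 2, 14.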
# Simulation for a mesh network with Virtual DHT abilities:
class VirtualDHT():
def __init__(self,n,fk,nei):
# Amount of nodes:
self.num_nodes = n
# Half the amount of neighbours per node:
self.nei = nei
# Known finger nodes parameter:
self.fk = fk
# Generate nodes and neighbours links:
self.gen_nodes()
self.rand_neighbours()
def gen_nodes(self):
"""
Generate n nodes with random identity numbers.
"""
self.nodes = []
for i in range(self.num_nodes):
self.nodes.append(Node(self.fk))
def make_knode(self,i,path_len=0):
"""
Given an index i of a node in self.nodes,
create a Knode tuple. Optionally set path_len.
"""
return Knode(path_len=path_len,\
ident=self.nodes[i].ident,\
lindex=i)
def rand_neighbours(self):
"""
Randomize immediate neighbours links between the nodes.
"""
# Initialize neighbours sets as empty sets:
nodes_nei = [set() for _ in range(self.num_nodes)]
for i,nd in enumerate(self.nodes):
# Sample a set of indices (which represent a set of nodes).
# Those nodes will be nd's neighbours:
nodes_nei[i].update(\
random.sample(range(self.num_nodes),self.nei))
# Remove myself:
nodes_nei[i].discard(i)
# To make the graph undirected, we add i to be neighbour of all
# i's neighbours:
for j in nodes_nei[i]:
nodes_nei[j].add(i)
for i,nd in enumerate(self.nodes):
# Initialize a list of neighbours:
nd.set_neighbours(map(self.make_knode,list(nodes_nei[i])))
def iter_node(self,i):
"""
Ask all known nodes for better known nodes.
i is the index of the node in self.nodes.
"""
nd = self.nodes[i]
for kn in nd.get_close():
# for kn in nd.get_known():
# for kn in nd.neighbours:
kn_node = self.nodes[kn.lindex]
nd.add_known_nodes(kn.path_len,kn_node.get_close())
def iter_all(self):
"""
Perform a full iteration, where all nodes ask other nodes for better
nodes.
"""
for i in range(self.num_nodes):
self.iter_node(i)
def converge(self,max_iters=0x10):
"""
"converge" the DHT by iterating until nothing changes.
"""
for i in range(max_iters):
self.iter_all()
print(".",end="",flush=True)
if self.verify():
print("\nReached correct succ and pred + fingers.")
return
print("\nmax_iters acheived.")
def verify_succ_pred_fingers(self):
"""
Verify the succ and pred fingers found for all nodes.
"""
# Get all nodes (as Knodes), and sort them according to ident:
lnodes = list(map(self.make_knode,range(self.num_nodes)))
lnodes.sort(key=lambda ln:ln.ident)
idents = [ln.ident for ln in lnodes]
for i,ln in enumerate(lnodes):
nd = self.nodes[ln.lindex]
for f in SUCC_FINGERS:
ind = bisect.bisect_left(\
idents,nd.get_finger_succ_loc(f))
f_succ = lnodes[(ind) % self.num_nodes]
if nd.get_best_succ_finger(f).ident != f_succ.ident:
return False
for f in PRED_FINGERS:
ind = bisect.bisect_right(\
idents,nd.get_finger_pred_loc(f))
f_pred = lnodes[(ind-1) % self.num_nodes]
if nd.get_best_pred_finger(f).ident != f_pred.ident:
return False
return True
def verify(self):
"""
Verify all the found nodes.
"""
if not self.verify_succ_pred_fingers():
return False
return True
def sample_path_len(self,num_samp=0x200):
"""
Find an approximate average of the path_len to the successor and
predecessor fingers.
"""
sum_finger_path = 0.0
# We don't want to sample more than the total amount of nodes:
num_samp = min([num_samp,self.num_nodes])
snodes = random.sample(self.nodes,num_samp)
for sn in snodes:
|
for f in SUCC_FINGERS:
sum_finger_path += sn.get_best_succ_finger(f).path_len
for f in PRED_FINGERS:
sum_finger_path += sn.get_best_pred_finger(f).path_len
|
conditional_block
|
|
custom_fingers.py
|
Known node:
# path_len is the path length from the source node,
# ident is the identity value of the Known node.
# lindex is the list index of the Known node.
Knode = namedtuple('Knode', ['path_len', 'ident','lindex'])
def rand_ident():
"""
Generate random identity in the range [0,MAX_IDENT)
"""
return random.randrange(MAX_IDENT)
def dist_ident(x,y):
"""
Directed distance from x to y (according to ident):
"""
return (y - x) % MAX_IDENT
def remove_knodes_duplicates(knodes):
"""
Go over a list of knodes and remove knodes that show up more than once.
If a node ident shows up more than once, we keep the entry with the shorter path.
"""
if len(knodes) == 0:
return knodes
knodes.sort(key=lambda kn:(kn.ident,kn.path_len))
# Resulting array
cur_ident = knodes[0].ident
res = [knodes[0]]
for kn in knodes[1:]:
if kn.ident != cur_ident:
cur_ident = kn.ident
res.append(kn)
return res
# A node:
class Node():
def __init__(self,fk,ident=None):
"""
Initialize a node.
"""
# If ident value is not specified, we randomize one:
if ident is None:
self.ident = rand_ident()
else:
self.ident = ident
# Argument related to amount of known best finger candidates.
self.fk = fk
# Initialize list of known nodes:
self.neighbours = []
self.best_finger_succ = [list() for f in range(IDENT_BITS)]
self.best_finger_pred = [list() for f in range(IDENT_BITS)]
def get_finger_succ_loc(self,f):
"""
Get the exact location of successor finger f.
"""
return (self.ident + 2**f) % MAX_IDENT
def get_finger_pred_loc(self,f):
"""
Get the exact location of predecessor finger f.
"""
return (self.ident - 2**f) % MAX_IDENT
def set_neighbours(self,knodes):
"""
set knodes to be the neighbours of this Node.
"""
self.neighbours = []
for kn in knodes:
# Make sure we don't have ourselves as a neighbour:
if kn.ident == self.ident:
continue
# A neighbour has a path length 1:
self.neighbours.append(\
kn._replace(path_len=1))
# Update known nodes:
self.add_known_nodes(0,self.neighbours)
def add_known_best_finger_succ(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
successor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes)
self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len))
def add_known_best_finger_pred(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
predecessor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes)
self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len))
def add_known_nodes(self,source_path_len,knodes):
"""
Add a set of known nodes to this node's finger candidate lists.
Take the change of path_len into account.
"""
# Update the path lengths:
updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\
for kn in knodes]
# Make sure the node self.ident is not inside:
updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\
updated_knodes))
for f in SUCC_FINGERS:
self.add_known_best_finger_succ(f,updated_knodes)
for f in PRED_FINGERS:
self.add_known_best_finger_pred(f,updated_knodes)
def get_known(self):
"""
Return a list of all known nodes.
Items in the list are unique.
"""
pool = set()
# Add neighbours:
pool.update(self.neighbours)
# Add fingers:
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_close(self):
"""
Return a list of the closest known nodes.
Close in the virtual sense, to self.ident,
and to the possible fingers on the Chord DHT.
"""
pool = set()
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_best_succ_finger(self,f):
"""
Get the best successor for finger f.
"""
return min(self.best_finger_succ[f],\
key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident))
def get_best_pred_finger(self,f):
"""
Get the best predecessor for finger f.
"""
return min(self.best_finger_pred[f],\
key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f)))
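# Sketch: Knode is an immutable namedtuple, so path lengths are updated via
# _replace (as set_neighbours and add_known_nodes do above), e.g.:
#   Knode(2, 5, 0)._replace(path_len=2 + 1) == Knode(3, 5, 0)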
# Simulation for a mesh network with Virtual DHT abilities:
class VirtualDHT():
def __init__(self,n,fk,nei):
# Amount of nodes:
self.num_nodes = n
# Half the amount of neighbours per node:
self.nei = nei
# Known finger nodes parameter:
self.fk = fk
# Generate nodes and neighbours links:
self.gen_nodes()
self.rand_neighbours()
def gen_nodes(self):
"""
Generate n nodes with random identity numbers.
"""
self.nodes = []
for i in range(self.num_nodes):
self.nodes.append(Node(self.fk))
def make_knode(self,i,path_len=0):
"""
Given an index i of a node in self.nodes,
create a Knode tuple. Optionally set path_len.
"""
return Knode(path_len=path_len,\
ident=self.nodes[i].ident,\
lindex=i)
def rand_neighbours(self):
"""
Randomize immediate neighbours links between the nodes.
"""
# Initialize neighbours sets as empty sets:
nodes_nei = [set() for _ in range(self.num_nodes)]
for i,nd in enumerate(self.nodes):
# Sample a set of indices (which represent a set of nodes).
# Those nodes will be nd's neighbours:
nodes_nei[i].update(\
random.sample(range(self.num_nodes),self.nei))
# Remove myself:
nodes_nei[i].discard(i)
# To make the graph undirected, we add i to be neighbour of all
# i's neighbours:
for j in nodes_nei[i]:
nodes_nei[j].add(i)
for i,nd in enumerate(self.nodes):
# Initialize a list of neighbours:
nd.set_neighbours(map(self.make_knode,list(nodes_nei[i])))
def iter_node(self,i):
"""
Ask all known nodes for better known nodes.
i is the index of the node in self.nodes.
"""
nd = self.nodes[i]
for kn in nd.get_close():
# for kn in nd.get_known():
# for kn in nd.neighbours:
kn_node = self.nodes[kn.lindex]
nd.add_known_nodes(kn.path_len,kn_node.get_close())
def iter_all(self):
"""
Perform a full iteration, where all nodes ask other nodes for better
nodes.
"""
for i in range(self.num_nodes):
self.iter_node(i)
def converge(self,max_iters=0x10):
|
def verify_succ_pred_fingers(self):
"""
Verify the succ and pred fingers found for all nodes.
"""
# Get all nodes (as Knodes), and sort them according to ident:
lnodes = list(map(self.make_knode,range(self.num_nodes)))
lnodes.sort(key=lambda ln:ln.ident)
idents = [ln.ident for ln in lnodes]
for i,ln in enumerate(lnodes):
nd = self.nodes[ln.lindex]
for f in SUCC_FINGERS:
ind = bisect.bisect_left(\
idents,nd.get_finger_succ_loc(f))
f_succ = lnodes[(ind) % self.num_nodes]
if nd.get_best_succ_finger(f).ident != f_succ.ident:
return False
for f in PRED_FINGERS:
ind = bisect.bisect_right(\
idents,nd.get_finger_pred_loc(f))
f_pred = lnodes[(ind-1) % self.num_nodes]
if nd
|
"""
"converge" the DHT by iterating until nothing changes.
"""
for i in range(max_iters):
self.iter_all()
print(".",end="",flush=True)
if self.verify():
print("\nReached correct succ and pred + fingers.")
return
print("\nmax_iters acheived.")
|
identifier_body
|
custom_fingers.py
|
Known node:
# path_len is the path length from the source node,
# ident is the identity value of the Known node.
# lindex is the list index of the Known node.
Knode = namedtuple('Knode', ['path_len', 'ident','lindex'])
def rand_ident():
"""
Generate random identity in the range [0,MAX_IDENT)
"""
return random.randrange(MAX_IDENT)
def dist_ident(x,y):
"""
Directed distance from x to y (according to ident):
"""
return (y - x) % MAX_IDENT
def remove_knodes_duplicates(knodes):
"""
Go over a list of knodes and remove knodes that show up more than once.
If a node ident shows up more than once, we keep the entry with the shorter path.
"""
if len(knodes) == 0:
return knodes
knodes.sort(key=lambda kn:(kn.ident,kn.path_len))
# Resulting array
cur_ident = knodes[0].ident
res = [knodes[0]]
for kn in knodes[1:]:
if kn.ident != cur_ident:
cur_ident = kn.ident
res.append(kn)
return res
# A node:
class Node():
def __init__(self,fk,ident=None):
"""
Initialize a node.
"""
# If ident value is not specified, we randomize one:
if ident is None:
self.ident = rand_ident()
else:
self.ident = ident
# Argument related to amount of known best finger candidates.
self.fk = fk
# Initialize list of known nodes:
self.neighbours = []
self.best_finger_succ = [list() for f in range(IDENT_BITS)]
self.best_finger_pred = [list() for f in range(IDENT_BITS)]
def get_finger_succ_loc(self,f):
"""
Get the exact location of successor finger f.
"""
return (self.ident + 2**f) % MAX_IDENT
def get_finger_pred_loc(self,f):
"""
Get the exact location of predecessor finger f.
"""
return (self.ident - 2**f) % MAX_IDENT
def set_neighbours(self,knodes):
"""
set knodes to be the neighbours of this Node.
"""
self.neighbours = []
for kn in knodes:
# Make sure we don't have ourselves as a neighbour:
if kn.ident == self.ident:
continue
# A neighbour has a path length 1:
self.neighbours.append(\
kn._replace(path_len=1))
# Update known nodes:
self.add_known_nodes(0,self.neighbours)
def add_known_best_finger_succ(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
successor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes)
self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len))
def add_known_best_finger_pred(self,f,knodes):
"""
If any of the nodes in knodes is a better candidate for the f's
predecessor finger, we replace.
"""
pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes)
self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\
(dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len))
def add_known_nodes(self,source_path_len,knodes):
"""
Add a set of known nodes to this node's finger candidate lists.
Take the change of path_len into account.
"""
# Update the path lengths:
updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\
for kn in knodes]
# Make sure the node self.ident is not inside:
updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\
updated_knodes))
for f in SUCC_FINGERS:
self.add_known_best_finger_succ(f,updated_knodes)
for f in PRED_FINGERS:
self.add_known_best_finger_pred(f,updated_knodes)
def get_known(self):
"""
Return a list of all known nodes.
Items in the list are unique.
"""
pool = set()
# Add neighbours:
pool.update(self.neighbours)
# Add fingers:
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_close(self):
"""
Return a list of the closest known nodes.
Close in the virtual sense, to self.ident,
and to the possible fingers on the Chord DHT.
"""
pool = set()
for f in SUCC_FINGERS:
pool.update(self.best_finger_succ[f])
for f in PRED_FINGERS:
pool.update(self.best_finger_pred[f])
return list(pool)
def get_best_succ_finger(self,f):
"""
Get the best successor for finger f.
"""
return min(self.best_finger_succ[f],\
key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident))
def get_best_pred_finger(self,f):
"""
Get the best predecessor for finger f.
"""
return min(self.best_finger_pred[f],\
key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f)))
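# A minimal usage sketch (hypothetical values; assumes the constants above and
# MAX_IDENT > 99):
#   nd = Node(fk=2, ident=10)
#   nd.set_neighbours([Knode(0, 25, 1), Knode(0, 99, 2)])
#   nd.get_best_succ_finger(0)   # -> Knode(1, 25, 1), closest to location 11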
# Simulation for a mesh network with Virtual DHT abilities:
class VirtualDHT():
def __init__(self,n,fk,nei):
# Amount of nodes:
self.num_nodes = n
# Roughly half the number of neighbours per node (links are symmetrized below):
self.nei = nei
# Known finger nodes parameter:
self.fk = fk
# Generate nodes and neighbours links:
self.gen_nodes()
self.rand_neighbours()
def
|
(self):
"""
Generate n nodes with random identity numbers.
"""
self.nodes = []
for i in range(self.num_nodes):
self.nodes.append(Node(self.fk))
def make_knode(self,i,path_len=0):
"""
Given an index i of a node in self.nodes,
create a Knode tuple. Optionally set path_len.
"""
return Knode(path_len=path_len,\
ident=self.nodes[i].ident,\
lindex=i)
def rand_neighbours(self):
"""
Randomize immediate neighbour links between the nodes.
"""
# Initialize neighbours sets as empty sets:
nodes_nei = [set() for _ in range(self.num_nodes)]
for i,nd in enumerate(self.nodes):
# Sample a set of indices (which represent a set of nodes).
# Those nodes will be nd's neighbours:
nodes_nei[i].update(\
random.sample(range(self.num_nodes),self.nei))
# Remove myself:
nodes_nei[i].discard(i)
# To make the graph undirected, we add i as a neighbour of all
# of i's neighbours:
for j in nodes_nei[i]:
nodes_nei[j].add(i)
for i,nd in enumerate(self.nodes):
# Initialize a list of neighbours:
nd.set_neighbours(map(self.make_knode,list(nodes_nei[i])))
def iter_node(self,i):
"""
Ask all known nodes for better known nodes.
i is the index of the node in self.nodes.
"""
nd = self.nodes[i]
for kn in nd.get_close():
# for kn in nd.get_known():
# for kn in nd.neighbours:
kn_node = self.nodes[kn.lindex]
nd.add_known_nodes(kn.path_len,kn_node.get_close())
def iter_all(self):
"""
Perform a full iteration, where all nodes ask other nodes for better
nodes.
"""
for i in range(self.num_nodes):
self.iter_node(i)
def converge(self,max_iters=0x10):
"""
"converge" the DHT by iterating until nothing changes.
"""
for i in range(max_iters):
self.iter_all()
print(".",end="",flush=True)
if self.verify():
print("\nReached correct succ and pred + fingers.")
return
print("\nmax_iters acheived.")
def verify_succ_pred_fingers(self):
"""
Verify the succ and pred fingers found for all nodes.
"""
# Get all nodes (as Knodes), and sort them according to ident:
lnodes = list(map(self.make_knode,range(self.num_nodes)))
lnodes.sort(key=lambda ln:ln.ident)
idents = [ln.ident for ln in lnodes]
for i,ln in enumerate(lnodes):
nd = self.nodes[ln.lindex]
for f in SUCC_FINGERS:
ind = bisect.bisect_left(\
idents,nd.get_finger_succ_loc(f))
f_succ = lnodes[(ind) % self.num_nodes]
if nd.get_best_succ_finger(f).ident != f_succ.ident:
return False
for f in PRED_FINGERS:
ind = bisect.bisect_right(\
idents,nd.get_finger_pred_loc(f))
f_pred = lnodes[(ind-1) % self.num_nodes]
if nd
|
gen_nodes
|
identifier_name
|
utils_test.go
|
FeegrantKeeper: suite.app.FeeGrantKeeper,
IBCKeeper: suite.app.IBCKeeper,
FeeMarketKeeper: suite.app.FeeMarketKeeper,
SignModeHandler: encodingConfig.TxConfig.SignModeHandler(),
SigGasConsumer: ante.DefaultSigVerificationGasConsumer,
})
suite.Require().NoError(err)
suite.anteHandler = anteHandler
suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID())
}
func TestAnteTestSuite(t *testing.T) {
suite.Run(t, &AnteTestSuite{
enableLondonHF: true,
})
}
func (s *AnteTestSuite) BuildTestEthTx(
from common.Address,
to common.Address,
amount *big.Int,
input []byte,
gasPrice *big.Int,
gasFeeCap *big.Int,
gasTipCap *big.Int,
accesses *ethtypes.AccessList,
) *evmtypes.MsgEthereumTx {
chainID := s.app.EvmKeeper.ChainID()
nonce := s.app.EvmKeeper.GetNonce(
s.ctx,
common.BytesToAddress(from.Bytes()),
)
msgEthereumTx := evmtypes.NewTx(
chainID,
nonce,
&to,
amount,
TestGasLimit,
gasPrice,
gasFeeCap,
gasTipCap,
input,
accesses,
)
msgEthereumTx.From = from.String()
return msgEthereumTx
}
// CreateTestTx is a helper function to create a tx given multiple inputs.
func (suite *AnteTestSuite) CreateTestTx(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) authsigning.Tx {
return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx()
}
// CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs.
func (suite *AnteTestSuite) CreateTestTxBuilder(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) client.TxBuilder {
var option *codectypes.Any
var err error
if len(unsetExtensionOptions) == 0 {
option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{})
suite.Require().NoError(err)
}
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder)
suite.Require().True(ok)
if len(unsetExtensionOptions) == 0 {
builder.SetExtensionOptions(option)
}
err = msg.Sign(suite.ethSigner, tests.NewSigner(priv))
suite.Require().NoError(err)
msg.From = ""
err = builder.SetMsgs(msg)
suite.Require().NoError(err)
txData, err := evmtypes.UnpackTxData(msg.Data)
suite.Require().NoError(err)
fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee())))
builder.SetFeeAmount(fees)
builder.SetGasLimit(msg.GetGas())
if signCosmosTx {
// First round: we gather all the signer infos. We use the "set empty
// signature" hack to do that.
sigV2 := signing.SignatureV2{
PubKey: priv.PubKey(),
Data: &signing.SingleSignatureData{
SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
Signature: nil,
},
Sequence: txData.GetNonce(),
}
sigsV2 := []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
// Second round: all signer infos are set, so each signer can sign.
signerData := authsigning.SignerData{
ChainID: suite.ctx.ChainID(),
AccountNumber: accNum,
Sequence: txData.GetNonce(),
}
sigV2, err = tx.SignWithPrivKey(
suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData,
txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(),
)
suite.Require().NoError(err)
sigsV2 = []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
}
return txBuilder
}
func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder {
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
txBuilder.SetGasLimit(TestGasLimit)
fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}}
txBuilder.SetFeeAmount(*fees)
err := txBuilder.SetMsgs(msgs...)
suite.Require().NoError(err)
return txBuilder
}
func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder {
// Build MsgSend
recipient := sdk.AccAddress(common.Address{}.Bytes())
msgSend := types2.NewMsgSend(from, recipient, sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(1))))
return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend)
}
func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgDelegate(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder {
// Build MsgDelegate
valEthAddr := tests.GenerateAddress()
valAddr := sdk.ValAddress(valEthAddr.Bytes())
msgSend := types3.NewMsgDelegate(from, valAddr, sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(20)))
return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend)
}
func (suite *AnteTestSuite) CreateTestEIP712CosmosTxBuilder(
from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins, msg sdk.Msg,
) client.TxBuilder {
var err error
nonce, err := suite.app.AccountKeeper.GetSequence(suite.ctx, from)
suite.Require().NoError(err)
pc, err := types.ParseChainID(chainId)
suite.Require().NoError(err)
ethChainId := pc.Uint64()
// Generate the EIP-712 TypedData
var ethermintCodec codec.ProtoCodecMarshaler
fee := legacytx.NewStdFee(gas, gasAmount)
accNumber := suite.app.AccountKeeper.GetAccount(suite.ctx, from).GetAccountNumber()
data := legacytx.StdSignBytes(chainId, accNumber, nonce, 0, fee, []sdk.Msg{msg}, "", nil)
typedData, err := eip712.WrapTxToTypedData(ethermintCodec, ethChainId, msg, data, &eip712.FeeDelegationOptions{
FeePayer: from,
})
suite.Require().NoError(err)
sigHash, err := eip712.ComputeTypedDataHash(typedData)
suite.Require().NoError(err)
// Sign typedData
keyringSigner := tests.NewSigner(priv)
signature, pubKey, err := keyringSigner.SignByAddress(from, sigHash)
suite.Require().NoError(err)
signature[crypto.RecoveryIDOffset] += 27 // Transform V from 0/1 to 27/28 according to the yellow paper
// Add ExtensionOptionsWeb3Tx extension
var option *codectypes.Any
option, err = codectypes.NewAnyWithValue(&types.ExtensionOptionsWeb3Tx{
FeePayer: from.String(),
TypedDataChainID: ethChainId,
FeePayerSig: signature,
})
suite.Require().NoError(err)
suite.clientCtx.TxConfig.SignModeHandler()
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder)
suite.Require().True(ok)
builder.SetExtensionOptions(option)
builder.SetFeeAmount(gasAmount)
builder.SetGasLimit(gas)
sigsV2 := signing.SignatureV2{
PubKey: pubKey,
Data: &signing.SingleSignatureData{
SignMode: signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON,
},
Sequence: nonce,
}
err = builder.SetSignatures(sigsV2)
suite.Require().NoError(err)
err = builder.SetMsgs(msg)
suite.Require().NoError(err)
return builder
}
func NextFn(ctx sdk.Context, _ sdk.Tx, _ bool) (sdk.Context, error) {
return ctx, nil
}
var _ sdk.Tx = &invalidTx{}
type invalidTx struct{}
func (invalidTx)
|
GetMsgs
|
identifier_name
|
|
utils_test.go
|
.com/evmos/ethermint/encoding"
"github.com/evmos/ethermint/tests"
"github.com/evmos/ethermint/x/evm/statedb"
evmtypes "github.com/evmos/ethermint/x/evm/types"
feemarkettypes "github.com/evmos/ethermint/x/feemarket/types"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
)
type AnteTestSuite struct {
suite.Suite
ctx sdk.Context
app *app.EthermintApp
clientCtx client.Context
anteHandler sdk.AnteHandler
ethSigner ethtypes.Signer
enableFeemarket bool
enableLondonHF bool
evmParamsOption func(*evmtypes.Params)
}
const TestGasLimit uint64 = 100000
func (suite *AnteTestSuite) StateDB() *statedb.StateDB {
return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes())))
}
func (suite *AnteTestSuite) SetupTest() {
checkTx := false
suite.app = app.Setup(checkTx, func(app *app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState {
if suite.enableFeemarket {
// setup feemarketGenesis params
feemarketGenesis := feemarkettypes.DefaultGenesisState()
feemarketGenesis.Params.EnableHeight = 1
feemarketGenesis.Params.NoBaseFee = false
// Verify feeMarket genesis
err := feemarketGenesis.Validate()
suite.Require().NoError(err)
genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis)
}
evmGenesis := evmtypes.DefaultGenesisState()
evmGenesis.Params.AllowUnprotectedTxs = false
if !suite.enableLondonHF {
maxInt := sdkmath.NewInt(math.MaxInt64)
evmGenesis.Params.ChainConfig.LondonBlock = &maxInt
evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt
evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt
evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt
}
if suite.evmParamsOption != nil {
suite.evmParamsOption(&evmGenesis.Params)
}
genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis)
return genesis
})
suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()})
suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt())))
suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000))
suite.app.EvmKeeper.WithChainID(suite.ctx)
infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter())
suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams())
encodingConfig := encoding.MakeConfig(app.ModuleBasics)
|
// We're using TestMsg amino encoding in some tests, so register it here.
encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil)
suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig)
anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{
AccountKeeper: suite.app.AccountKeeper,
BankKeeper: suite.app.BankKeeper,
EvmKeeper: suite.app.EvmKeeper,
FeegrantKeeper: suite.app.FeeGrantKeeper,
IBCKeeper: suite.app.IBCKeeper,
FeeMarketKeeper: suite.app.FeeMarketKeeper,
SignModeHandler: encodingConfig.TxConfig.SignModeHandler(),
SigGasConsumer: ante.DefaultSigVerificationGasConsumer,
})
suite.Require().NoError(err)
suite.anteHandler = anteHandler
suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID())
}
func TestAnteTestSuite(t *testing.T) {
suite.Run(t, &AnteTestSuite{
enableLondonHF: true,
})
}
func (s *AnteTestSuite) BuildTestEthTx(
from common.Address,
to common.Address,
amount *big.Int,
input []byte,
gasPrice *big.Int,
gasFeeCap *big.Int,
gasTipCap *big.Int,
accesses *ethtypes.AccessList,
) *evmtypes.MsgEthereumTx {
chainID := s.app.EvmKeeper.ChainID()
nonce := s.app.EvmKeeper.GetNonce(
s.ctx,
common.BytesToAddress(from.Bytes()),
)
msgEthereumTx := evmtypes.NewTx(
chainID,
nonce,
&to,
amount,
TestGasLimit,
gasPrice,
gasFeeCap,
gasTipCap,
input,
accesses,
)
msgEthereumTx.From = from.String()
return msgEthereumTx
}
// CreateTestTx is a helper function to create a tx given multiple inputs.
func (suite *AnteTestSuite) CreateTestTx(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) authsigning.Tx {
return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx()
}
// CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs.
func (suite *AnteTestSuite) CreateTestTxBuilder(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) client.TxBuilder {
var option *codectypes.Any
var err error
if len(unsetExtensionOptions) == 0 {
option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{})
suite.Require().NoError(err)
}
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder)
suite.Require().True(ok)
if len(unsetExtensionOptions) == 0 {
builder.SetExtensionOptions(option)
}
err = msg.Sign(suite.ethSigner, tests.NewSigner(priv))
suite.Require().NoError(err)
msg.From = ""
err = builder.SetMsgs(msg)
suite.Require().NoError(err)
txData, err := evmtypes.UnpackTxData(msg.Data)
suite.Require().NoError(err)
fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee())))
builder.SetFeeAmount(fees)
builder.SetGasLimit(msg.GetGas())
if signCosmosTx {
// First round: we gather all the signer infos. We use the "set empty
// signature" hack to do that.
sigV2 := signing.SignatureV2{
PubKey: priv.PubKey(),
Data: &signing.SingleSignatureData{
SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
Signature: nil,
},
Sequence: txData.GetNonce(),
}
sigsV2 := []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
// Second round: all signer infos are set, so each signer can sign.
signerData := authsigning.SignerData{
ChainID: suite.ctx.ChainID(),
AccountNumber: accNum,
Sequence: txData.GetNonce(),
}
sigV2, err = tx.SignWithPrivKey(
suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData,
txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(),
)
suite.Require().NoError(err)
sigsV2 = []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
}
return txBuilder
}
func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder {
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
txBuilder.SetGasLimit(TestGasLimit)
fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}}
txBuilder.SetFeeAmount(*fees)
err := txBuilder.SetMsgs(msgs...)
suite.Require().NoError(err)
return txBuilder
}
func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder {
// Build MsgSend
recipient := sdk.AccAddress(common.Address{}.Bytes())
msgSend :=
|
random_line_split
|
|
utils_test.go
|
/evmos/ethermint/encoding"
"github.com/evmos/ethermint/tests"
"github.com/evmos/ethermint/x/evm/statedb"
evmtypes "github.com/evmos/ethermint/x/evm/types"
feemarkettypes "github.com/evmos/ethermint/x/feemarket/types"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
)
type AnteTestSuite struct {
suite.Suite
ctx sdk.Context
app *app.EthermintApp
clientCtx client.Context
anteHandler sdk.AnteHandler
ethSigner ethtypes.Signer
enableFeemarket bool
enableLondonHF bool
evmParamsOption func(*evmtypes.Params)
}
const TestGasLimit uint64 = 100000
func (suite *AnteTestSuite) StateDB() *statedb.StateDB {
return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes())))
}
func (suite *AnteTestSuite) SetupTest() {
checkTx := false
suite.app = app.Setup(checkTx, func(app *app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState {
if suite.enableFeemarket {
// setup feemarketGenesis params
feemarketGenesis := feemarkettypes.DefaultGenesisState()
feemarketGenesis.Params.EnableHeight = 1
feemarketGenesis.Params.NoBaseFee = false
// Verify feeMarket genesis
err := feemarketGenesis.Validate()
suite.Require().NoError(err)
genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis)
}
evmGenesis := evmtypes.DefaultGenesisState()
evmGenesis.Params.AllowUnprotectedTxs = false
if !suite.enableLondonHF {
maxInt := sdkmath.NewInt(math.MaxInt64)
evmGenesis.Params.ChainConfig.LondonBlock = &maxInt
evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt
evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt
evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt
}
if suite.evmParamsOption != nil {
suite.evmParamsOption(&evmGenesis.Params)
}
genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis)
return genesis
})
suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()})
suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt())))
suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000))
suite.app.EvmKeeper.WithChainID(suite.ctx)
infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter())
suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams())
encodingConfig := encoding.MakeConfig(app.ModuleBasics)
// We're using TestMsg amino encoding in some tests, so register it here.
encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil)
suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig)
anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{
AccountKeeper: suite.app.AccountKeeper,
BankKeeper: suite.app.BankKeeper,
EvmKeeper: suite.app.EvmKeeper,
FeegrantKeeper: suite.app.FeeGrantKeeper,
IBCKeeper: suite.app.IBCKeeper,
FeeMarketKeeper: suite.app.FeeMarketKeeper,
SignModeHandler: encodingConfig.TxConfig.SignModeHandler(),
SigGasConsumer: ante.DefaultSigVerificationGasConsumer,
})
suite.Require().NoError(err)
suite.anteHandler = anteHandler
suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID())
}
func TestAnteTestSuite(t *testing.T) {
suite.Run(t, &AnteTestSuite{
enableLondonHF: true,
})
}
func (s *AnteTestSuite) BuildTestEthTx(
from common.Address,
to common.Address,
amount *big.Int,
input []byte,
gasPrice *big.Int,
gasFeeCap *big.Int,
gasTipCap *big.Int,
accesses *ethtypes.AccessList,
) *evmtypes.MsgEthereumTx {
chainID := s.app.EvmKeeper.ChainID()
nonce := s.app.EvmKeeper.GetNonce(
s.ctx,
common.BytesToAddress(from.Bytes()),
)
msgEthereumTx := evmtypes.NewTx(
chainID,
nonce,
&to,
amount,
TestGasLimit,
gasPrice,
gasFeeCap,
gasTipCap,
input,
accesses,
)
msgEthereumTx.From = from.String()
return msgEthereumTx
}
// CreateTestTx is a helper function to create a tx given multiple inputs.
func (suite *AnteTestSuite) CreateTestTx(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) authsigning.Tx {
return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx()
}
// CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs.
func (suite *AnteTestSuite) CreateTestTxBuilder(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) client.TxBuilder {
var option *codectypes.Any
var err error
if len(unsetExtensionOptions) == 0 {
option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{})
suite.Require().NoError(err)
}
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder)
suite.Require().True(ok)
if len(unsetExtensionOptions) == 0 {
builder.SetExtensionOptions(option)
}
err = msg.Sign(suite.ethSigner, tests.NewSigner(priv))
suite.Require().NoError(err)
msg.From = ""
err = builder.SetMsgs(msg)
suite.Require().NoError(err)
txData, err := evmtypes.UnpackTxData(msg.Data)
suite.Require().NoError(err)
fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee())))
builder.SetFeeAmount(fees)
builder.SetGasLimit(msg.GetGas())
if signCosmosTx
|
ChainID: suite.ctx.ChainID(),
AccountNumber: accNum,
Sequence: txData.GetNonce(),
}
sigV2, err = tx.SignWithPrivKey(
suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData,
txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(),
)
suite.Require().NoError(err)
sigsV2 = []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
}
return txBuilder
}
func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder {
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
txBuilder.SetGasLimit(TestGasLimit)
fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}}
txBuilder.SetFeeAmount(*fees)
err := txBuilder.SetMsgs(msgs...)
suite.Require().NoError(err)
return txBuilder
}
func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder {
// Build MsgSend
recipient := sdk.AccAddress(common.Address{}.Bytes())
msgSend :=
|
{
// First round: we gather all the signer infos. We use the "set empty
// signature" hack to do that.
sigV2 := signing.SignatureV2{
PubKey: priv.PubKey(),
Data: &signing.SingleSignatureData{
SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
Signature: nil,
},
Sequence: txData.GetNonce(),
}
sigsV2 := []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
// Second round: all signer infos are set, so each signer can sign.
signerData := authsigning.SignerData{
|
conditional_block
|
utils_test.go
|
github.com/evmos/ethermint/x/feemarket/types"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
)
type AnteTestSuite struct {
suite.Suite
ctx sdk.Context
app *app.EthermintApp
clientCtx client.Context
anteHandler sdk.AnteHandler
ethSigner ethtypes.Signer
enableFeemarket bool
enableLondonHF bool
evmParamsOption func(*evmtypes.Params)
}
const TestGasLimit uint64 = 100000
func (suite *AnteTestSuite) StateDB() *statedb.StateDB {
return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes())))
}
func (suite *AnteTestSuite) SetupTest() {
checkTx := false
suite.app = app.Setup(checkTx, func(app *app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState {
if suite.enableFeemarket {
// setup feemarketGenesis params
feemarketGenesis := feemarkettypes.DefaultGenesisState()
feemarketGenesis.Params.EnableHeight = 1
feemarketGenesis.Params.NoBaseFee = false
// Verify feeMarket genesis
err := feemarketGenesis.Validate()
suite.Require().NoError(err)
genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis)
}
evmGenesis := evmtypes.DefaultGenesisState()
evmGenesis.Params.AllowUnprotectedTxs = false
if !suite.enableLondonHF {
maxInt := sdkmath.NewInt(math.MaxInt64)
evmGenesis.Params.ChainConfig.LondonBlock = &maxInt
evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt
evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt
evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt
}
if suite.evmParamsOption != nil {
suite.evmParamsOption(&evmGenesis.Params)
}
genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis)
return genesis
})
suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()})
suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt())))
suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000))
suite.app.EvmKeeper.WithChainID(suite.ctx)
infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter())
suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams())
encodingConfig := encoding.MakeConfig(app.ModuleBasics)
// We're using TestMsg amino encoding in some tests, so register it here.
encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil)
suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig)
anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{
AccountKeeper: suite.app.AccountKeeper,
BankKeeper: suite.app.BankKeeper,
EvmKeeper: suite.app.EvmKeeper,
FeegrantKeeper: suite.app.FeeGrantKeeper,
IBCKeeper: suite.app.IBCKeeper,
FeeMarketKeeper: suite.app.FeeMarketKeeper,
SignModeHandler: encodingConfig.TxConfig.SignModeHandler(),
SigGasConsumer: ante.DefaultSigVerificationGasConsumer,
})
suite.Require().NoError(err)
suite.anteHandler = anteHandler
suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID())
}
func TestAnteTestSuite(t *testing.T) {
suite.Run(t, &AnteTestSuite{
enableLondonHF: true,
})
}
func (s *AnteTestSuite) BuildTestEthTx(
from common.Address,
to common.Address,
amount *big.Int,
input []byte,
gasPrice *big.Int,
gasFeeCap *big.Int,
gasTipCap *big.Int,
accesses *ethtypes.AccessList,
) *evmtypes.MsgEthereumTx {
chainID := s.app.EvmKeeper.ChainID()
nonce := s.app.EvmKeeper.GetNonce(
s.ctx,
common.BytesToAddress(from.Bytes()),
)
msgEthereumTx := evmtypes.NewTx(
chainID,
nonce,
&to,
amount,
TestGasLimit,
gasPrice,
gasFeeCap,
gasTipCap,
input,
accesses,
)
msgEthereumTx.From = from.String()
return msgEthereumTx
}
// CreateTestTx is a helper function to create a tx given multiple inputs.
func (suite *AnteTestSuite) CreateTestTx(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) authsigning.Tx {
return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx()
}
// CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs.
func (suite *AnteTestSuite) CreateTestTxBuilder(
msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool,
unsetExtensionOptions ...bool,
) client.TxBuilder {
var option *codectypes.Any
var err error
if len(unsetExtensionOptions) == 0 {
option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{})
suite.Require().NoError(err)
}
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder)
suite.Require().True(ok)
if len(unsetExtensionOptions) == 0 {
builder.SetExtensionOptions(option)
}
err = msg.Sign(suite.ethSigner, tests.NewSigner(priv))
suite.Require().NoError(err)
msg.From = ""
err = builder.SetMsgs(msg)
suite.Require().NoError(err)
txData, err := evmtypes.UnpackTxData(msg.Data)
suite.Require().NoError(err)
fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee())))
builder.SetFeeAmount(fees)
builder.SetGasLimit(msg.GetGas())
if signCosmosTx {
// First round: we gather all the signer infos. We use the "set empty
// signature" hack to do that.
sigV2 := signing.SignatureV2{
PubKey: priv.PubKey(),
Data: &signing.SingleSignatureData{
SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(),
Signature: nil,
},
Sequence: txData.GetNonce(),
}
sigsV2 := []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
// Second round: all signer infos are set, so each signer can sign.
signerData := authsigning.SignerData{
ChainID: suite.ctx.ChainID(),
AccountNumber: accNum,
Sequence: txData.GetNonce(),
}
sigV2, err = tx.SignWithPrivKey(
suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData,
txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(),
)
suite.Require().NoError(err)
sigsV2 = []signing.SignatureV2{sigV2}
err = txBuilder.SetSignatures(sigsV2...)
suite.Require().NoError(err)
}
return txBuilder
}
func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder {
txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
txBuilder.SetGasLimit(TestGasLimit)
fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}}
txBuilder.SetFeeAmount(*fees)
err := txBuilder.SetMsgs(msgs...)
suite.Require().NoError(err)
return txBuilder
}
func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder
|
{
// Build MsgSend
recipient := sdk.AccAddress(common.Address{}.Bytes())
msgSend := types2.NewMsgSend(from, recipient, sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(1))))
return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend)
}
|
identifier_body
|
|
RealTimePlotTemplate.py
|
663e-07, 1.68822071e-07, 2.43800712e-04]])
B = [-28.43905915, 51.22161875, -72.33527491]
global S,B
def BLEconnection(connNode,addr,connType,iface):
''' Do the BLE connection. '''
connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface)
connNode.Peripheral.setDelegate(MyDelegate(connNode))
magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C))
calibrationData = [magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]]
connNode.magCalibration = Uint8Tofloat(calibrationData)
connNode.accBias = [-0.039746094, -0.012792969, -0.056347656]
connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597]
# connNode.magBias = [57.712502, 27.521484, -37.898438 ]
# connNode.magScale = [0.990893, 1.042146, 0.969697]
connNode.magBias = [52.190625, 26.627929687499996, -24.46171875]
connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602]
#home
# connNode.magBias = [48.312499, 41.460943, -21.877735 ]
# connNode.magScale = [1.005747, 1.009227, 0.985360]
print("accScales: ",S)
print("accBias: ",B)
# print("gyroBias: ",connNode.gyroBias)
print("magBias: ",connNode.magBias)
print("magScale: ",connNode.magScale)
print("magCalibration: ",connNode.magCalibration)
print("connect successfully")
#connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration)
#iface = (iface + 1) % numOfDongle + 1
#Try to get Service , Characteristic and set notification
try:
#need to add 0000fed0-0000-1000-8000-00805f9b34fb
service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb")
char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0]
connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True)
except:
print("get service, characteristic or set notification failed")
def ScanProcess(iface=0):
|
def struct_isqrt(number):
threehalfs = 1.5
x2 = number * 0.5
y = number
packed_y = struct.pack('f', y)
i = struct.unpack('i', packed_y)[0] # treat float's bytes as int
i = 0x5f3759df - (i >> 1) # arithmetic with magic number
packed_i = struct.pack('i', i)
y = struct.unpack('f', packed_i)[0] # treat int's bytes as float
y = y * (threehalfs - (x2 * y * y)) # Newton's method
return y
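# Quick sanity check (a sketch; one Newton step keeps the error well under 1%):
#   struct_isqrt(4.0)  # ~0.499, vs. the exact 1/sqrt(4) = 0.5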
def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic):
data=[[],[],[],[],[],[]]
windowsLen = []
# print "xxxxxxxxxxxxxxxx"
while True:
# continue
tEnd = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
windowsLen.append([0,1])
windowsLen.append([0,1])
windowsLen.append([0,1])
isStatic.value = True
# data[3].append(plot4.value)
# data[4].append(plot5.value)
# data[5].append(plot6.value)
# data[6].append(timestamp.value)
#,isCapturing.value
plotMyData.setMyData(data,windowsLen) #isCapturing.value
data=[[],[],[],[],[],[],[]]
windowsLen = []
# tStart = time.time()
def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag):
data=[[],[],[],[],[],[],[]]
windowsLen = []
while True:
tStart = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
data[6]= Timestamp[0:endIdx]
plotMyData.setMyData(data,isCapturing.value)
data=[[],[],[],[],[],[],[],[]]
windowsLen = []
tStart = time.time()
isStatic.value = True
def GetBacicData(node,addr,connType,mahony,iface):
'''Get the stopping threshold and the gyro calibration.
Args:
node :
addr : sensor ble address
connType : public/random
iface : which dongle you use to construct the connection
'''
yawCalibration=0.0
BLEconnection(node,addr,connType,iface=iface)
count = 0
gravity = 0
staticLinearAcc = []
staticLinearGyo = []
print "Do not moving!!!"
while count!= 300:
if node.Peripheral.waitForNotifications(0.01):
count = count + 1
rawdata = node.noti
node.gyroBias[0] += rawdata[3]
node.gyroBias[1] += rawdata[4]
node.gyroBias[2] += rawdata[5]
node.gyro
|
'''Scan '''
scanner = btle.Scanner(iface)
while True :
print("Still scanning... count: %s" % 1)
try:
devices = scanner.scan(timeout = 3)
# print(devices)
for dev in devices:
# print("xx")
if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98
print("devices %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi))
return
#Try to create connection
except:
print("failed scan")
exit()
|
identifier_body
|
RealTimePlotTemplate.py
|
(self):
self.Peripheral = None
self.nodeCube = None
self.drawWindowNumber = -1
self.accBias = [0.0,0.0,0.0]
self.gyroBias = [0.0,0.0,0.0]
self.magBias = [0.0,0.0,0.0]
self.magScale = [0.0,0.0,0.0]
self.magCalibration = [0.0,0.0,0.0]
self.noti = None
self.fail_notify=0
self.workingtime=0.0
self.datagram=[]
self.seq=0
self.count_received_data=0
S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07],
[ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07],
[ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]])
B = [-28.43905915, 51.22161875, -72.33527491]
global S,B
def BLEconnection(connNode,addr,connType,iface):
''' Do the BLE connection. '''
connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface)
connNode.Peripheral.setDelegate(MyDelegate(connNode))
magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C))
calibrationData = [magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]]
connNode.magCalibration = Uint8Tofloat(calibrationData)
connNode.accBias = [-0.039746094, -0.012792969, -0.056347656]
connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597]
# connNode.magBias = [57.712502, 27.521484, -37.898438 ]
# connNode.magScale = [0.990893, 1.042146, 0.969697]
connNode.magBias = [52.190625, 26.627929687499996, -24.46171875]
connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602]
#home
# connNode.magBias = [48.312499, 41.460943, -21.877735 ]
# connNode.magScale = [1.005747, 1.009227, 0.985360]
print("accScales: ",S)
print("accBias: ",B)
# print("gyroBias: ",connNode.gyroBias)
print("magBias: ",connNode.magBias)
print("magScale: ",connNode.magScale)
print("magCalibration: ",connNode.magCalibration)
print("connect successfully")
#connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration)
#iface = (iface + 1) % numOfDongle + 1
#Try to get Service , Characteristic and set notification
try:
#need to add 0000fed0-0000-1000-8000-00805f9b34fb
service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb")
char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0]
connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True)
except:
print("get service, characteristic or set notification failed")
def ScanProcess(iface=0):
'''Scan '''
scanner = btle.Scanner(iface)
while True :
print("Still scanning... count: %s" % 1)
try:
devices = scanner.scan(timeout = 3)
# print(devices)
for dev in devices:
# print("xx")
if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98
print("devices %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi))
return
#Try to create connection
except:
print("failed scan")
exit()
def struct_isqrt(number):
threehalfs = 1.5
x2 = number * 0.5
y = number
packed_y = struct.pack('f', y)
i = struct.unpack('i', packed_y)[0] # treat float's bytes as int
i = 0x5f3759df - (i >> 1) # arithmetic with magic number
packed_i = struct.pack('i', i)
y = struct.unpack('f', packed_i)[0] # treat int's bytes as float
y = y * (threehalfs - (x2 * y * y)) # Newton's method
return y
def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic):
data=[[],[],[],[],[],[]]
windowsLen = []
# print "xxxxxxxxxxxxxxxx"
while True:
# continue
tEnd = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
windowsLen.append([0,1])
windowsLen.append([0,1])
windowsLen.append([0,1])
isStatic.value = True
# data[3].append(plot4.value)
# data[4].append(plot5.value)
# data[5].append(plot6.value)
# data[6].append(timestamp.value)
#,isCapturing.value
plotMyData.setMyData(data,windowsLen) #isCapturing.value
data=[[],[],[],[],[],[],[]]
windowsLen = []
# tStart = time.time()
def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag):
data=[[],[],[],[],[],[],[]]
windowsLen = []
while True:
tStart = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5
|
__init__
|
identifier_name
|
|
RealTimePlotTemplate.py
|
663e-07, 1.68822071e-07, 2.43800712e-04]])
B = [-28.43905915, 51.22161875, -72.33527491]
global S,B
def BLEconnection(connNode,addr,connType,iface):
''' Do the BLE connection. '''
connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface)
connNode.Peripheral.setDelegate(MyDelegate(connNode))
magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C))
calibrationData = [magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]]
connNode.magCalibration = Uint8Tofloat(calibrationData)
connNode.accBias = [-0.039746094, -0.012792969, -0.056347656]
connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597]
# connNode.magBias = [57.712502, 27.521484, -37.898438 ]
# connNode.magScale = [0.990893, 1.042146, 0.969697]
connNode.magBias = [52.190625, 26.627929687499996, -24.46171875]
connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602]
#home
# connNode.magBias = [48.312499, 41.460943, -21.877735 ]
# connNode.magScale = [1.005747, 1.009227, 0.985360]
print("accScales: ",S)
print("accBias: ",B)
# print("gyroBias: ",connNode.gyroBias)
print("magBias: ",connNode.magBias)
print("magScale: ",connNode.magScale)
print("magCalibration: ",connNode.magCalibration)
print("connect successfully")
#connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration)
#iface = (iface + 1) % numOfDongle + 1
#Try to get Service , Characteristic and set notification
try:
#need to add 0000fed0-0000-1000-8000-00805f9b34fb
service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb")
char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0]
connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True)
except:
print("get service, characteristic or set notification failed")
def ScanProcess(iface=0):
'''Scan '''
scanner = btle.Scanner(iface)
while True :
print("Still scanning... count: %s" % 1)
try:
devices = scanner.scan(timeout = 3)
# print(devices)
for dev in devices:
# print("xx")
if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98
print("devices %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi))
return
#Try to create connection
except:
print("failed scan")
exit()
def struct_isqrt(number):
threehalfs = 1.5
x2 = number * 0.5
y = number
packed_y = struct.pack('f', y)
i = struct.unpack('i', packed_y)[0] # treat float's bytes as int
i = 0x5f3759df - (i >> 1) # arithmetic with magic number
packed_i = struct.pack('i', i)
y = struct.unpack('f', packed_i)[0] # treat int's bytes as float
y = y * (threehalfs - (x2 * y * y)) # Newton's method
return y
def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic):
data=[[],[],[],[],[],[]]
windowsLen = []
# print "xxxxxxxxxxxxxxxx"
while True:
# continue
tEnd = time.time()
while isStatic.value == True:
|
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
windowsLen.append([0,1])
windowsLen.append([0,1])
windowsLen.append([0,1])
isStatic.value = True
# data[3].append(plot4.value)
# data[4].append(plot5.value)
# data[5].append(plot6.value)
# data[6].append(timestamp.value)
#,isCapturing.value
plotMyData.setMyData(data,windowsLen) #isCapturing.value
data=[[],[],[],[],[],[],[]]
windowsLen = []
# tStart = time.time()
def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag):
data=[[],[],[],[],[],[],[]]
windowsLen = []
while True:
tStart = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
data[6]= Timestamp[0:endIdx]
plotMyData.setMyData(data,isCapturing.value)
data=[[],[],[],[],[],[],[],[]]
windowsLen = []
tStart = time.time()
isStatic.value = True
def GetBacicData(node,addr,connType,mahony,iface):
'''Get the stopping threshold and the gyro calibration.
Args:
node :
addr : sensor ble address
connType : public/random
iface : which dongle you use to construct the connection
'''
yawCalibration=0.0
BLEconnection(node,addr,connType,iface=iface)
count = 0
gravity = 0
staticLinearAcc = []
staticLinearGyo = []
print "Do not moving!!!"
while count!= 300:
if node.Peripheral.waitForNotifications(0.01):
count = count + 1
rawdata = node.noti
node.gyroBias[0] += rawdata[3]
node.gyroBias[1] += rawdata[4]
node.gyroBias[2] += rawdata[5]
node.gyro
|
pass
|
conditional_block
|
RealTimePlotTemplate.py
|
self.noti = None
self.fail_notify=0
self.workingtime=0.0
self.datagram=[]
self.seq=0
self.count_received_data=0
S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07],
[ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07],
[ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]])
B = [-28.43905915, 51.22161875, -72.33527491]
global S,B
def BLEconnection(connNode,addr,connType,iface):
''' Do the BLE connection. '''
connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface)
connNode.Peripheral.setDelegate(MyDelegate(connNode))
magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C))
calibrationData = [magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]]
connNode.magCalibration = Uint8Tofloat(calibrationData)
connNode.accBias = [-0.039746094, -0.012792969, -0.056347656]
connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597]
# connNode.magBias = [57.712502, 27.521484, -37.898438 ]
# connNode.magScale = [0.990893, 1.042146, 0.969697]
connNode.magBias = [52.190625, 26.627929687499996, -24.46171875]
connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602]
#home
# connNode.magBias = [48.312499, 41.460943, -21.877735 ]
# connNode.magScale = [1.005747, 1.009227, 0.985360]
print("accScales: ",S)
print("accBias: ",B)
# print("gyroBias: ",connNode.gyroBias)
print("magBias: ",connNode.magBias)
print("magScale: ",connNode.magScale)
print("magCalibration: ",connNode.magCalibration)
print("connect successfully")
#connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration)
#iface = (iface + 1) % numOfDongle + 1
#Try to get Service , Characteristic and set notification
try:
#need to add 0000fed0-0000-1000-8000-00805f9b34fb
service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb")
char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0]
connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True)
except:
print("get service, characteristic or set notification failed")
def ScanProcess(iface=0):
'''Scan '''
scanner = btle.Scanner(iface)
while True :
print("Still scanning... count: %s" % 1)
try:
devices = scanner.scan(timeout = 3)
# print(devices)
for dev in devices:
# print("xx")
if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98
print("devices %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi))
return
#Try to create connection
except:
print("failed scan")
exit()
def struct_isqrt(number):
threehalfs = 1.5
x2 = number * 0.5
y = number
packed_y = struct.pack('f', y)
i = struct.unpack('i', packed_y)[0] # treat float's bytes as int
i = 0x5f3759df - (i >> 1) # arithmetic with magic number
packed_i = struct.pack('i', i)
y = struct.unpack('f', packed_i)[0] # treat int's bytes as float
y = y * (threehalfs - (x2 * y * y)) # Newton's method
return y
def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic):
data=[[],[],[],[],[],[]]
windowsLen = []
# print "xxxxxxxxxxxxxxxx"
while True:
# continue
tEnd = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
windowsLen.append([0,1])
windowsLen.append([0,1])
windowsLen.append([0,1])
isStatic.value = True
# data[3].append(plot4.value)
# data[4].append(plot5.value)
# data[5].append(plot6.value)
# data[6].append(timestamp.value)
#,isCapturing.value
plotMyData.setMyData(data,windowsLen) #isCapturing.value
data=[[],[],[],[],[],[],[]]
windowsLen = []
# tStart = time.time()
def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag):
data=[[],[],[],[],[],[],[]]
windowsLen = []
while True:
tStart = time.time()
while isStatic.value == True:
pass
if resetFlag.value == True:
plotMyData.ResetGraph()
resetFlag.value = False
endIdx = Idx.value
data[0]= plot1[0:endIdx]
data[1]= plot2[0:endIdx]
data[2]= plot3[0:endIdx]
data[3]= plot4[0:endIdx]
data[4]= plot5[0:endIdx]
data[5]= plot6[0:endIdx]
data[6]= Timestamp[0:endIdx]
plotMyData.setMyData
|
self.accBias = [0.0,0.0,0.0]
self.gyroBias = [0.0,0.0,0.0]
self.magBias = [0.0,0.0,0.0]
self.magScale = [0.0,0.0,0.0]
self.magCalibration = [0.0,0.0,0.0]
|
random_line_split
|
|
main.rs
|
seem simple, but the behavior of code can be
// unexpected in more complicated situations when we want to
// have multiple variables use the data that's been allocated
// on the heap.
//
// + Ways variables and data interact: Move
// Multiple variables can interact with the same data in different
// ways in rust:
// let x = 5;
// let y = x;
//
// So here, we bind the value of 5 to x, then we make a copy
// of the value in x and bind it to y.
// We now have two vars, x and y, and both equal 5.
// This is exactly what's happening because integers are simple
// values with a known, fixed size, and these two 5 vals are
// pushed onto the stack.
//
// let a = String::from("hello");
// let b = a;
//
// This looks similar and you'd probably assume it behaves the same
// way; the second would make a copy of the val in a and bind it to b.
// This is not what happens.
//
// Under the covers, a String is actually a type with a few values:
// ptr to some memory holding the string, a length, and a capacity.
// This group is stored on the stack. The length is how much memory
// in bytes the contents of the String is currently using.
// The capacity is the total amount of memory, in bytes, the String
// has received from the OS. Difference between len and cap matters,
// but not the point of this.
//
// When a is assigned to b, the String data is copied, meaning we copy
// the pointer, the len, and the cap on the stack. The heap data is
// not copied, so b's pointer is going to refer to the same heap
// data that a does.
//
// Earlier we said when a variable goes out of scope, Rust will
// automatically call the drop function and clean up the heap mem.
// But in this case, both pointers are pointing to the same heap
// memory. This is a problem. When a and b go out of scope, they
// will both attempt to free the same memory. This is a *double free*
// error and is one of the memory safety bugs we mentioned previously.
// Freeing mem twice can lead to mem corruption, which can lead
// to security vulnerabilities.
//
// To ensure mem safety, there's another detail to what happens in
// this situation in Rust. Instead of trying to copy the allocated
// memory, Rust considers a to no longer be valid and therefore, Rust
// doesn't need to free anything when a goes out of scope.
//
// If you were to try to use a after copying it to b, an error
// is thrown at compile time.
//
// The ideas of "shallow copy" and "deep copy" apply here. The
// concept of copying the pointer, length, and capacity without
// copying the data sounds like a shallow copy. But because Rust
// also invalidates the first variable, instead of calling this
// a shallow copy, it's known as a *move*. We would read this by
// saying that a was *moved* into b.
//
// This solves the problem, because with only b as valid, when it
// goes out of scope, it alone will free the mem.
//
// There is an additional design choice implied by this:
// ** Rust will never automatically create "deep" copies of data. **
// Therefore, any *automatic* copying can be assumed to be
// inexpensive in terms of runtime performance.
//
// + Ways variables and data interact: Clones
//
// If you *do* want to deeply copy the heap data of a String, not
// just the stack data, a common method can be used called a *clone*.
//
// let a = String::from("hello");
// let b = a.clone();
//
// When you see a clone call, you know some arbitrary code is being
// executed and that code may be expensive. It's an indicator that
// something different is going on.
//
// + Stack-only data: Copy
// There's another wrinkle we haven't talked about yet. This code
// is using integers:
// let x = 5;
// let y = x;
//
// println!("x = {}, y = {}", x, y);
//
// This seems to contradict what we just said: we didn't call
// clone, but x is still valid and wasn't moved into y.
//
// The reason is types like integers that have a known size at
// compile time are stored *entirely* on the stack, so copies
// of the actual values are very quick to make. There's no reason
// we would want to prevent x from being valid after we create the
// variable y.
//
// In other words, there's no difference between deep and shallow
// copying here, so calling clone wouldn't do anything different
// from the usual shallow copying and we can leave it out.
//
// Rust has a special annotation called the Copy trait that can
// be placed on types like integers that are stored on the stack.
// If a type has the Copy trait, an older variable is still usable
// after assignment. Rust won't let us annotate a type with the Copy
// trait if the type, or any of its parts, has implemented the Drop
// trait.
//
// If the type needs something special to happen when the value goes
// out of scope and we add the Copy annotation to that type, we'll get
// a compile time error.
//
// What types are Copy? Docs can/should be read, but as a general rule,
// any group of simple scalar values can be Copy, and nothing that
// requires allocation or is some form of resource is Copy.
// -> ints, bools, floats, chars, tuples (only if all their fields are Copy).
//
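// A small illustration (a sketch, not from the text above): deriving Copy
// for a struct whose fields are all Copy:
//
// #[derive(Copy, Clone)]
// struct Point { x: i32, y: i32 }
//
// let p = Point { x: 1, y: 2 };
// let q = p;           // p is copied, not moved
// println!("{}", p.x); // still valid, because Point is Copy
//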
// + [Ownership and Functions]
// Semantics for passing a value to a function are similar to
// assigning a value to a variable. Passing a variable to a func
// will move or copy just like assignment.
let s = String::from("derpyfoobar"); // s comes into scope
takes_ownership(s); // s's value moves into the function...
// ... and so is no longer valid here.
//println!("{}", s); // COMPILE ERROR!
let x = 5;
makes_copy(x);
println!("{} ", x); // This is fine, because it was a copy.
// [Return values and scope]
// Returning values can also transfer ownership. Here's an ex
// with similar annotations to previous examples:
{
let baz = gives_ownership(); // gives_ownership moves its return
// value into baz
let duder = String::from("duder"); // duder comes into scope
let lucha = takes_and_gives_back(duder); // duder is moved into
// takes_and_gives_back, which also moves its return value into lucha
println!("lucha! {}", lucha);
} // Here lucha goes out of scope and is dropped. duder goes out of
// scope but was moved, so nothing happens.
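// The two helpers above, sketched in the same style:
//
// fn gives_ownership() -> String {
//     let some_string = String::from("hello");
//     some_string // returned and moved out to the calling function
// }
//
// fn takes_and_gives_back(a_string: String) -> String {
//     a_string // returned and moved out to the calling function
// }
//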
// The ownership of a variable follows the same pattern every time:
// **assigning a value to another variable moves it**. When a variable
// that includes data on the heap goes out of scope, the value will be
// cleaned up by `drop` unless the data has been moved to be owned by
// another variable.
//
// Taking ownership and then returning ownership with every fn is
// tedious. What if we need to let a function use a value but not take
// ownership? It's quite annoying that anything we pass in also needs
// to be passed back if we want to use it again, in addition to any
// data resulting from the body of the fn that we may want to return
// as well.
//
// It's possible to return multiple values using a tuple.
// But it's still obnoxious to constantly pass back a ton of stuff.
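// For example (a sketch):
//
// fn calculate_length(s: String) -> (String, usize) {
//     let length = s.len();
//     (s, length) // hand ownership back along with the result
// }
//
// let (s2, len) = calculate_length(s1);
//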
//
// Rust has a way to address this, and its called references!!!
//
////////////////////////////////////////////////////////////////////////////////
}
fn refs()
|
{
// [References and Borrowing]
// The issue with the returning tuple code we've seen elsewhere in
// the ownership section is that we have to return the String to
// the calling function so we can still use the String after the call.
// Here we define calculate_length so that it uses a *reference* to
// an object as a param instead of taking ownership of the value.
let calc_len = |s: &String| -> usize {
s.len()
};
let duderington = String::from("duderington");
println!("the length of the string. -> {}", calc_len(&duderington));
// First, all the tuple code in the variable declaration is gone.
// We pass the string into the function as &duderington, and in the
// definition, we take &String rather than String.
//
// The ampersands are *references*, and they allow you to refer to
|
identifier_body
|
|
main.rs
|
variables are valid
// is similar to other programming langs. Let's build on top
    // of this by introducing the String type.
//
// + String type
// We're going to illustrate the rules of ownership using a data type
// that's more complex than the ones we've seen before. All the data
// types we've seen before are stored on the stack and popped off the
// stack when their scope is over, but we want to look at data
// that's on the heap and explore how Rust knows to clean that up.
//
// We'll concentrate on the parts of String that relate to ownership.
// They also apply to other complex data types provided by the
// stdlib and those that you create.
//
// We've seen string literals hardcoded into the program. They're
// convenient, but they aren't suitable for every situation in which
    // you want to use text. For one, they're immutable. Also, not
// every string value is known when we write our code. The other type
// is a String, which is allocated on the heap. It's able to store an
// amount of text that is unknown at compile time. It's created from
// a literal with a `from` function:
let s = String::from("hello");
// Again, double colon (::) is an op that allows us to namespace
// this from function under the String type rather than using a name
// like string_from. It can be mutated:
let mut s = String::from("hello");
s.push_str(", world!"); // appends a literal to a String
println!("{}", s); // Will print the full string.
// Why can Strings be mutated but literals cannot? Difference is
// how they deal with memory.
}
fn
|
() {
// With string literals, we know the contents of the string at compile
// time, so the text is literally hardcoded into the executable,
// making them extremely fast and efficient. This property only comes
// from its immutability. We can't put a blob of memory into the binary
// for each piece of text whose size is unknown at compile time and
// whose size might change while running the program.
//
// To support a mutable, growing piece of text, need to allocate an
// amount of mem on the heap, unknown at compile time, to hold the
// contents. This means:
//
// 1) The memory must be requested from the OS at runtime.
// 2) Need a way of returning the mem to the OS when we're done with
// the allocated string.
//
// First part is done by us: the String::from implementation requests
// the memory it needs from the OS. This is pretty standard for most
// langs.
//
// The second part is different. In langs with GCs, it will keep track
// and clean up mem that isn't used anymore, and the programmer doesn't
// need to think about it. Without a GC, it's the programmer's
// responsibility to know when that memory is no longer being used
// and call code to explicitly return it.
//
// This has historically been a *very* difficult problem to solve.
// If you forget to, we'll waste memory and leak it.
// If we do it too early, we'll have an invalid variable (use after free)
// If we do it twice, that's a bug too.
//
// We need to pair exactly one allocation with one free.
//
// Rust takes its own unique path: the memory is automatically
// returned once the variable that owns it goes out of scope.
// When a variable goes out of scope, Rust calls a special function
// for us. The function is called drop, and it's where the author
// of String can put the code to return the memory. Rust calls
// `drop` automatically at the closing }.
//
// NOTE: C++ calls this pattern of deallocation at the end of its
// lifetime RAII. The drop function in Rust is similar to a dtor
//
// The pattern has a profound impact on the way that Rust code is
// written. Might seem simple, but the behavior of code can be
// unexpected in more complicated situations when we want to
// have multiple variables use the data that's been allocated
// on the heap.
//
// + Ways variables and data interact: Move
// Multiple variables can interact with the same data in different
// ways in rust:
// let x = 5;
// let y = x;
//
// So here, we bind the value of 5 to x, then we make a copy
// of the value in x and bind it to y.
    // We now have two vars, x and y, and both equal 5.
// This is exactly what's happening because integers are simple
// values with a known, fixed size, and these two 5 vals are
// pushed onto the stack.
//
    // let a = String::from("hello");
// let b = a;
//
// This looks similar and you'd probably assume it behaves the same
// way; the second would make a copy of the val in a and bind it to b.
// This is not what happens.
//
// Under the covers, a String is actually a type with a few values:
// ptr to some memory holding the string, a length, and a capacity.
// This group is stored on the stack. The length is how much memory
    // in bytes the contents of the String is currently using.
// The capacity is the total amount of memory, in bytes, the String
// has received from the OS. Difference between len and cap matters,
// but not the point of this.
//
// When a is assigned to b, the String data is copied, meaning we copy
// the pointer, the len, and the cap on the stack. The heap data is
// not copied, so b's pointer is going to refer to the same heap
// data that a does.
//
// Earlier we said when a variable goes out of scope, Rust will
// automatically call the drop function and clean up the heap mem.
// But in this case, both pointers are pointing to the same heap
    // memory. This is a problem. When a and b go out of scope, they
// will both attempt to free the same memory. This is a *double free*
// error and is one of the memory safety bugs we mentioned previously.
// Freeing mem twice can lead to mem corruption, which can lead
// to security vulnerabilities.
//
// To ensure mem safety, there's another detail to what happens in
// this situation in Rust. Instead of trying to copy the allocated
// memory, Rust considers a to no longer be valid and therefore, Rust
// doesn't need to free anything when a goes out of scope.
//
// If you were to try to use a after copying it to b, an error
// is thrown at compile time.
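    // Sketch of the failing code and, roughly, the compiler's complaint:
    // let a = String::from("hello");
    // let b = a;
    // println!("{}", a); // error[E0382]: borrow of moved value: `a`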
//
// The ideas of "shallow copy" and "deep copy" apply here. The
// concept of copying the pointer, length, and capacity without
    // copying the data sounds like a shallow copy. But because Rust
// also invalidates the first variable, instead of calling this
// a shallow copy, it's known as a *move*. We would read this by
// saying that a was *moved* into b.
//
// This solves the problem, because with only b as valid, when it
// goes out of scope, it alone will free the mem.
//
// There is an additional design choice implied by this:
// ** Rust will never automatically create "deep" copies of data. **
// Therefore, any *automatic* copying can be assumed to be
// inexpensive in terms of runtime performance.
//
// + Ways variables and data interact: Clones
//
    // If you *do* want to deeply copy the heap data of a String, not
// just the stack data, a common method can be used called a *clone*.
//
// let a = String::from("hello");
// let b = a.clone();
//
// When you see a clone call, you know some arbitrary code is being
    // executed and that code may be expensive. It's an indicator that
// something different is going on.
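    // With clone, both variables stay valid (sketch):
    // let a = String::from("hello");
    // let b = a.clone();
    // println!("a = {}, b = {}", a, b); // fine; the heap data was duplicated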
//
    // + Stack-only data: Copy
// There's another wrinkle we haven't talked about yet. This code
// is using integers:
// let x = 5;
// let y = x;
//
// println!("x = {}, y = {}", x, y);
//
    // This seems to contradict what we said: we didn't call
    // clone, but x is still valid and wasn't moved into y.
//
// The reason is types like integers that have a known size at
// compile time are stored *entirely* on the stack, so copies
// of the actual values are very quick to make. There's no reason
// we would want to prevent x from being valid after we create the
|
moves_and_mem
|
identifier_name
|
main.rs
|
// the opposite order (LIFO). This is referred to as
// *pushing onto the stack* and *popping off of the stack*
//
// It's fast because of the way it accesses the data: it never has to
// search for a place to put new data or a place to get data from because
    // that place is *always* the top of the stack. Another property is that
// all data on the stack must take up a known, fixed size.
//
// For data that is an unknown size at compile time or a size that may
    // change over time, we can store that data on the heap instead. The heap
// is less organized; we just ask for some amount of space. The OS
// finds an empty spot somewhere that's big enough for the request, marks
// it as in use, and returns a pointer to that location. It's called
// *allocating on the heap*. Pushing onto the stack is not considered
    // allocation. A pointer is a known, fixed size, so it can sit on the
    // stack, but for the actual data, we have to follow the pointer.
//
// The heap is slower than the stack because we have to follow a pointer
// to get there (a level of indirection). Processors are faster due to
    // temporal and spatial locality and caching if they have to jump around
// less.
//
    // When a function is called, the values passed into the function
    // (including, potentially, pointers to data on the heap) and the fn's
    // local vars get pushed onto the stack. When it's over, the vals get
    // popped off the stack.
//
// !!
// Keeping track of what code is using what data on the heap, minimizing
// duplicate data on the heap, and cleaning up unused data on the heap
    // so we don't run out of space are all problems that ownership helps address.
// Once ownership is understood, you won't have to think about the stack
// and the heap often, but knowing that managing heap data is why
// ownership exists can help explain why it works the way that it does.
// !!
//
// [Ownership Rules]
// There exist 3 very important rules to ownership in Rust:
//
// 1) Each value in Rust has a variable that's called its *owner*
// 2) There can only be one owner at a time (the highlander rule)
// 3) When the owner goes out of scope, the value will be dropped
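    // A tiny sketch of rule 3 in action (illustrative):
    // {
    //     let s = String::from("scoped"); // s owns the heap allocation
    // } // s goes out of scope here; Rust calls `drop` and frees the memory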
//
// + Variable Scope
// See scope()
//
// + Memory and Allocation
// See moves_and_mem
//
// + References
// See refs()
fn scope() {
// First example of ownership, we'll look at the *scope* of some
// variables. Scope is the range within a program for which an item
// is valid.
// s is a string literal. the value of the string is hardcoded into
// the text of the program. The variable is valid from the point
// at which it's declared until the end of the current *scope*.
{ // s is not valid here, it's not yet declared
let s = "hello"; // s is valid from this point forwards
// do some stuff with s
} // this scope is now over, and s is no longer valid
// There are two important points in time here:
// 1) When s comes *into* scope, it is valid.
// 2) s remains valid until it is *out of scope*
//
    // The relationship between scopes and when variables are valid
// is similar to other programming langs. Let's build on top
    // of this by introducing the String type.
//
// + String type
// We're going to illustrate the rules of ownership using a data type
// that's more complex than the ones we've seen before. All the data
// types we've seen before are stored on the stack and popped off the
// stack when their scope is over, but we want to look at data
// that's on the heap and explore how Rust knows to clean that up.
//
// We'll concentrate on the parts of String that relate to ownership.
// They also apply to other complex data types provided by the
// stdlib and those that you create.
//
// We've seen string literals hardcoded into the program. They're
// convenient, but they aren't suitable for every situation in which
    // you want to use text. For one, they're immutable. Also, not
// every string value is known when we write our code. The other type
// is a String, which is allocated on the heap. It's able to store an
// amount of text that is unknown at compile time. It's created from
// a literal with a `from` function:
let s = String::from("hello");
// Again, double colon (::) is an op that allows us to namespace
// this from function under the String type rather than using a name
// like string_from. It can be mutated:
let mut s = String::from("hello");
s.push_str(", world!"); // appends a literal to a String
println!("{}", s); // Will print the full string.
// Why can Strings be mutated but literals cannot? Difference is
// how they deal with memory.
}
fn moves_and_mem() {
// With string literals, we know the contents of the string at compile
// time, so the text is literally hardcoded into the executable,
// making them extremely fast and efficient. This property only comes
// from its immutability. We can't put a blob of memory into the binary
// for each piece of text whose size is unknown at compile time and
// whose size might change while running the program.
//
// To support a mutable, growing piece of text, need to allocate an
// amount of mem on the heap, unknown at compile time, to hold the
// contents. This means:
//
// 1) The memory must be requested from the OS at runtime.
// 2) Need a way of returning the mem to the OS when we're done with
// the allocated string.
//
// First part is done by us: the String::from implementation requests
// the memory it needs from the OS. This is pretty standard for most
// langs.
//
// The second part is different. In langs with GCs, it will keep track
// and clean up mem that isn't used anymore, and the programmer doesn't
// need to think about it. Without a GC, it's the programmer's
// responsibility to know when that memory is no longer being used
// and call code to explicitly return it.
//
// This has historically been a *very* difficult problem to solve.
// If you forget to, we'll waste memory and leak it.
// If we do it too early, we'll have an invalid variable (use after free)
// If we do it twice, that's a bug too.
//
// We need to pair exactly one allocation with one free.
//
// Rust takes its own unique path: the memory is automatically
// returned once the variable that owns it goes out of scope.
// When a variable goes out of scope, Rust calls a special function
// for us. The function is called drop, and it's where the author
// of String can put the code to return the memory. Rust calls
// `drop` automatically at the closing }.
//
// NOTE: C++ calls this pattern of deallocation at the end of its
// lifetime RAII. The drop function in Rust is similar to a dtor
//
// The pattern has a profound impact on the way that Rust code is
// written. Might seem simple, but the behavior of code can be
// unexpected in more complicated situations when we want to
// have multiple variables use the data that's been allocated
// on the heap.
//
// + Ways variables and data interact: Move
// Multiple variables can interact with the same data in different
// ways in rust:
// let x = 5;
// let y = x;
//
// So here, we bind the value of 5 to x, then we make a copy
// of the value in x and bind it to y.
    // We now have two vars, x and y, and both equal 5.
// This is exactly what's happening because integers are simple
// values with a known, fixed size, and these two 5 vals are
// pushed onto the stack.
//
    // let a = String::from("hello");
// let b = a;
//
// This looks similar and you'd probably assume it behaves the same
// way; the second would make a copy of the val in a and bind it to b.
// This is not what happens.
//
// Under the covers, a String is actually a type with a few values:
// ptr to some memory holding the string, a length, and a capacity.
// This group is stored on the stack. The length is how much memory
    // in bytes the contents of the String is currently using.
// The capacity is the total amount of memory, in bytes, the String
// has received from the OS. Difference between
|
// stores values in the order it gets them and removes the values in
|
random_line_split
|
|
audition.js
|
success: (res) => {
if(res.data) {
this.setData({ localAudioState: res.data });
for(var idx in res.data.audios) {
          // for...in yields string keys, and saveLocalState records
          // indices 0..2, so coerce idx to a number and match those indices.
          switch(parseInt(idx, 10)) {
            case 0:
              this.setData({
                firstFinished: true
              });
              break;
            case 1:
              this.setData({
                secondFinished: true
              });
              break;
            case 2:
              this.setData({
                audioCycleEnded: true
              });
              break;
          }
}
for(var idx in res.data.optAudios) {
this.setData({
optRecordFinished: true,
optFinished: true
});
}
for(var idx in res.data.preAudios) {
this.setData({
preAudioFinshed: true
});
}
}
}
});
},
/**
   * Lifecycle hook -- fired when the page loads
*/
onLoad: function (options) {
var that = this
this.setData({
options,
      isPreview: !!options.isPreview, // audition preview
})
    if (options.mode == 'opt') { // elective content
this.setData({ currentStep: 4 });
}
},
doRequest: function(options) {
var that = this
getApp().ready(() => {
console.log('ready')
this.initPageData(_ => {
let { mainEnded, optEnded } = this.data
if (options.main == 'done' && mainEnded) {
let step = 3, optFinished = false
// if (optEnded) {
// step = 4
// optFinished = true
// }
this.setData({
currentStep: step,
audioCycleEnded: true,
optFinished: optFinished
})
}
wx.getStorage({
key: 'semester_detail_' + this.data.paper.semesterId,
success: (res) => {
let current;
if (current = res.data.statistical.find((item) => item.paperId == this.data.paper.id)) {
this.setData({ disabledClassName: '', finished: true });
}
},
})
wx.getStorage({
key: 'paper_' + this.data.paper.id,
success: (res) => {
if (res.data == 'finished') {
this.setData({ preFinished: true })
}
},
})
wx.getStorage({
key: 'optRecord_' + this.data.paper.id,
success: (res) => {
if (res.data) {
this.setData({ optRecordFinished: true })
}
},
})
})
})
wx.getSetting({
success: function (res) {
if (!res.authSetting['scope.userInfo']) {
wx.openSetting({
success: function (res) {
if (res.authSetting['scope.userInfo']) {
that.initPageData()
}
}
})
}
}
})
},
initPageData: function (cb){
var that = this;
wx.showLoading({
      title: '加载中', // "Loading"
})
let url = '', method = 'GET', data, header;
if(that.data.options.paperId) {
url = config.service.paperUrl + '/' + (that.data.options.paperId || 4);
} else if (that.data.options.semesterId) {
url = config.service.todayPaperUrl;
method = 'POST';
data = {
openId: getApp().globalData.userInfo.openId,
semesterId: that.data.options.semesterId,
readToday: that.data.options.date || util.getCurrentDate()
}
header = {
'Content-Type': 'application/x-www-form-urlencoded'
}
}
qcloud.request({
url: url,
data: data,
header: header,
method: method,
login: true,
success(result) {
if (result.statusCode != 200) {
          let msg = '系统异常,请联系管理员'; // "System error, please contact the administrator"
          if (result.data.code == 4042) {
            msg = '该课程已删除'; // "This course has been deleted"
          } else if (result.data.code == 4043) {
            msg = '本学期已结束'; // "This semester has ended"
}
wx.showModal({
            title: '提示', // "Notice"
content: msg,
showCancel: false,
success: function (res) {
if (res.confirm) {
wx.navigateBack({
delta: 1
})
}
}
})
return
}
        // Redirect to the dictation page
if (result.data.type == 3) {
wx.redirectTo({
url: '/pages/dictation/dictation?paperId=' + result.data.id
})
return;
}
        // Redirect to the test page
if (result.data.type == 2) {
wx.redirectTo({
url: '/pages/test/test?paperId=' + result.data.id
})
return;
}
let content = JSON.parse(result.data.content);
content.audios.forEach(audio => {
audio.key = Math.random() * 100000
})
content.optAudios.forEach(audio => {
audio.key = Math.random() * 100000
})
let mainEnded = wx.getStorageSync('paper_' + result.data.id)
let optEnded = wx.getStorageSync('optPaper_' + result.data.id)
that.setData({
paper: result.data,
content: content,
mainEnded: mainEnded,
optEnded: optEnded
})
WxParse.wxParse('original', 'html', content.original, that, 5);
WxParse.wxParse('handout', 'html', content.handout, that, 5);
WxParse.wxParse('thirdHandout', 'html', content.thirdHandout, that, 5);
WxParse.wxParse('optHandout', 'html', content.optHandout, that, 5);
if (that.data.currentStep == 1 && content.preAudio) {
util.showToast("Tout écouter pour passer à l’étape suivante. ", 3000)
}
cb && cb()
},
fail(error) {
        // util.showModel('请求失败', error) // ("Request failed")
console.log('request fail', error)
},
complete() {
wx.hideLoading()
}
})
},
/**
   * Lifecycle hook -- fired when the page's initial render completes
*/
onReady: function () {
},
/**
   * Next step
*/
next: function() {
const step = this.data.currentStep + 1;
this.setData({
currentStep: step,
fixed: true
})
if (this.data.currentStep == 3) {
setTimeout(() => {
util.showToast("Lire au moins cinq fois pour passer l’étape suivante.", 2500)
} ,1000)
}
this.stopAudio()
},
toggle: function(e) {
var key = e.target.dataset.target
this.setData({
[key]: !this.data[key]
})
},
  // Persist audio playback state to local storage
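  // Assumed stored shape under 'record_' + paper.id, matching the writes below:
  // { audios: { 0: true, 1: true, 2: true }, optAudios: { 0: true }, preAudios: { 0: true } }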
saveLocalState: function(idx, audioKey) {
audioKey = audioKey || 'audios';
var key = 'record_' + this.data.paper.id;
wx.getStorage({
key: key,
success: function(res) {
res.data[audioKey] = res.data[audioKey] || {};
res.data[audioKey][idx] = true;
wx.setStorage({
key: key,
data: res.data
})
},
fail: function() {
wx.setStorage({
key: key,
data: {
[audioKey]: {
[idx]: true
}
}
});
}
});
},
/**
   * Audio playback finished
*/
onPreAudioEnded: function() {
var firstFinished = !!this.data.content.audios[0].finished
this.saveLocalState(0, 'preAudios');
this.setData({
preAudioFinshed: true,
firstFinished: firstFinished
})
if (!firstFinished) {
util.showToast("Tout écouter pour passer à l’étape suivante.", 3000)
}
},
onAudioEnded: function(e) {
switch (this.data.currentStep) {
|
this.saveLocalState(0);
var hasPreAudio = this.data.content.preAudio
this.data.content.audios[0].finished = true;
var preAudioFinshed = this.data.preAudioFinshed
if ((!hasPreAudio || preAudioFinshed)) {
this.setData({
firstFinished: true
})
}
if(hasPreAudio && !preAudioFinshed) {
util.showToast("Tout écouter pour passer à l’étape suivante.", 3000)
}
break
case 2:
this.saveLocalState(1);
this.setData({
secondFinished: true
})
break
case 4:
this.saveLocalState(0, 'optAudios');
wx.setStorage({
key: 'optRecord_' + this.data.paper.id,
data: true,
})
this.setData({
optRecordFinished: true,
optFinished: true
})
break
}
if(this.data.currentStep == 3) {
this.saveLocalState(2);
return
}
},
onAudioCycleEnded: function(e)
|
case 1:
|
random_line_split
|
Minitaur_Env.py
|
numHeightfieldColumns=numHeightfieldColumns)
textureId = p.loadTexture("heightmaps/wm_height_out.png")
terrain = p.createMultiBody(0, terrainShape)
# p.changeVisualShape(terrain, -1, rgbaColor=[1,1,1,1])
p.changeVisualShape(terrain, -1, textureUniqueId = textureId)
# Remove the previous terrain and establish the new one
        # Note: the first time this function is called, the default terrain of
        # minitaur_env (a flat plane) is removed. Subsequent calls remove the previous heightfield.
self.minitaur_env._pybullet_client.removeBody(self.minitaur_env.ground_id)
self.minitaur_env.ground_id = terrain
return terrain
def is_fallen(self):
"""Decide whether the minitaur has fallen.
        If the angle between the up direction of the base and that of the world
        is too large (i.e. their dot product is smaller than 0.3), the minitaur is considered fallen.
Returns:
Boolean value that indicates whether the minitaur has fallen.
"""
orientation = self.minitaur_env.minitaur.GetBaseOrientation()
rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation)
local_up = rot_mat[6:]
return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3)
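        # e.g. an upright base gives a dot product of ~1.0, while a base tilted
        # by more than roughly 72.5 degrees (acos(0.3)) is reported as fallen.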
def generate_steps(self, numObs=25):
p = self.p
numObs *= 2
linkMasses = [None] * (numObs)
colIdxs = [None] * (numObs)
visIdxs = [None] * (numObs)
posObs = [None] * (numObs)
orientObs = [None] * (numObs)
parentIdxs = [None] * (numObs)
linkInertialFramePositions = [None] * (numObs)
linkInertialFrameOrientations = [None] * (numObs)
linkJointTypes = [None] * (numObs)
linkJointAxis = [None] * (numObs)
for obs in range(numObs):
linkMasses[obs] = 0.0
parentIdxs[obs] = 0
linkInertialFramePositions[obs] = [0, 0, 0]
linkInertialFrameOrientations[obs] = [0, 0, 0, 1]
linkJointTypes[obs] = p.JOINT_FIXED
linkJointAxis[obs] = np.array([0, 0, 1])
orientObs[obs] = p.getQuaternionFromEuler([0., np.pi/4, 0])
posObs, orientObs, colIdxs, visIdxs = self._generate_steps_sub(p, posObs, orientObs, colIdxs, numObs)
obsUid = p.createMultiBody(baseCollisionShapeIndex=-1, baseVisualShapeIndex=-1, basePosition=[0, 0, 0],
baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0],
baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses,
linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs,
linkPositions=posObs, linkOrientations=orientObs, linkParentIndices=parentIdxs,
linkInertialFramePositions=linkInertialFramePositions,
linkInertialFrameOrientations=linkInertialFrameOrientations,
linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis)
for obs in range(numObs):
p.changeVisualShape(obsUid, visIdxs[obs], textureUniqueId=self.terraintextureId)
x_goal = self.goal
y_goal = 0
posObs = np.array([None] * 3)
posObs[0] = x_goal
posObs[1] = y_goal
posObs[2] = 0 # set z at ground level
# colIdxs = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1,5.0,0.1])
colIdxs = -1
visIdxs = p.createVisualShape(p.GEOM_BOX,
halfExtents=[0.05,5.0,0.15],
rgbaColor=[0.7, 0, 0, 1])
linkMasses = 0.0
parentIdxs = 0
linkInertialFramePositions = [0, 0, 0]
linkInertialFrameOrientations = [0, 0, 0, 1]
linkJointTypes = p.JOINT_FIXED
linkJointAxis = np.array([0, 0, 1])
orientObs = p.getQuaternionFromEuler([0., 0., 0.])
p.createMultiBody(baseCollisionShapeIndex=colIdxs, baseVisualShapeIndex=visIdxs, basePosition=posObs)#,
return obsUid
def _generate_steps_sub(self, p, posObs, orientObs, colIdxs, numObs):
visIdxs = [None]*numObs
        for obs in range(int(numObs/2)): # Paired box obstacles forming steps (GEOM_BOX, not cylinders)
posObs_obs1 = [None] * 3
posObs_obs2 = [None] * 3
theta = np.random.rand(1)*(self.max_angle*math.pi/180)*(2/3)
l1 = 0.5
l2 = l1
theta_rotate = theta
h = l1 * np.sin(np.pi/4 - theta) /(2**0.5)
d = (2**0.5) * l1 * np.cos(np.pi/4 + theta)
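            # Each iteration places two boxes tilted by -theta and +theta: h sinks
            # them into the ground and d offsets the second box, so the pair forms
            # a shallow step/ramp obstacle (geometry inferred from this construction).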
halfExtents = [l1/2,5.0,l2/2]
x_temp = 0.5 + obs*l1
y_temp = 0
posObs_obs1[0] = x_temp
posObs_obs1[1] = y_temp
posObs_obs1[2] = -h # set z at ground level
posObs[2*obs] = posObs_obs1
colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents)
orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0])
visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX,
halfExtents=halfExtents,)
# print(posObs_obs[0])
posObs_obs2[0] = x_temp + d
# print(posObs_obs[0])
posObs_obs2[1] = y_temp
posObs_obs2[2] = -h # set z at ground level
posObs[2*obs+1] = posObs_obs2
colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents)
orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0])
visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX,
halfExtents=halfExtents,)
return posObs, orientObs, colIdxs, visIdxs
# def _get_bounding_amplitude(self, prim):
# return self.prim_lib[prim]
def execute_policy(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50,
image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0):
if record_vid:
import cv2
# videoObj = cv2.VideoWriter('video'+str(vid_num)+'.avi
|
'''Generate a heightfield.
Resource: https://github.com/bulletphysics/bullet3/blob/master/examples/pybullet/examples/heightfield.py'''
p = self.p
numHeightfieldRows = num_rows
numHeightfieldColumns = num_rows
heightfieldData = [0]*numHeightfieldRows*numHeightfieldColumns
for i in range(numHeightfieldRows*numHeightfieldColumns):
# heightfieldData[i] = 0.1
if (i%2)==0:
heightfieldData[i] = np.random.uniform(self.h_lim[0],self.h_lim[1])
else:
heightfieldData[i] = 0
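        # Every other vertex gets a random height in h_lim and the rest stay at 0,
        # yielding a rough, bumpy terrain.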
terrainShape = p.createCollisionShape(shapeType = p.GEOM_HEIGHTFIELD,
flags = p.GEOM_CONCAVE_INTERNAL_EDGE, # this flag ensures foot does not get stuck
meshScale=[1,1,1],
heightfieldTextureScaling=(numHeightfieldRows-1)/2,
heightfieldData=heightfieldData,
numHeightfieldRows=numHeightfieldRows,
|
identifier_body
|
|
Minitaur_Env.py
|
0,l2/2]
x_temp = 0.5 + obs*l1
y_temp = 0
posObs_obs1[0] = x_temp
posObs_obs1[1] = y_temp
posObs_obs1[2] = -h # set z at ground level
posObs[2*obs] = posObs_obs1
colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents)
orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0])
visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX,
halfExtents=halfExtents,)
# print(posObs_obs[0])
posObs_obs2[0] = x_temp + d
# print(posObs_obs[0])
posObs_obs2[1] = y_temp
posObs_obs2[2] = -h # set z at ground level
posObs[2*obs+1] = posObs_obs2
colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents)
orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0])
visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX,
halfExtents=halfExtents,)
return posObs, orientObs, colIdxs, visIdxs
# def _get_bounding_amplitude(self, prim):
# return self.prim_lib[prim]
def execute_policy(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50,
image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0):
if record_vid:
import cv2
# videoObj = cv2.VideoWriter('video'+str(vid_num)+'.avi', cv2.VideoWriter_fourcc('M','J','P','G'),
# 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT))
videoObj = cv2.VideoWriter('video'+str(vid_num)+'.mp4', cv2.VideoWriter_fourcc(*'mp4v'),
50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT))
t_flag = 0
goal_cost = np.zeros(1)
coll_cost = np.ones(1)
cost = goal_cost + coll_cost
total_time_steps = comp_len * prim_horizon
# baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0],
# baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses,
# linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs,
# linkPositions=posObs)
# , linkOrientations=orientObs, linkParentIndices=parentIdxs,
# linkInertialFramePositions=linkInertialFramePositions,
# linkInertialFrameOrientations=linkInertialFrameOrientations,
# linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis)
for i in range(5):
action = [0,0,0,0,0,0,0,0]
self.minitaur_env.step(action)
for i in range(comp_len):
# Get current depth map
cam_pos = list(self.minitaur_env.minitaur.GetBasePosition())
cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation())
_, depth = self._mount_cam(cam_pos, cam_orn)
# Decide primitive from the policy
depth = torch.Tensor(depth).view([1, 1, image_size, image_size])
assert depth.nelement()!=0,"Tensor is empty."
motor_angles = torch.Tensor(self.minitaur_env.minitaur.GetMotorAngles()).view([1,8]).detach()
motor_velocities = torch.Tensor(self.minitaur_env.minitaur.GetMotorVelocities()).view([1,8]).detach()
base_pos = torch.Tensor(self.minitaur_env.minitaur.GetBasePosition()).view([1,3]).detach()
base_orn = torch.Tensor(self.minitaur_env.minitaur.GetBaseRollPitchYaw()).view([1,3]).detach()
control_params = policy(depth.to(device),
motor_angles.to(device),
motor_velocities.to(device),
base_pos.to(device),
base_orn.to(device))[0]
amplitude1 = (control_params[0].item()*0.8)+0.2
# amplitude1 = torch.clamp(control_params[0], min=0.2, max=1.0).item()
amplitude2 = (control_params[1].item()*0.8)+0.2
# amplitude2 = torch.clamp(control_params[0], min=0.2, max=1.0).item()
steering_amplitude = torch.clamp(control_params[2], min=0.0, max=min(1-amplitude1, 1-amplitude2)).item()
# phase1 = control_params[3].item() * math.pi
# phase2 = control_params[4].item() * math.pi
speed = control_params[3].item()*20 + 20
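            # Assuming the policy head outputs values in [0, 1], these affine maps
            # put the amplitudes in [0.2, 1.0] and the gait speed in [20, 40].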
for step_counter in range(prim_horizon):
t = step_counter * time_step + t_flag
# if t>4.1:
# import matplotlib.pyplot as plt
# cam_pos = list(self.minitaur_env.minitaur.GetBasePosition())
# cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation())
# rgb, _ = self._mount_cam(cam_pos, cam_orn, w=500, h=500)
# fig = plt.figure()
# ax = plt.subplot(111)
# ax.set_yticklabels([])
# ax.set_xticklabels([])
# plt.imshow(rgb, cmap='gray', interpolation='nearest')
# plt.savefig('minitaur_rgb_view.png')
# time.sleep(600)
# amplitude1 = 0.5
# amplitude2 = 0.5
# steering_amplitude = 0.0
# speed = 50
if record_vid:
rgb = self.minitaur_env.render()
cv2.imshow('Vis_Vid_Rec', cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR))
cv2.waitKey(1)
videoObj.write(cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR))
phase1 = math.pi
phase2 = phase1
# Applying asymmetrical sine gaits to different legs can steer the minitaur.
a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude)
a2 = math.sin(t * speed + phase1) * (amplitude1 - steering_amplitude)
a3 = math.sin(t * speed) * amplitude2
a4 = math.sin(t * speed + phase2) * amplitude2
action = [a1, a2, a2, a1, a3, a4, a4, a3]
self.minitaur_env.step(action)
# Compute costs
# rob_pos = np.array(cam_pos[0:2])
rob_pos = cam_pos[0]
# goal_cost = np.linalg.norm(rob_pos-goal, ord=2)/10
goal_cost = np.abs(rob_pos-goal)/goal
fall_cost = 1 - (step_counter + i*prim_horizon)/(total_time_steps-1)
cost = alpha * fall_cost + (1-alpha) * goal_cost
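                # The cost blends survival and progress: fall_cost decays linearly
                # with elapsed steps, goal_cost is the normalized distance left to
                # the goal, and alpha weights the trade-off between the two.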
end_position = self.minitaur_env.minitaur.GetBasePosition()
end_position = end_position[0]
if self.is_fallen():
if record_vid:
videoObj.release()
return cost, fall_cost, goal_cost, end_position
if end_position>goal:
goal_cost = 0.
cost = alpha * fall_cost + (1-alpha) * goal_cost
if record_vid:
videoObj.release()
return cost, fall_cost, goal_cost, end_position
end_position = self.minitaur_env.minitaur.GetBasePosition()
end_position = end_position[0]
# print(end_position)
# print("Speed:", np.linalg.norm(start_position-end_position)/(prim_horizon*time_step))
t_flag += prim_horizon*time_step
if record_vid:
videoObj.release()
return cost, fall_cost, goal_cost, end_position
def _mount_cam(self, base_p, base_o, w=50, h=50):
'''
Mounts an RGB-D camera on a robot in pybullet
|
Parameters
|
random_line_split
|
|
Minitaur_Env.py
|
= p.createMultiBody(0, terrainShape)
# p.changeVisualShape(terrain, -1, rgbaColor=[1,1,1,1])
p.changeVisualShape(terrain, -1, textureUniqueId = textureId)
# Remove the previous terrain and establish the new one.
# Note: the first time this function is called, the default plane-ground terrain
# of minitaur_env is removed. Subsequent calls remove the previous heightfield.
self.minitaur_env._pybullet_client.removeBody(self.minitaur_env.ground_id)
self.minitaur_env.ground_id = terrain
return terrain
def is_fallen(self):
"""Decide whether the minitaur has fallen.
If the angle between the base's up direction and the world's up direction
is too large (their dot product is smaller than 0.3), the minitaur is considered fallen.
Returns:
Boolean value that indicates whether the minitaur has fallen.
"""
orientation = self.minitaur_env.minitaur.GetBaseOrientation()
rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation)
local_up = rot_mat[6:]
return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3)
def generate_steps(self, numObs=25):
p = self.p
numObs *= 2
linkMasses = [None] * (numObs)
colIdxs = [None] * (numObs)
visIdxs = [None] * (numObs)
posObs = [None] * (numObs)
orientObs = [None] * (numObs)
parentIdxs = [None] * (numObs)
linkInertialFramePositions = [None] * (numObs)
linkInertialFrameOrientations = [None] * (numObs)
linkJointTypes = [None] * (numObs)
linkJointAxis = [None] * (numObs)
for obs in range(numObs):
linkMasses[obs] = 0.0
parentIdxs[obs] = 0
linkInertialFramePositions[obs] = [0, 0, 0]
linkInertialFrameOrientations[obs] = [0, 0, 0, 1]
linkJointTypes[obs] = p.JOINT_FIXED
linkJointAxis[obs] = np.array([0, 0, 1])
orientObs[obs] = p.getQuaternionFromEuler([0., np.pi/4, 0])
posObs, orientObs, colIdxs, visIdxs = self._generate_steps_sub(p, posObs, orientObs, colIdxs, numObs)
obsUid = p.createMultiBody(baseCollisionShapeIndex=-1, baseVisualShapeIndex=-1, basePosition=[0, 0, 0],
baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0],
baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses,
linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs,
linkPositions=posObs, linkOrientations=orientObs, linkParentIndices=parentIdxs,
linkInertialFramePositions=linkInertialFramePositions,
linkInertialFrameOrientations=linkInertialFrameOrientations,
linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis)
for obs in range(numObs):
|
x_goal = self.goal
y_goal = 0
posObs = np.array([None] * 3)
posObs[0] = x_goal
posObs[1] = y_goal
posObs[2] = 0 # set z at ground level
# colIdxs = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1,5.0,0.1])
colIdxs = -1
visIdxs = p.createVisualShape(p.GEOM_BOX,
halfExtents=[0.05,5.0,0.15],
rgbaColor=[0.7, 0, 0, 1])
linkMasses = 0.0
parentIdxs = 0
linkInertialFramePositions = [0, 0, 0]
linkInertialFrameOrientations = [0, 0, 0, 1]
linkJointTypes = p.JOINT_FIXED
linkJointAxis = np.array([0, 0, 1])
orientObs = p.getQuaternionFromEuler([0., 0., 0.])
p.createMultiBody(baseCollisionShapeIndex=colIdxs, baseVisualShapeIndex=visIdxs, basePosition=posObs)#,
return obsUid
def _generate_steps_sub(self, p, posObs, orientObs, colIdxs, numObs):
visIdxs = [None]*numObs
for obs in range(int(numObs/2)): # Pairs of tilted box obstacles forming steps
posObs_obs1 = [None] * 3
posObs_obs2 = [None] * 3
theta = np.random.rand(1)*(self.max_angle*math.pi/180)*(2/3)
l1 = 0.5
l2 = l1
theta_rotate = theta
h = l1 * np.sin(np.pi/4 - theta) /(2**0.5)
d = (2**0.5) * l1 * np.cos(np.pi/4 + theta)
halfExtents = [l1/2,5.0,l2/2]
x_temp = 0.5 + obs*l1
y_temp = 0
posObs_obs1[0] = x_temp
posObs_obs1[1] = y_temp
posObs_obs1[2] = -h # set z at ground level
posObs[2*obs] = posObs_obs1
colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents)
orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0])
visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX,
halfExtents=halfExtents,)
# print(posObs_obs[0])
posObs_obs2[0] = x_temp + d
# print(posObs_obs[0])
posObs_obs2[1] = y_temp
posObs_obs2[2] = -h # set z at ground level
posObs[2*obs+1] = posObs_obs2
colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents)
orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0])
visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX,
halfExtents=halfExtents,)
return posObs, orientObs, colIdxs, visIdxs
# def _get_bounding_amplitude(self, prim):
# return self.prim_lib[prim]
def execute_policy(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50,
image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0):
if record_vid:
import cv2
# videoObj = cv2.VideoWriter('video'+str(vid_num)+'.avi', cv2.VideoWriter_fourcc('M','J','P','G'),
# 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT))
videoObj = cv2.VideoWriter('video'+str(vid_num)+'.mp4', cv2.VideoWriter_fourcc(*'mp4v'),
50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT))
t_flag = 0
goal_cost = np.zeros(1)
coll_cost = np.ones(1)
cost = goal_cost + coll_cost
total_time_steps = comp_len * prim_horizon
# baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0],
# baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses,
# linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs,
# linkPositions=posObs)
# , linkOrientations=orientObs, linkParentIndices=parentIdx
|
p.changeVisualShape(obsUid, visIdxs[obs], textureUniqueId=self.terraintextureId)
|
conditional_block
|
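The `_generate_steps_sub` routine in the record above sinks each box pair by `h` and shifts the second box by `d` so that the tilted box tops meet at ground level. A standalone sketch of just that placement math, using the same formulas as the source; `max_angle_deg` is an assumed stand-in for the env's `self.max_angle`.

import math
import random

def step_pair_offsets(theta: float, l1: float = 0.5):
    """Vertical drop h and horizontal gap d for two boxes of side l1,
    rotated by -theta and +theta about the y axis (as in _generate_steps_sub)."""
    h = l1 * math.sin(math.pi / 4 - theta) / math.sqrt(2)
    d = math.sqrt(2) * l1 * math.cos(math.pi / 4 + theta)
    return h, d

max_angle_deg = 20  # assumed value; the env reads self.max_angle
theta = random.random() * (max_angle_deg * math.pi / 180) * (2 / 3)
h, d = step_pair_offsets(theta)
print(f"theta={theta:.3f} rad -> sink h={h:.3f} m, offset d={d:.3f} m")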
content-parse.ts
|
96;')
const classes = lang ? ` class="language-${lang}"` : ''
return `><pre><code${classes}>${code}</code></pre>`
})
.replace(/`([^`\n]*)`/g, (_1, raw) => {
return raw ? `<code>${htmlToText(raw).replace(/</g, '&lt;').replace(/>/g, '&gt;')}</code>` : ''
})
}
// Always sanitize the raw HTML data *after* it has been modified
const transforms: Transform[] = [
sanitizer,
...options.astTransforms || [],
]
if (hideEmojis) {
transforms.push(removeUnicodeEmoji)
transforms.push(removeCustomEmoji(options.emojis ?? {}))
}
else {
if (replaceUnicodeEmoji)
transforms.push(transformUnicodeEmoji)
transforms.push(replaceCustomEmoji(options.emojis ?? {}))
}
if (markdown)
transforms.push(transformMarkdown)
if (mentions?.length)
transforms.push(createTransformNamedMentions(mentions))
if (convertMentionLink)
transforms.push(transformMentionLink)
transforms.push(transformParagraphs)
if (collapseMentionLink)
transforms.push(transformCollapseMentions(status, inReplyToStatus))
return transformSync(parse(html), transforms)
}
/**
* Converts raw HTML from a Mastodon server to HTML for the Tiptap editor
*/
export function convertMastodonHTML(html: string, customEmojis: Record<string, mastodon.v1.CustomEmoji> = {}) {
const tree = parseMastodonHTML(html, {
emojis: customEmojis,
markdown: true,
convertMentionLink: true,
})
return render(tree)
}
export function htmlToText(html: string) {
try {
const tree = parse(html)
return (tree.children as Node[]).map(n => treeToText(n)).join('').trim()
}
catch (err) {
console.error(err)
return ''
}
}
export function recursiveTreeToText(input: Node): string {
if (input && input.children && input.children.length > 0)
return input.children.map((n: Node) => recursiveTreeToText(n)).join('')
else
return treeToText(input)
}
const emojiIdNeedsWrappingRE = /^(\d|\w|-|_)+$/
export function treeToText(input: Node): string {
let pre = ''
let body = ''
let post = ''
if (input.type === TEXT_NODE)
return decode(input.value)
if (input.name === 'br')
return '\n'
if (['p', 'pre'].includes(input.name))
pre = '\n'
if (input.attributes?.['data-type'] === 'mention') {
const acct = input.attributes['data-id']
if (acct)
return acct.startsWith('@') ? acct : `@${acct}`
}
if (input.name === 'code') {
if (input.parent?.name === 'pre') {
const lang = input.attributes.class?.replace('language-', '')
pre = `\`\`\`${lang || ''}\n`
post = '\n```'
}
else {
pre = '`'
post = '`'
}
}
else if (input.name === 'b' || input.name === 'strong') {
pre = '**'
post = '**'
}
else if (input.name === 'i' || input.name === 'em') {
pre = '*'
post = '*'
}
else if (input.name === 'del') {
pre = '~~'
post = '~~'
}
if ('children' in input)
body = (input.children as Node[]).map(n => treeToText(n)).join('')
if (input.name === 'img' || input.name === 'picture') {
if (input.attributes.class?.includes('custom-emoji')) {
const id = input.attributes['data-emoji-id'] ?? input.attributes.alt ?? input.attributes.title ?? 'unknown'
return id.match(emojiIdNeedsWrappingRE) ? `:${id}:` : id
}
if (input.attributes.class?.includes('iconify-emoji'))
return input.attributes.alt
}
return pre + body + post
}
// A tree transform function takes an ultrahtml Node object and returns
// new content that will replace the given node in the tree.
// Returning null removes the node from the tree.
// Strings get converted to text nodes.
// The input node's children have been transformed before the node itself
// gets transformed.
type Transform = (node: Node, root: Node) => (Node | string)[] | Node | string | null
// Helpers for transforming (filtering, modifying, ...) a parsed HTML tree
// by running the given chain of transform functions one-by-one.
function transformSync(doc: Node, transforms: Transform[]) {
function
|
(node: Node, transform: Transform, root: Node) {
if (Array.isArray(node.children)) {
const children = [] as (Node | string)[]
for (let i = 0; i < node.children.length; i++) {
const result = visit(node.children[i], transform, root)
if (Array.isArray(result))
children.push(...result)
else if (result)
children.push(result)
}
node.children = children.map((value) => {
if (typeof value === 'string')
return { type: TEXT_NODE, value, parent: node }
value.parent = node
return value
})
}
return transform(node, root)
}
for (const transform of transforms)
doc = visit(doc, transform, doc) as Node
return doc
}
// A tree transform for sanitizing elements & their attributes.
type AttrSanitizers = Record<string, (value: string | undefined) => string | undefined>
function sanitize(allowedElements: Record<string, AttrSanitizers>): Transform {
return (node) => {
if (node.type !== ELEMENT_NODE)
return node
if (!Object.prototype.hasOwnProperty.call(allowedElements, node.name))
return null
const attrSanitizers = allowedElements[node.name]
const attrs = {} as Record<string, string>
for (const [name, func] of Object.entries(attrSanitizers)) {
const value = func(node.attributes[name])
if (value !== undefined)
attrs[name] = value
}
node.attributes = attrs
return node
}
}
function filterClasses(allowed: RegExp) {
return (c: string | undefined) => {
if (!c)
return undefined
return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ')
}
}
function keep(value: string | undefined) {
return value
}
function set(value: string) {
return () => value
}
function filterHref() {
const LINK_PROTOCOLS = new Set([
'http:',
'https:',
'dat:',
'dweb:',
'ipfs:',
'ipns:',
'ssb:',
'gopher:',
'xmpp:',
'magnet:',
'gemini:',
])
return (href: string | undefined) => {
if (href === undefined)
return undefined
// Allow relative links
if (href.startsWith('/') || href.startsWith('.'))
return href
let url
try {
url = new URL(href)
}
catch (err) {
if (err instanceof TypeError)
return undefined
throw err
}
if (LINK_PROTOCOLS.has(url.protocol))
return url.toString()
return '#'
}
}
function removeUnicodeEmoji(node: Node) {
if (node.type !== TEXT_NODE)
return node
let start = 0
const matches = [] as (string | Node)[]
findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => {
matches.push(result.slice(start).trimEnd())
start = result.length + match.match.length
return undefined
})
if (matches.length === 0)
return node
matches.push(node.value.slice(start))
return matches.filter(Boolean)
}
function transformUnicodeEmoji(node: Node) {
if (node.type !== TEXT_NODE)
return node
let start = 0
const matches = [] as (string | Node)[]
findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => {
const attrs = getEmojiAttributes(match)
matches.push(result.slice(start))
matches.push(h('img', { src: attrs.src, alt: attrs.alt, class: attrs.class }))
start = result.length + match.match.length
return undefined
})
if (matches.length === 0)
return node
matches.push(node.value.slice(start))
return matches.filter(Boolean)
}
function removeCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform {
return (node) => {
if (node.type !== TEXT_NODE)
return node
const split = node.value.split(/\s?:([\w-]+?):/g)
if (split.length === 1)
return node
return split.map((name, i) => {
if (i % 2 === 0)
return name
const emoji = customEmojis[name] as mastodon.v1.CustomEmoji
if (!emoji)
return `:${name}:`
return ''
}).filter(Boolean)
}
}
|
visit
|
identifier_name
|
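As the comment block in the content-parse.ts record explains, a `Transform` may return a node, a string, an array of either, or `null`. A minimal sketch of a conforming transform follows, with hand-rolled stand-ins for ultrahtml's node shape (the real `TEXT_NODE`/`ELEMENT_NODE` constants, `Node` type, and `parse()` come from the ultrahtml package).

// Minimal stand-ins for ultrahtml's node shape.
const TEXT_NODE = 0
const ELEMENT_NODE = 1

interface MiniNode {
  type: number
  name?: string
  value?: string
  children?: MiniNode[]
}

type MiniTransform = (node: MiniNode) => (MiniNode | string)[] | MiniNode | string | null

// Uppercase text nodes, drop <script> elements, pass everything else through.
const shoutAndStripScripts: MiniTransform = (node) => {
  if (node.type === TEXT_NODE)
    return (node.value ?? '').toUpperCase() // a returned string becomes a text node
  if (node.name === 'script')
    return null // null removes the node from the tree
  return node
}

const sample: MiniNode = {
  type: ELEMENT_NODE,
  name: 'p',
  children: [
    { type: TEXT_NODE, value: 'hello' },
    { type: ELEMENT_NODE, name: 'script', children: [] },
  ],
}
console.log(shoutAndStripScripts(sample.children![0])) // 'HELLO'
console.log(shoutAndStripScripts(sample.children![1])) // null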
content-parse.ts
|
96;')
const classes = lang ? ` class="language-${lang}"` : ''
return `><pre><code${classes}>${code}</code></pre>`
})
.replace(/`([^`\n]*)`/g, (_1, raw) => {
return raw ? `<code>${htmlToText(raw).replace(/</g, '&lt;').replace(/>/g, '&gt;')}</code>` : ''
})
}
// Always sanitize the raw HTML data *after* it has been modified
const transforms: Transform[] = [
sanitizer,
...options.astTransforms || [],
]
if (hideEmojis) {
transforms.push(removeUnicodeEmoji)
transforms.push(removeCustomEmoji(options.emojis ?? {}))
}
else {
if (replaceUnicodeEmoji)
transforms.push(transformUnicodeEmoji)
transforms.push(replaceCustomEmoji(options.emojis ?? {}))
}
if (markdown)
transforms.push(transformMarkdown)
if (mentions?.length)
transforms.push(createTransformNamedMentions(mentions))
if (convertMentionLink)
transforms.push(transformMentionLink)
transforms.push(transformParagraphs)
if (collapseMentionLink)
transforms.push(transformCollapseMentions(status, inReplyToStatus))
return transformSync(parse(html), transforms)
}
/**
* Converts raw HTML from a Mastodon server to HTML for the Tiptap editor
*/
export function convertMastodonHTML(html: string, customEmojis: Record<string, mastodon.v1.CustomEmoji> = {}) {
const tree = parseMastodonHTML(html, {
emojis: customEmojis,
markdown: true,
convertMentionLink: true,
})
return render(tree)
}
export function htmlToText(html: string) {
try {
const tree = parse(html)
return (tree.children as Node[]).map(n => treeToText(n)).join('').trim()
}
catch (err) {
console.error(err)
return ''
}
}
export function recursiveTreeToText(input: Node): string {
if (input && input.children && input.children.length > 0)
return input.children.map((n: Node) => recursiveTreeToText(n)).join('')
else
return treeToText(input)
}
const emojiIdNeedsWrappingRE = /^(\d|\w|-|_)+$/
export function treeToText(input: Node): string {
let pre = ''
let body = ''
let post = ''
if (input.type === TEXT_NODE)
return decode(input.value)
if (input.name === 'br')
return '\n'
if (['p', 'pre'].includes(input.name))
pre = '\n'
if (input.attributes?.['data-type'] === 'mention') {
const acct = input.attributes['data-id']
if (acct)
return acct.startsWith('@') ? acct : `@${acct}`
}
if (input.name === 'code') {
if (input.parent?.name === 'pre') {
const lang = input.attributes.class?.replace('language-', '')
pre = `\`\`\`${lang || ''}\n`
post = '\n```'
}
else {
pre = '`'
post = '`'
}
}
else if (input.name === 'b' || input.name === 'strong') {
pre = '**'
post = '**'
}
else if (input.name === 'i' || input.name === 'em') {
pre = '*'
post = '*'
}
else if (input.name === 'del') {
pre = '~~'
post = '~~'
}
if ('children' in input)
body = (input.children as Node[]).map(n => treeToText(n)).join('')
if (input.name === 'img' || input.name === 'picture') {
if (input.attributes.class?.includes('custom-emoji')) {
const id = input.attributes['data-emoji-id'] ?? input.attributes.alt ?? input.attributes.title ?? 'unknown'
return id.match(emojiIdNeedsWrappingRE) ? `:${id}:` : id
}
if (input.attributes.class?.includes('iconify-emoji'))
return input.attributes.alt
}
return pre + body + post
}
// A tree transform function takes an ultrahtml Node object and returns
// new content that will replace the given node in the tree.
// Returning null removes the node from the tree.
// Strings get converted to text nodes.
// The input node's children have been transformed before the node itself
// gets transformed.
type Transform = (node: Node, root: Node) => (Node | string)[] | Node | string | null
// Helpers for transforming (filtering, modifying, ...) a parsed HTML tree
// by running the given chain of transform functions one-by-one.
function transformSync(doc: Node, transforms: Transform[]) {
function visit(node: Node, transform: Transform, root: Node) {
if (Array.isArray(node.children)) {
const children = [] as (Node | string)[]
for (let i = 0; i < node.children.length; i++) {
const result = visit(node.children[i], transform, root)
if (Array.isArray(result))
children.push(...result)
else if (result)
children.push(result)
}
node.children = children.map((value) => {
if (typeof value === 'string')
return { type: TEXT_NODE, value, parent: node }
value.parent = node
return value
})
}
return transform(node, root)
}
for (const transform of transforms)
doc = visit(doc, transform, doc) as Node
return doc
}
// A tree transform for sanitizing elements & their attributes.
type AttrSanitizers = Record<string, (value: string | undefined) => string | undefined>
function sanitize(allowedElements: Record<string, AttrSanitizers>): Transform {
return (node) => {
if (node.type !== ELEMENT_NODE)
return node
if (!Object.prototype.hasOwnProperty.call(allowedElements, node.name))
return null
const attrSanitizers = allowedElements[node.name]
const attrs = {} as Record<string, string>
for (const [name, func] of Object.entries(attrSanitizers)) {
const value = func(node.attributes[name])
if (value !== undefined)
attrs[name] = value
}
node.attributes = attrs
return node
}
}
function filterClasses(allowed: RegExp) {
return (c: string | undefined) => {
if (!c)
return undefined
return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ')
}
}
function keep(value: string | undefined) {
return value
}
function set(value: string) {
return () => value
}
function filterHref() {
const LINK_PROTOCOLS = new Set([
'http:',
'https:',
'dat:',
'dweb:',
'ipfs:',
'ipns:',
'ssb:',
'gopher:',
'xmpp:',
'magnet:',
'gemini:',
])
return (href: string | undefined) => {
if (href === undefined)
return undefined
// Allow relative links
if (href.startsWith('/') || href.startsWith('.'))
return href
let url
try {
url = new URL(href)
}
catch (err) {
if (err instanceof TypeError)
return undefined
throw err
}
if (LINK_PROTOCOLS.has(url.protocol))
return url.toString()
return '#'
}
}
function removeUnicodeEmoji(node: Node) {
if (node.type !== TEXT_NODE)
return node
let start = 0
const matches = [] as (string | Node)[]
findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => {
matches.push(result.slice(start).trimEnd())
start = result.length + match.match.length
return undefined
})
if (matches.length === 0)
return node
matches.push(node.value.slice(start))
return matches.filter(Boolean)
}
function transformUnicodeEmoji(node: Node) {
if (node.type !== TEXT_NODE)
return node
let start = 0
const matches = [] as (string | Node)[]
findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => {
const attrs = getEmojiAttributes(match)
matches.push(result.slice(start))
matches.push(h('img', { src: attrs.src, alt: attrs.alt, class: attrs.class }))
start = result.length + match.match.length
return undefined
})
if (matches.length === 0)
return node
matches.push(node.value.slice(start))
return matches.filter(Boolean)
}
function removeCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform
|
}
|
{
return (node) => {
if (node.type !== TEXT_NODE)
return node
const split = node.value.split(/\s?:([\w-]+?):/g)
if (split.length === 1)
return node
return split.map((name, i) => {
if (i % 2 === 0)
return name
const emoji = customEmojis[name] as mastodon.v1.CustomEmoji
if (!emoji)
return `:${name}:`
return ''
}).filter(Boolean)
}
|
identifier_body
|
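The `sanitize` factory in these records consumes an element-to-attribute-sanitizer map built from small helpers such as `keep`, `set`, and `filterClasses` (with `filterHref` similarly guarding link protocols). A sketch of how such a map composes; the element subset below is illustrative, not the file's actual allow-list.

// Attribute sanitizer: given the raw attribute value, return the cleaned
// value, or undefined to drop the attribute entirely.
type AttrSanitizer = (value: string | undefined) => string | undefined

const keep: AttrSanitizer = value => value
const set = (value: string): AttrSanitizer => () => value
const filterClasses = (allowed: RegExp): AttrSanitizer => (c) => {
  if (!c)
    return undefined
  return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ')
}

// Illustrative allow-list: <a> gets a forced rel, <span> keeps only
// mention/hashtag-style classes, <br> carries no attributes at all.
const allowedElements: Record<string, Record<string, AttrSanitizer>> = {
  a: { href: keep, rel: set('nofollow noopener noreferrer') },
  span: { class: filterClasses(/^(h-\S*|mention|hashtag)$/) },
  br: {},
}

console.log(allowedElements.span.class('mention evil-class')) // 'mention'
console.log(allowedElements.a.rel('whatever')) // 'nofollow noopener noreferrer'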
content-parse.ts
|
96;')
const classes = lang ? ` class="language-${lang}"` : ''
return `><pre><code${classes}>${code}</code></pre>`
})
.replace(/`([^`\n]*)`/g, (_1, raw) => {
return raw ? `<code>${htmlToText(raw).replace(/</g, '&lt;').replace(/>/g, '&gt;')}</code>` : ''
})
}
// Always sanitize the raw HTML data *after* it has been modified
const transforms: Transform[] = [
sanitizer,
...options.astTransforms || [],
]
if (hideEmojis) {
transforms.push(removeUnicodeEmoji)
transforms.push(removeCustomEmoji(options.emojis ?? {}))
}
else {
if (replaceUnicodeEmoji)
transforms.push(transformUnicodeEmoji)
transforms.push(replaceCustomEmoji(options.emojis ?? {}))
}
if (markdown)
transforms.push(transformMarkdown)
if (mentions?.length)
transforms.push(createTransformNamedMentions(mentions))
if (convertMentionLink)
transforms.push(transformMentionLink)
transforms.push(transformParagraphs)
if (collapseMentionLink)
transforms.push(transformCollapseMentions(status, inReplyToStatus))
return transformSync(parse(html), transforms)
}
/**
* Converts raw HTML from a Mastodon server to HTML for the Tiptap editor
*/
export function convertMastodonHTML(html: string, customEmojis: Record<string, mastodon.v1.CustomEmoji> = {}) {
const tree = parseMastodonHTML(html, {
emojis: customEmojis,
markdown: true,
convertMentionLink: true,
})
return render(tree)
}
export function htmlToText(html: string) {
try {
const tree = parse(html)
return (tree.children as Node[]).map(n => treeToText(n)).join('').trim()
}
catch (err) {
console.error(err)
return ''
}
}
export function recursiveTreeToText(input: Node): string {
if (input && input.children && input.children.length > 0)
return input.children.map((n: Node) => recursiveTreeToText(n)).join('')
else
return treeToText(input)
}
const emojiIdNeedsWrappingRE = /^(\d|\w|-|_)+$/
export function treeToText(input: Node): string {
let pre = ''
let body = ''
let post = ''
if (input.type === TEXT_NODE)
return decode(input.value)
if (input.name === 'br')
return '\n'
if (['p', 'pre'].includes(input.name))
pre = '\n'
if (input.attributes?.['data-type'] === 'mention') {
const acct = input.attributes['data-id']
if (acct)
return acct.startsWith('@') ? acct : `@${acct}`
}
if (input.name === 'code') {
if (input.parent?.name === 'pre') {
const lang = input.attributes.class?.replace('language-', '')
pre = `\`\`\`${lang || ''}\n`
post = '\n```'
}
else {
pre = '`'
post = '`'
}
}
else if (input.name === 'b' || input.name === 'strong') {
pre = '**'
post = '**'
}
else if (input.name === 'i' || input.name === 'em') {
pre = '*'
post = '*'
}
else if (input.name === 'del') {
pre = '~~'
post = '~~'
}
if ('children' in input)
body = (input.children as Node[]).map(n => treeToText(n)).join('')
if (input.name === 'img' || input.name === 'picture') {
if (input.attributes.class?.includes('custom-emoji')) {
const id = input.attributes['data-emoji-id'] ?? input.attributes.alt ?? input.attributes.title ?? 'unknown'
return id.match(emojiIdNeedsWrappingRE) ? `:${id}:` : id
}
if (input.attributes.class?.includes('iconify-emoji'))
return input.attributes.alt
}
return pre + body + post
}
// A tree transform function takes an ultrahtml Node object and returns
// new content that will replace the given node in the tree.
// Returning null removes the node from the tree.
// Strings get converted to text nodes.
// The input node's children have been transformed before the node itself
// gets transformed.
type Transform = (node: Node, root: Node) => (Node | string)[] | Node | string | null
// Helpers for transforming (filtering, modifying, ...) a parsed HTML tree
// by running the given chain of transform functions one-by-one.
function transformSync(doc: Node, transforms: Transform[]) {
function visit(node: Node, transform: Transform, root: Node) {
if (Array.isArray(node.children)) {
const children = [] as (Node | string)[]
for (let i = 0; i < node.children.length; i++) {
const result = visit(node.children[i], transform, root)
if (Array.isArray(result))
children.push(...result)
else if (result)
children.push(result)
}
node.children = children.map((value) => {
if (typeof value === 'string')
return { type: TEXT_NODE, value, parent: node }
value.parent = node
return value
})
}
return transform(node, root)
}
for (const transform of transforms)
doc = visit(doc, transform, doc) as Node
return doc
}
// A tree transform for sanitizing elements & their attributes.
type AttrSanitizers = Record<string, (value: string | undefined) => string | undefined>
function sanitize(allowedElements: Record<string, AttrSanitizers>): Transform {
return (node) => {
if (node.type !== ELEMENT_NODE)
return node
if (!Object.prototype.hasOwnProperty.call(allowedElements, node.name))
return null
const attrSanitizers = allowedElements[node.name]
const attrs = {} as Record<string, string>
for (const [name, func] of Object.entries(attrSanitizers)) {
const value = func(node.attributes[name])
if (value !== undefined)
attrs[name] = value
}
node.attributes = attrs
return node
}
}
function filterClasses(allowed: RegExp) {
return (c: string | undefined) => {
if (!c)
return undefined
return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ')
}
}
function keep(value: string | undefined) {
return value
}
function set(value: string) {
return () => value
}
function filterHref() {
const LINK_PROTOCOLS = new Set([
'http:',
'https:',
'dat:',
'dweb:',
'ipfs:',
'ipns:',
'ssb:',
'gopher:',
'xmpp:',
'magnet:',
'gemini:',
])
return (href: string | undefined) => {
if (href === undefined)
return undefined
// Allow relative links
if (href.startsWith('/') || href.startsWith('.'))
return href
let url
try {
url = new URL(href)
}
catch (err) {
if (err instanceof TypeError)
return undefined
throw err
}
if (LINK_PROTOCOLS.has(url.protocol))
return url.toString()
return '#'
}
}
|
let start = 0
const matches = [] as (string | Node)[]
findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => {
matches.push(result.slice(start).trimEnd())
start = result.length + match.match.length
return undefined
})
if (matches.length === 0)
return node
matches.push(node.value.slice(start))
return matches.filter(Boolean)
}
function transformUnicodeEmoji(node: Node) {
if (node.type !== TEXT_NODE)
return node
let start = 0
const matches = [] as (string | Node)[]
findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => {
const attrs = getEmojiAttributes(match)
matches.push(result.slice(start))
matches.push(h('img', { src: attrs.src, alt: attrs.alt, class: attrs.class }))
start = result.length + match.match.length
return undefined
})
if (matches.length === 0)
return node
matches.push(node.value.slice(start))
return matches.filter(Boolean)
}
function removeCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform {
return (node) => {
if (node.type !== TEXT_NODE)
return node
const split = node.value.split(/\s?:([\w-]+?):/g)
if (split.length === 1)
return node
return split.map((name, i) => {
if (i % 2 === 0)
return name
const emoji = customEmojis[name] as mastodon.v1.CustomEmoji
if (!emoji)
return `:${name}:`
return ''
}).filter(Boolean)
}
}
function
|
function removeUnicodeEmoji(node: Node) {
if (node.type !== TEXT_NODE)
return node
|
random_line_split
|
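`removeCustomEmoji` in the record above leans on a subtle `String.prototype.split` behavior: when the pattern contains a capturing group, the captures are interleaved into the result, so odd indices are the `:shortcode:` names. A quick self-contained demonstration with the same regex; `known` is an illustrative stand-in for the custom-emoji map.

// With a capturing group in the pattern, split() interleaves the captures
// into the result: even indices are plain text, odd indices are shortcodes.
const splitRE = /\s?:([\w-]+?):/g

const parts = 'hello :wave: world :blob_cat:'.split(splitRE)
console.log(parts) // [ 'hello', 'wave', ' world', 'blob_cat', '' ]

const known = new Set(['wave']) // stand-in for the custom-emoji map
const rendered = parts
  .map((part, i) => {
    if (i % 2 === 0)
      return part // plain text between shortcodes
    return known.has(part) ? `<img alt=":${part}:">` : `:${part}:`
  })
  .filter(Boolean)
  .join('')
console.log(rendered) // 'hello<img alt=":wave:"> world:blob_cat:'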
3rd_person.rs
|
//! Example 03. 3rd person walk simulator.
//!
//! Difficulty: Advanced.
//!
//! This example is based on the async example, because it requires loading a decent
//! amount of resources, which might be slow on some machines.
//!
//! In this example we'll create a simple 3rd-person game with a character that can idle,
//! walk, or jump.
//!
//! This example also demonstrates the power of animation blending machines. Animation
//! blending machines are used in all modern games to create complex animations from a set
//! of simple ones.
//!
//! TODO: Improve explanations. Some places can be explained better.
//!
//! Known bugs: Sometimes the character will jump, but the jumping animation is not playing.
//!
//! Possible improvements:
//! - Smart camera - a camera which will not penetrate walls.
//! - Separate animation machines for the upper and lower body - the upper machine might be
//! for combat, the lower - for locomotion.
//! - Tons of them, this is a simple example after all.
pub mod shared;
use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene};
use std::time::Instant;
use winit::keyboard::KeyCode;
use fyrox::{
core::{
algebra::Vector2,
log::{Log, MessageKind},
},
engine::GraphicsContext,
event::{Event, WindowEvent},
event_loop::ControlFlow,
gui::{
message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage,
widget::WidgetMessage,
},
renderer::QualitySettings,
utils::translate_event,
};
fn
|
() {
let (mut game, event_loop) = Game::new("Example 03 - 3rd person");
// Create simple user interface that will show some useful info.
let interface = create_ui(
&mut game.engine.user_interface.build_ctx(),
Vector2::new(100.0, 100.0),
);
let mut previous = Instant::now();
let fixed_timestep = 1.0 / 60.0;
let mut lag = 0.0;
// Finally run our event loop which will respond to OS and window events and update
// engine state accordingly.
event_loop.run(move |event, window_target, control_flow| {
match event {
Event::MainEventsCleared => {
// This is the main game loop - it has a fixed time step, which means that game
// code will run at a fixed rate even if the renderer can't deliver the desired
// 60 fps.
let elapsed = previous.elapsed();
previous = Instant::now();
lag += elapsed.as_secs_f32();
while lag >= fixed_timestep {
// ************************
// Put your game logic here.
// ************************
// Check each frame if our scene is created - here we're just trying to lock the context
// without blocking; it is important for the main thread to stay responsive while the other
// thread is still loading data.
if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() {
if let Some(load_result) = load_context.scene_data.take() {
// Add scene to engine - engine will take ownership over scene and will return
// you a handle to scene which can be used later on to borrow it and do some
// actions you need.
game.game_scene = Some(GameScene {
scene: game.engine.scenes.add(load_result.scene),
player: load_result.player,
});
// Once scene is loaded, we should hide progress bar and text.
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_bar,
MessageDirection::ToWidget,
false,
));
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_text,
MessageDirection::ToWidget,
false,
));
}
// Report progress in UI.
game.engine
.user_interface
.send_message(ProgressBarMessage::progress(
interface.progress_bar,
MessageDirection::ToWidget,
load_context.progress,
));
game.engine.user_interface.send_message(TextMessage::text(
interface.progress_text,
MessageDirection::ToWidget,
format!(
"Loading scene: {}%\n{}",
load_context.progress * 100.0,
load_context.message
),
));
}
// Update scene only if it is loaded.
if let Some(game_scene) = game.game_scene.as_mut() {
// Use stored scene handle to borrow a mutable reference of scene in
// engine.
let scene = &mut game.engine.scenes[game_scene.scene];
game_scene.player.update(scene, fixed_timestep);
}
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
let debug_text = format!(
"Example 03 - 3rd Person\n\
[W][S][A][D] - walk, [SPACE] - jump.\n\
Use [1][2][3][4] to select graphics quality.\n\
{}",
ctx.renderer.get_statistics()
);
game.engine.user_interface.send_message(TextMessage::text(
interface.debug_text,
MessageDirection::ToWidget,
debug_text,
));
}
// It is very important to "pump" messages from the UI. Even if you don't need to
// respond to such messages, you should call this method, otherwise the UI
// might behave very strangely.
while let Some(_ui_event) = game.engine.user_interface.poll_message() {
// ************************
// Put your data model synchronization code here. It should
// take message and update data in your game according to
// changes in UI.
// ************************
}
game.engine
.update(fixed_timestep, control_flow, &mut lag, Default::default());
lag -= fixed_timestep;
}
// Rendering must be explicitly requested and handled after RedrawRequested event is received.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
ctx.window.request_redraw();
}
}
Event::Resumed => {
game.engine
.initialize_graphics_context(window_target)
.unwrap();
}
Event::Suspended => {
game.engine.destroy_graphics_context().unwrap();
}
Event::RedrawRequested(_) => {
// Run renderer at max speed - it is not tied to game code.
game.engine.render().unwrap();
}
Event::LoopDestroyed => {
println!("{:?}", fyrox::core::profiler::print());
}
Event::WindowEvent { event, .. } => {
match &event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(size) => {
// It is very important to handle Resized event from window, because
// renderer knows nothing about window size - it must be notified
// directly when window size has changed.
if let Err(e) = game.engine.set_frame_size((*size).into()) {
Log::writeln(
MessageKind::Error,
format!("Unable to set frame size: {:?}", e),
);
}
// Root UI node should be resized too, otherwise progress bar will stay
// in wrong position after resize.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context
{
let size = size.to_logical(ctx.window.scale_factor());
game.engine
.user_interface
.send_message(WidgetMessage::width(
interface.root,
MessageDirection::ToWidget,
size.width,
));
game.engine
.user_interface
.send_message(WidgetMessage::height(
interface.root,
MessageDirection::ToWidget,
size.height,
));
}
}
WindowEvent::KeyboardInput { event: input, .. } => {
// Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32)
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene.player.handle_key_event(input, fixed_timestep);
}
let settings = match input.physical_key {
KeyCode::Digit1 => Some(QualitySettings::ultra()),
KeyCode::Digit2 => Some(QualitySettings::high()),
KeyCode::Digit3 => Some(QualitySettings::medium()),
KeyCode::Digit4 => Some(QualitySettings::low()),
_ => None,
};
if let Some(settings) = settings {
if let GraphicsContext::Initialized(ref mut ctx) =
game.engine.graphics_context
{
ctx.renderer
.set_quality_settings(&fix_shadows_distance(settings))
.unwrap();
}
}
}
_ => (),
}
// It is very important to "feed" user interface (UI) with events coming
// from main window, otherwise UI won't respond to mouse, keyboard, or any
// other event.
if let Some(os_event) = translate_event(&event) {
game.engine.user_interface.process_os_event(&os_event);
}
}
Event::DeviceEvent { event, .. } => {
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene
.player
.handle_device_event(&event, fixed_timestep);
}
}
_ => *control_flow = ControlFlow::Poll,
}
});
}
|
main
|
identifier_name
|
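The `main` body in the 3rd_person.rs record above is a textbook fixed-timestep loop: real elapsed time accumulates in `lag`, and game logic ticks in `fixed_timestep` slices no matter how fast the renderer runs. A dependency-free sketch of that accumulator pattern; the `update` stub and the bounded frame loop stand in for the engine call and the OS event loop.

use std::time::Instant;

fn update(dt: f32, ticks: &mut u32) {
    // Game logic here always sees the same dt, frame rate notwithstanding.
    *ticks += 1;
    let _ = dt;
}

fn main() {
    let fixed_timestep = 1.0 / 60.0;
    let mut lag = 0.0f32;
    let mut previous = Instant::now();
    let mut ticks = 0u32;

    // Stand-in for the OS event loop: run a few frames and stop.
    for _frame in 0..10 {
        let elapsed = previous.elapsed();
        previous = Instant::now();
        lag += elapsed.as_secs_f32();

        // Consume accumulated time in fixed slices; a slow frame simply
        // runs update() several times to catch up.
        while lag >= fixed_timestep {
            update(fixed_timestep, &mut ticks);
            lag -= fixed_timestep;
        }
        // Rendering would happen here, once per frame.
        std::thread::sleep(std::time::Duration::from_millis(20));
    }
    println!("ran {ticks} fixed updates");
}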
3rd_person.rs
|
//! Example 03. 3rd person walk simulator.
//!
//! Difficulty: Advanced.
//!
//! This example is based on the async example, because it requires loading a decent
//! amount of resources, which might be slow on some machines.
//!
//! In this example we'll create a simple 3rd-person game with a character that can idle,
//! walk, or jump.
//!
//! This example also demonstrates the power of animation blending machines. Animation
//! blending machines are used in all modern games to create complex animations from a set
//! of simple ones.
//!
//! TODO: Improve explanations. Some places can be explained better.
//!
//! Known bugs: Sometimes the character will jump, but the jumping animation is not playing.
//!
//! Possible improvements:
//! - Smart camera - a camera which will not penetrate walls.
//! - Separate animation machines for the upper and lower body - the upper machine might be
//! for combat, the lower - for locomotion.
//! - Tons of them, this is a simple example after all.
pub mod shared;
use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene};
use std::time::Instant;
use winit::keyboard::KeyCode;
use fyrox::{
core::{
algebra::Vector2,
log::{Log, MessageKind},
},
engine::GraphicsContext,
event::{Event, WindowEvent},
event_loop::ControlFlow,
gui::{
message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage,
widget::WidgetMessage,
},
renderer::QualitySettings,
utils::translate_event,
};
fn main() {
let (mut game, event_loop) = Game::new("Example 03 - 3rd person");
// Create simple user interface that will show some useful info.
let interface = create_ui(
&mut game.engine.user_interface.build_ctx(),
Vector2::new(100.0, 100.0),
);
let mut previous = Instant::now();
let fixed_timestep = 1.0 / 60.0;
let mut lag = 0.0;
// Finally run our event loop which will respond to OS and window events and update
// engine state accordingly.
event_loop.run(move |event, window_target, control_flow| {
match event {
Event::MainEventsCleared => {
// This is the main game loop - it has a fixed time step, which means that game
// code will run at a fixed rate even if the renderer can't deliver the desired
// 60 fps.
let elapsed = previous.elapsed();
previous = Instant::now();
lag += elapsed.as_secs_f32();
while lag >= fixed_timestep {
// ************************
// Put your game logic here.
// ************************
// Check each frame if our scene is created - here we're just trying to lock the context
// without blocking; it is important for the main thread to stay responsive while the other
// thread is still loading data.
if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() {
if let Some(load_result) = load_context.scene_data.take() {
// Add scene to engine - engine will take ownership over scene and will return
// you a handle to scene which can be used later on to borrow it and do some
// actions you need.
game.game_scene = Some(GameScene {
scene: game.engine.scenes.add(load_result.scene),
player: load_result.player,
});
// Once scene is loaded, we should hide progress bar and text.
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_bar,
MessageDirection::ToWidget,
false,
));
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_text,
MessageDirection::ToWidget,
false,
));
}
// Report progress in UI.
game.engine
.user_interface
.send_message(ProgressBarMessage::progress(
interface.progress_bar,
MessageDirection::ToWidget,
load_context.progress,
));
game.engine.user_interface.send_message(TextMessage::text(
interface.progress_text,
MessageDirection::ToWidget,
format!(
"Loading scene: {}%\n{}",
load_context.progress * 100.0,
load_context.message
),
));
}
// Update scene only if it is loaded.
if let Some(game_scene) = game.game_scene.as_mut() {
// Use stored scene handle to borrow a mutable reference of scene in
// engine.
let scene = &mut game.engine.scenes[game_scene.scene];
game_scene.player.update(scene, fixed_timestep);
}
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
let debug_text = format!(
"Example 03 - 3rd Person\n\
[W][S][A][D] - walk, [SPACE] - jump.\n\
Use [1][2][3][4] to select graphics quality.\n\
{}",
ctx.renderer.get_statistics()
);
game.engine.user_interface.send_message(TextMessage::text(
interface.debug_text,
MessageDirection::ToWidget,
debug_text,
));
}
// It is very important to "pump" messages from the UI. Even if you don't need to
// respond to such messages, you should call this method, otherwise the UI
// might behave very strangely.
while let Some(_ui_event) = game.engine.user_interface.poll_message() {
// ************************
// Put your data model synchronization code here. It should
// take message and update data in your game according to
// changes in UI.
// ************************
}
game.engine
.update(fixed_timestep, control_flow, &mut lag, Default::default());
lag -= fixed_timestep;
}
// Rendering must be explicitly requested and handled after RedrawRequested event is received.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
ctx.window.request_redraw();
}
}
Event::Resumed => {
game.engine
.initialize_graphics_context(window_target)
.unwrap();
}
Event::Suspended => {
game.engine.destroy_graphics_context().unwrap();
}
Event::RedrawRequested(_) => {
// Run renderer at max speed - it is not tied to game code.
game.engine.render().unwrap();
}
Event::LoopDestroyed => {
println!("{:?}", fyrox::core::profiler::print());
}
Event::WindowEvent { event, .. } => {
match &event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(size) => {
// It is very important to handle Resized event from window, because
// renderer knows nothing about window size - it must be notified
// directly when window size has changed.
if let Err(e) = game.engine.set_frame_size((*size).into()) {
Log::writeln(
MessageKind::Error,
format!("Unable to set frame size: {:?}", e),
);
}
// Root UI node should be resized too, otherwise progress bar will stay
// in wrong position after resize.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context
{
let size = size.to_logical(ctx.window.scale_factor());
game.engine
.user_interface
.send_message(WidgetMessage::width(
interface.root,
MessageDirection::ToWidget,
size.width,
));
game.engine
.user_interface
.send_message(WidgetMessage::height(
interface.root,
MessageDirection::ToWidget,
size.height,
));
}
}
WindowEvent::KeyboardInput { event: input, .. } => {
// Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32)
if let Some(game_scene) = game.game_scene.as_mut()
|
let settings = match input.physical_key {
KeyCode::Digit1 => Some(QualitySettings::ultra()),
KeyCode::Digit2 => Some(QualitySettings::high()),
KeyCode::Digit3 => Some(QualitySettings::medium()),
KeyCode::Digit4 => Some(QualitySettings::low()),
_ => None,
};
if let Some(settings) = settings {
if let GraphicsContext::Initialized(ref mut ctx) =
game.engine.graphics_context
{
ctx.renderer
.set_quality_settings(&fix_shadows_distance(settings))
.unwrap();
}
}
}
_ => (),
}
// It is very important to "feed" user interface (UI) with events coming
// from main window, otherwise UI won't respond to mouse, keyboard, or any
// other event.
if let Some(os_event) = translate_event(&event) {
game.engine.user_interface.process_os_event(&os_event);
}
}
Event::DeviceEvent { event, .. } => {
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene
.player
.handle_device_event(&event, fixed_timestep);
}
}
_ => *control_flow = ControlFlow::Poll,
}
});
}
|
{
game_scene.player.handle_key_event(input, fixed_timestep);
}
|
conditional_block
|
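The keyboard branch in this record maps digit keys to `Option<QualitySettings>` and applies the preset only when the graphics context is initialized. A self-contained sketch of that match-to-`Option` pattern; the `Quality` enum here is a stand-in for fyrox's `QualitySettings` presets, and the key strings stand in for `KeyCode` variants.

// Stand-in for fyrox's QualitySettings presets.
#[derive(Debug)]
enum Quality { Ultra, High, Medium, Low }

fn quality_for_key(key: &str) -> Option<Quality> {
    // Unhandled keys fall through to None, so the caller can ignore them.
    match key {
        "1" => Some(Quality::Ultra),
        "2" => Some(Quality::High),
        "3" => Some(Quality::Medium),
        "4" => Some(Quality::Low),
        _ => None,
    }
}

fn main() {
    for key in ["1", "4", "w"] {
        if let Some(settings) = quality_for_key(key) {
            // In the example this is where renderer.set_quality_settings runs.
            println!("key {key} -> apply {settings:?}");
        } else {
            println!("key {key} -> not a quality toggle");
        }
    }
}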
3rd_person.rs
|
//! Example 03. 3rd person walk simulator.
//!
//! Difficulty: Advanced.
//!
//! This example is based on the async example, because it requires loading a decent
//! amount of resources, which might be slow on some machines.
//!
//! In this example we'll create a simple 3rd-person game with a character that can idle,
//! walk, or jump.
//!
//! This example also demonstrates the power of animation blending machines. Animation
//! blending machines are used in all modern games to create complex animations from a set
//! of simple ones.
//!
//! TODO: Improve explanations. Some places can be explained better.
//!
//! Known bugs: Sometimes the character will jump, but the jumping animation is not playing.
//!
//! Possible improvements:
//! - Smart camera - a camera which will not penetrate walls.
//! - Separate animation machines for the upper and lower body - the upper machine might be
//! for combat, the lower - for locomotion.
//! - Tons of them, this is a simple example after all.
pub mod shared;
use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene};
use std::time::Instant;
use winit::keyboard::KeyCode;
use fyrox::{
core::{
algebra::Vector2,
log::{Log, MessageKind},
},
engine::GraphicsContext,
event::{Event, WindowEvent},
event_loop::ControlFlow,
gui::{
message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage,
widget::WidgetMessage,
},
renderer::QualitySettings,
utils::translate_event,
};
fn main()
|
// 60 fps.
let elapsed = previous.elapsed();
previous = Instant::now();
lag += elapsed.as_secs_f32();
while lag >= fixed_timestep {
// ************************
// Put your game logic here.
// ************************
// Check each frame if our scene is created - here we're just trying to lock the context
// without blocking; it is important for the main thread to stay responsive while the other
// thread is still loading data.
if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() {
if let Some(load_result) = load_context.scene_data.take() {
// Add scene to engine - engine will take ownership over scene and will return
// you a handle to scene which can be used later on to borrow it and do some
// actions you need.
game.game_scene = Some(GameScene {
scene: game.engine.scenes.add(load_result.scene),
player: load_result.player,
});
// Once scene is loaded, we should hide progress bar and text.
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_bar,
MessageDirection::ToWidget,
false,
));
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_text,
MessageDirection::ToWidget,
false,
));
}
// Report progress in UI.
game.engine
.user_interface
.send_message(ProgressBarMessage::progress(
interface.progress_bar,
MessageDirection::ToWidget,
load_context.progress,
));
game.engine.user_interface.send_message(TextMessage::text(
interface.progress_text,
MessageDirection::ToWidget,
format!(
"Loading scene: {}%\n{}",
load_context.progress * 100.0,
load_context.message
),
));
}
// Update scene only if it is loaded.
if let Some(game_scene) = game.game_scene.as_mut() {
// Use stored scene handle to borrow a mutable reference of scene in
// engine.
let scene = &mut game.engine.scenes[game_scene.scene];
game_scene.player.update(scene, fixed_timestep);
}
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
let debug_text = format!(
"Example 03 - 3rd Person\n\
[W][S][A][D] - walk, [SPACE] - jump.\n\
Use [1][2][3][4] to select graphics quality.\n\
{}",
ctx.renderer.get_statistics()
);
game.engine.user_interface.send_message(TextMessage::text(
interface.debug_text,
MessageDirection::ToWidget,
debug_text,
));
}
// It is very important to "pump" messages from the UI. Even if you don't need to
// respond to such messages, you should call this method, otherwise the UI
// might behave very strangely.
while let Some(_ui_event) = game.engine.user_interface.poll_message() {
// ************************
// Put your data model synchronization code here. It should
// take message and update data in your game according to
// changes in UI.
// ************************
}
game.engine
.update(fixed_timestep, control_flow, &mut lag, Default::default());
lag -= fixed_timestep;
}
// Rendering must be explicitly requested and handled after RedrawRequested event is received.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
ctx.window.request_redraw();
}
}
Event::Resumed => {
game.engine
.initialize_graphics_context(window_target)
.unwrap();
}
Event::Suspended => {
game.engine.destroy_graphics_context().unwrap();
}
Event::RedrawRequested(_) => {
// Run renderer at max speed - it is not tied to game code.
game.engine.render().unwrap();
}
Event::LoopDestroyed => {
println!("{:?}", fyrox::core::profiler::print());
}
Event::WindowEvent { event, .. } => {
match &event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(size) => {
// It is very important to handle Resized event from window, because
// renderer knows nothing about window size - it must be notified
// directly when window size has changed.
if let Err(e) = game.engine.set_frame_size((*size).into()) {
Log::writeln(
MessageKind::Error,
format!("Unable to set frame size: {:?}", e),
);
}
// Root UI node should be resized too, otherwise progress bar will stay
// in wrong position after resize.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context
{
let size = size.to_logical(ctx.window.scale_factor());
game.engine
.user_interface
.send_message(WidgetMessage::width(
interface.root,
MessageDirection::ToWidget,
size.width,
));
game.engine
.user_interface
.send_message(WidgetMessage::height(
interface.root,
MessageDirection::ToWidget,
size.height,
));
}
}
WindowEvent::KeyboardInput { event: input, .. } => {
// Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32)
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene.player.handle_key_event(input, fixed_timestep);
}
let settings = match input.physical_key {
KeyCode::Digit1 => Some(QualitySettings::ultra()),
KeyCode::Digit2 => Some(QualitySettings::high()),
KeyCode::Digit3 => Some(QualitySettings::medium()),
KeyCode::Digit4 => Some(QualitySettings::low()),
_ => None,
};
if let Some(settings) = settings {
if let GraphicsContext::Initialized(ref mut ctx) =
game.engine.graphics_context
{
ctx.renderer
.set_quality_settings(&fix_shadows_distance(settings))
.unwrap();
}
}
}
_ => (),
}
// It is very important to "feed" user interface (UI) with events coming
// from main window, otherwise UI won't respond to mouse, keyboard, or any
// other event.
if let Some(os_event) = translate_event(&event) {
game.engine.user_interface.process_os_event(&os_event);
}
}
Event::DeviceEvent { event, .. } => {
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene
.player
.handle_device_event(&event, fixed_timestep);
}
}
_ => *control_flow = ControlFlow::Poll,
}
});
}
|
{
let (mut game, event_loop) = Game::new("Example 03 - 3rd person");
// Create simple user interface that will show some useful info.
let interface = create_ui(
&mut game.engine.user_interface.build_ctx(),
Vector2::new(100.0, 100.0),
);
let mut previous = Instant::now();
let fixed_timestep = 1.0 / 60.0;
let mut lag = 0.0;
// Finally run our event loop which will respond to OS and window events and update
// engine state accordingly.
event_loop.run(move |event, window_target, control_flow| {
match event {
Event::MainEventsCleared => {
// This is the main game loop - it has a fixed time step, which means that game
// code will run at a fixed rate even if the renderer can't deliver the desired
|
identifier_body
|
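A small detail these 3rd_person.rs records repeat: on `WindowEvent::Resized`, the physical pixel size is converted to logical units with the window's scale factor before the UI root is resized. A dependency-free sketch of that conversion; the structs below mimic (but are not) winit's `PhysicalSize`/`LogicalSize`, whose `to_logical` performs the same division.

// Physical pixels as delivered by the OS on a resize event.
struct PhysicalSize { width: u32, height: u32 }
// Logical units the UI layout works in.
struct LogicalSize { width: f32, height: f32 }

impl PhysicalSize {
    // Mirrors the idea behind winit's to_logical: divide by the scale factor.
    fn to_logical(&self, scale_factor: f64) -> LogicalSize {
        LogicalSize {
            width: (self.width as f64 / scale_factor) as f32,
            height: (self.height as f64 / scale_factor) as f32,
        }
    }
}

fn main() {
    let physical = PhysicalSize { width: 2560, height: 1440 };
    let logical = physical.to_logical(2.0); // e.g. a HiDPI display
    // The UI root would receive these values via WidgetMessage::width/height.
    println!("logical size: {} x {}", logical.width, logical.height);
}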
3rd_person.rs
|
//! Example 03. 3rd person walk simulator.
//!
//! Difficulty: Advanced.
//!
//! This example is based on the async example, because it requires loading a decent
//! amount of resources, which might be slow on some machines.
//!
//! In this example we'll create a simple 3rd-person game with a character that can idle,
//! walk, or jump.
//!
|
//! blending machines are used in all modern games to create complex animations from a set
//! of simple ones.
//!
//! TODO: Improve explanations. Some places can be explained better.
//!
//! Known bugs: Sometimes the character will jump, but the jumping animation is not playing.
//!
//! Possible improvements:
//! - Smart camera - a camera which will not penetrate walls.
//! - Separate animation machines for the upper and lower body - the upper machine might be
//! for combat, the lower - for locomotion.
//! - Tons of them, this is a simple example after all.
pub mod shared;
use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene};
use std::time::Instant;
use winit::keyboard::KeyCode;
use fyrox::{
core::{
algebra::Vector2,
log::{Log, MessageKind},
},
engine::GraphicsContext,
event::{Event, WindowEvent},
event_loop::ControlFlow,
gui::{
message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage,
widget::WidgetMessage,
},
renderer::QualitySettings,
utils::translate_event,
};
fn main() {
let (mut game, event_loop) = Game::new("Example 03 - 3rd person");
// Create simple user interface that will show some useful info.
let interface = create_ui(
&mut game.engine.user_interface.build_ctx(),
Vector2::new(100.0, 100.0),
);
let mut previous = Instant::now();
let fixed_timestep = 1.0 / 60.0;
let mut lag = 0.0;
// Finally run our event loop which will respond to OS and window events and update
// engine state accordingly.
event_loop.run(move |event, window_target, control_flow| {
match event {
Event::MainEventsCleared => {
// This is the main game loop - it has a fixed time step, which means that game
// code will run at a fixed rate even if the renderer can't deliver the desired
// 60 fps.
let elapsed = previous.elapsed();
previous = Instant::now();
lag += elapsed.as_secs_f32();
while lag >= fixed_timestep {
// ************************
// Put your game logic here.
// ************************
// Check each frame if our scene is created - here we're just trying to lock the context
// without blocking; it is important for the main thread to stay responsive while the other
// thread is still loading data.
if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() {
if let Some(load_result) = load_context.scene_data.take() {
// Add scene to engine - engine will take ownership over scene and will return
// you a handle to scene which can be used later on to borrow it and do some
// actions you need.
game.game_scene = Some(GameScene {
scene: game.engine.scenes.add(load_result.scene),
player: load_result.player,
});
// Once scene is loaded, we should hide progress bar and text.
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_bar,
MessageDirection::ToWidget,
false,
));
game.engine
.user_interface
.send_message(WidgetMessage::visibility(
interface.progress_text,
MessageDirection::ToWidget,
false,
));
}
// Report progress in UI.
game.engine
.user_interface
.send_message(ProgressBarMessage::progress(
interface.progress_bar,
MessageDirection::ToWidget,
load_context.progress,
));
game.engine.user_interface.send_message(TextMessage::text(
interface.progress_text,
MessageDirection::ToWidget,
format!(
"Loading scene: {}%\n{}",
load_context.progress * 100.0,
load_context.message
),
));
}
// Update scene only if it is loaded.
if let Some(game_scene) = game.game_scene.as_mut() {
// Use stored scene handle to borrow a mutable reference of scene in
// engine.
let scene = &mut game.engine.scenes[game_scene.scene];
game_scene.player.update(scene, fixed_timestep);
}
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
let debug_text = format!(
"Example 03 - 3rd Person\n\
[W][S][A][D] - walk, [SPACE] - jump.\n\
Use [1][2][3][4] to select graphics quality.\n\
{}",
ctx.renderer.get_statistics()
);
game.engine.user_interface.send_message(TextMessage::text(
interface.debug_text,
MessageDirection::ToWidget,
debug_text,
));
}
// It is very important to "pump" messages from the UI. Even if you don't need to
// respond to such messages, you should call this method, otherwise the UI
// might behave very strangely.
while let Some(_ui_event) = game.engine.user_interface.poll_message() {
// ************************
// Put your data model synchronization code here. It should
// take message and update data in your game according to
// changes in UI.
// ************************
}
game.engine
.update(fixed_timestep, control_flow, &mut lag, Default::default());
lag -= fixed_timestep;
}
// Rendering must be explicitly requested and handled after RedrawRequested event is received.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context {
ctx.window.request_redraw();
}
}
Event::Resumed => {
game.engine
.initialize_graphics_context(window_target)
.unwrap();
}
Event::Suspended => {
game.engine.destroy_graphics_context().unwrap();
}
Event::RedrawRequested(_) => {
// Run renderer at max speed - it is not tied to game code.
game.engine.render().unwrap();
}
Event::LoopDestroyed => {
println!("{:?}", fyrox::core::profiler::print());
}
Event::WindowEvent { event, .. } => {
match &event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(size) => {
// It is very important to handle Resized event from window, because
// renderer knows nothing about window size - it must be notified
// directly when window size has changed.
if let Err(e) = game.engine.set_frame_size((*size).into()) {
Log::writeln(
MessageKind::Error,
format!("Unable to set frame size: {:?}", e),
);
}
// Root UI node should be resized too, otherwise progress bar will stay
// in wrong position after resize.
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context
{
let size = size.to_logical(ctx.window.scale_factor());
game.engine
.user_interface
.send_message(WidgetMessage::width(
interface.root,
MessageDirection::ToWidget,
size.width,
));
game.engine
.user_interface
.send_message(WidgetMessage::height(
interface.root,
MessageDirection::ToWidget,
size.height,
));
}
}
WindowEvent::KeyboardInput { event: input, .. } => {
// Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32)
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene.player.handle_key_event(input, fixed_timestep);
}
let settings = match input.physical_key {
KeyCode::Digit1 => Some(QualitySettings::ultra()),
KeyCode::Digit2 => Some(QualitySettings::high()),
KeyCode::Digit3 => Some(QualitySettings::medium()),
KeyCode::Digit4 => Some(QualitySettings::low()),
_ => None,
};
if let Some(settings) = settings {
if let GraphicsContext::Initialized(ref mut ctx) =
game.engine.graphics_context
{
ctx.renderer
.set_quality_settings(&fix_shadows_distance(settings))
.unwrap();
}
}
}
_ => (),
}
// It is very important to "feed" user interface (UI) with events coming
// from main window, otherwise UI won't respond to mouse, keyboard, or any
// other event.
if let Some(os_event) = translate_event(&event) {
game.engine.user_interface.process_os_event(&os_event);
}
}
Event::DeviceEvent { event, .. } => {
if let Some(game_scene) = game.game_scene.as_mut() {
game_scene
.player
.handle_device_event(&event, fixed_timestep);
}
}
_ => *control_flow = ControlFlow::Poll,
}
});
}
|
//! Also this example demonstrates the power of animation blending machines. Animation
|
random_line_split
|
cargo-deploy.rs
|
).into_iter() {
if let cargo_metadata::Message::CompilerArtifact(artifact) =
message.unwrap_or_else(|_| panic!("Failed to parse output of cargo"))
{
if artifact.target.kind == vec![String::from("bin")]
|| artifact.target.kind == vec![String::from("example")]
{
bin.push((
artifact.target.name,
artifact.filenames.into_iter().next().unwrap(),
));
// We're assuming the first filename is the binary – .dSYM etc seem to always be second?
}
}
}
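    // For reference, an (abbreviated, illustrative) compiler-artifact message
    // that this loop consumes looks like:
    // {"reason":"compiler-artifact",
    //  "target":{"kind":["bin"],"name":"my-app", ...},
    //  "filenames":["target/debug/my-app","target/debug/my-app.dSYM"], ...}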
if bin.len() > 1 {
let names = bin
.into_iter()
.map(|(target_name, _)| target_name)
.collect::<Vec<_>>();
println!(
"`cargo deploy` could not determine which binary to run. \
Use the `--bin` option to specify a binary.\n\
available binaries: {}",
names.join(", ")
); // , or the `default-run` manifest key // TODO: work out best way to get this / behave same as cargo run
process::exit(1);
} else if bin.is_empty() {
println!("a bin target must be available for `cargo deploy`");
process::exit(1);
}
let path = bin.into_iter().next().unwrap().1;
let args: Vec<OsString> = iter::once(OsString::from(&path))
.chain(forward_args.into_iter().map(ToOwned::to_owned))
.collect();
let vars: Vec<(OsString, OsString)> = env::vars_os().collect();
let format = Format::Human;
constellation::deploy(host, &path, format, args, vars);
}
fn cli<'a, 'b>() -> App<'a, 'b> {
// https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277
// https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs
App::new("cargo")
.bin_name("cargo")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::SubcommandRequired,
])
.arg(
Arg::opt(
"verbose",
"Use verbose output (-vv very verbose/build.rs output)",
)
.short("v")
.multiple(true)
.global(true),
)
.arg(
Arg::opt("color", "Coloring: auto, always, never")
.value_name("WHEN")
.global(true),
)
.arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
.arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true))
.arg(Arg::opt("offline", "Run without accessing the network").global(true))
.arg(
Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value")
.global(true)
.hidden(true),
)
.arg(
Arg::with_name("unstable-features")
.help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
.short("Z")
.value_name("FLAG")
.multiple(true)
.number_of_values(1)
.global(true),
)
.subcommand(
SubCommand::with_name("deploy")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::DontCollapseArgsInUsage,
AppSettings::TrailingVarArg,
])
.version(crate_version!())
.about("Run a binary or example of the local package on a constellation cluster")
// .arg(Arg::opt("quiet", "No output printed to stdout").short("q"))
.arg(
Arg::with_name("host")
.help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)")
.required(true)
.validator(|host| {
host.parse::<SocketAddr>()
.map(drop)
.map_err(|err| err.to_string())
}),
)
.arg(Arg::with_name("args").multiple(true))
.args(&Arg::targets_bin_example(
"Name of the bin target to run",
"Name of the example target to run",
))
.arg(Arg::package("Package with the target to run"))
.arg(Arg::jobs())
.arg(Arg::release(
"Build artifacts in release mode, with optimizations",
))
.arg(Arg::profile("Build artifacts with the specified profile"))
.args(&Arg::features())
.arg(Arg::target_triple("Build for the target triple"))
.arg(Arg::target_dir())
.arg(Arg::manifest_path())
// .arg(Arg::message_format())
.after_help(
"\
If neither `--bin` nor `--example` are given, then if the package only has one
bin target it will be run. Otherwise `--bin` specifies the bin target to run,
and `--example` specifies the example target to run. At most one of `--bin` or
`--example` can be provided.
All the arguments following the two dashes (`--`) are passed to the binary to
run. If you're passing arguments to both Cargo and the binary, the ones after
`--` go to the binary, the ones before go to Cargo.
",
),
)
}
fn cargo(args: &ArgMatches) -> process::Command {
let verbose: u64 = args.occurrences_of("verbose");
let color: Option<&str> = args.value_of("color");
let frozen: bool = args.is_present("frozen");
let locked: bool = args.is_present("locked");
let offline: bool = args.is_present("offline");
let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect();
let unstable_features: Vec<&OsStr> = args
.values_of_os("unstable-features")
.unwrap_or_default()
.collect();
let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect();
let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect();
let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect();
let jobs: Option<&str> = args.value_of("jobs");
let release: bool = args.is_present("release");
let profile: Option<&str> = args.value_of("profile");
let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect();
let all_features: bool = args.is_present("all-features");
let no_default_features: bool = args.is_present("no-default-features");
let target: Option<&str> = args.value_of("target");
let target_dir: Option<&str> = args.value_of("target-dir");
let manifest_path: Option<&str> = args.value_of("manifest-path");
// let mut args: Vec<String> = Vec::new();
let mut cargo = process::Command::new("cargo");
let _ = cargo.arg("build");
let _ = cargo.arg("--message-format=json");
if verbose > 0 {
let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap())));
}
if let Some(color) = color {
let _ = cargo.arg(format!("--color={}", color));
}
if frozen {
let _ = cargo.arg("--frozen");
}
if locked {
let _ = cargo.arg("--locked");
}
if offline {
let _ = cargo.arg("--offline");
}
for config in config {
let _ = cargo.arg(format!("--config={}", config));
}
for unstable_features in unstable_features {
let mut arg = OsString::from("-Z");
arg.push(unstable_features);
let _ = cargo.arg(arg);
}
for bin in bin {
let _ = cargo.arg(format!("--bin={}", bin));
}
for example in example {
let _ = cargo.arg(format!("--example={}", example));
}
for package in package {
let _ = cargo.arg(format!("--package={}", package));
}
if let Some(jobs) = jobs {
let _ = cargo.arg(format!("--jobs={}", jobs));
}
    if release {
        let _ = cargo.arg("--release");
    }
    if let Some(profile) = profile {
        let _ = cargo.arg(format!("--profile={}", profile));
    }
    for features in features {
        let _ = cargo.arg(format!("--features={}", features));
    }
    if all_features {
        let _ = cargo.arg("--all-features");
    }
    if no_default_features {
        let _ = cargo.arg("--no-default-features");
    }
    if let Some(target) = target {
        let _ = cargo.arg(format!("--target={}", target));
    }
    if let Some(target_dir) = target_dir {
        let _ = cargo.arg(format!("--target-dir={}", target_dir));
    }
|
conditional_block
|
cargo-deploy.rs
|
// https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277
// https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs
App::new("cargo")
.bin_name("cargo")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::SubcommandRequired,
])
.arg(
Arg::opt(
"verbose",
"Use verbose output (-vv very verbose/build.rs output)",
)
.short("v")
.multiple(true)
.global(true),
)
.arg(
Arg::opt("color", "Coloring: auto, always, never")
.value_name("WHEN")
.global(true),
)
.arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
.arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true))
.arg(Arg::opt("offline", "Run without accessing the network").global(true))
.arg(
Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value")
.global(true)
.hidden(true),
)
.arg(
Arg::with_name("unstable-features")
.help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
.short("Z")
.value_name("FLAG")
.multiple(true)
.number_of_values(1)
.global(true),
)
.subcommand(
SubCommand::with_name("deploy")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::DontCollapseArgsInUsage,
AppSettings::TrailingVarArg,
])
.version(crate_version!())
.about("Run a binary or example of the local package on a constellation cluster")
// .arg(Arg::opt("quiet", "No output printed to stdout").short("q"))
.arg(
Arg::with_name("host")
.help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)")
.required(true)
.validator(|host| {
host.parse::<SocketAddr>()
.map(drop)
.map_err(|err| err.to_string())
}),
)
.arg(Arg::with_name("args").multiple(true))
.args(&Arg::targets_bin_example(
"Name of the bin target to run",
"Name of the example target to run",
))
.arg(Arg::package("Package with the target to run"))
.arg(Arg::jobs())
.arg(Arg::release(
"Build artifacts in release mode, with optimizations",
))
.arg(Arg::profile("Build artifacts with the specified profile"))
.args(&Arg::features())
.arg(Arg::target_triple("Build for the target triple"))
.arg(Arg::target_dir())
.arg(Arg::manifest_path())
// .arg(Arg::message_format())
.after_help(
"\
If neither `--bin` nor `--example` are given, then if the package only has one
bin target it will be run. Otherwise `--bin` specifies the bin target to run,
and `--example` specifies the example target to run. At most one of `--bin` or
`--example` can be provided.
All the arguments following the two dashes (`--`) are passed to the binary to
run. If you're passing arguments to both Cargo and the binary, the ones after
`--` go to the binary, the ones before go to Cargo.
",
),
)
}
fn cargo(args: &ArgMatches) -> process::Command {
let verbose: u64 = args.occurrences_of("verbose");
let color: Option<&str> = args.value_of("color");
let frozen: bool = args.is_present("frozen");
let locked: bool = args.is_present("locked");
let offline: bool = args.is_present("offline");
let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect();
let unstable_features: Vec<&OsStr> = args
.values_of_os("unstable-features")
.unwrap_or_default()
.collect();
let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect();
let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect();
let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect();
let jobs: Option<&str> = args.value_of("jobs");
let release: bool = args.is_present("release");
let profile: Option<&str> = args.value_of("profile");
let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect();
let all_features: bool = args.is_present("all-features");
let no_default_features: bool = args.is_present("no-default-features");
let target: Option<&str> = args.value_of("target");
let target_dir: Option<&str> = args.value_of("target-dir");
let manifest_path: Option<&str> = args.value_of("manifest-path");
// let mut args: Vec<String> = Vec::new();
let mut cargo = process::Command::new("cargo");
let _ = cargo.arg("build");
let _ = cargo.arg("--message-format=json");
if verbose > 0 {
let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap())));
}
if let Some(color) = color {
let _ = cargo.arg(format!("--color={}", color));
}
if frozen {
let _ = cargo.arg("--frozen");
}
if locked {
let _ = cargo.arg("--locked");
}
if offline {
let _ = cargo.arg("--offline");
}
for config in config {
let _ = cargo.arg(format!("--config={}", config));
}
for unstable_features in unstable_features {
let mut arg = OsString::from("-Z");
arg.push(unstable_features);
let _ = cargo.arg(arg);
}
for bin in bin {
let _ = cargo.arg(format!("--bin={}", bin));
}
for example in example {
let _ = cargo.arg(format!("--example={}", example));
}
for package in package {
let _ = cargo.arg(format!("--package={}", package));
}
if let Some(jobs) = jobs {
let _ = cargo.arg(format!("--jobs={}", jobs));
}
if release {
let _ = cargo.arg("--release");
}
if let Some(profile) = profile {
let _ = cargo.arg(format!("--profile={}", profile));
}
for features in features {
let _ = cargo.arg(format!("--features={}", features));
}
if all_features {
let _ = cargo.arg("--all-features");
}
if no_default_features {
let _ = cargo.arg("--no-default-features");
}
if let Some(target) = target {
let _ = cargo.arg(format!("--target={}", target));
}
if let Some(target_dir) = target_dir {
let _ = cargo.arg(format!("--target-dir={}", target_dir));
}
if let Some(manifest_path) = manifest_path {
let _ = cargo.arg(format!("--manifest-path={}", manifest_path));
}
cargo
}
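// As a sketch, `cargo deploy --release --locked <host>` would make this
// function produce roughly:
//   cargo build --message-format=json --locked --release
// (argument order follows the checks above; the host and trailing args are
// consumed by `deploy` itself and never reach `cargo build`).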
// https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/cargo/util/command_prelude.rs
trait ArgExt: Sized {
fn opt(name: &'static str, help: &'static str) -> Self;
fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str)
-> Self;
fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self;
fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2];
fn package(package: &'static str) -> Self;
fn jobs() -> Self;
fn release(release: &'static str) -> Self;
fn profile(profile: &'static str) -> Self;
fn features() -> [Self; 3];
fn target_triple(target: &'static str) -> Self;
fn target_dir() -> Self;
fn manifest_path() -> Self;
}
impl<'a, 'b> ArgExt for Arg<'a, 'b> {
fn opt(name: &'static str, help: &'static str) -> Self {
Arg::with_name(name).long(name).help(help)
}
fn optional_multi_opt(
name: &'static str, value_name: &'static str, help: &'static str,
) -> Self {
        Self::opt(name, help)
            .value_name(value_name)
            .multiple(true)
            .min_values(0)
            .number_of_values(1)
    }
    fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self {
        Self::opt(name, help)
            .value_name(value_name)
            .multiple(true)
            .number_of_values(1)
    }
|
identifier_body
|
|
cargo-deploy.rs
|
).into_iter() {
if let cargo_metadata::Message::CompilerArtifact(artifact) =
message.unwrap_or_else(|_| panic!("Failed to parse output of cargo"))
{
if artifact.target.kind == vec![String::from("bin")]
|| artifact.target.kind == vec![String::from("example")]
{
bin.push((
artifact.target.name,
artifact.filenames.into_iter().next().unwrap(),
));
// We're assuming the first filename is the binary – .dSYM etc seem to always be second?
}
}
}
if bin.len() > 1 {
let names = bin
.into_iter()
.map(|(target_name, _)| target_name)
.collect::<Vec<_>>();
println!(
"`cargo deploy` could not determine which binary to run. \
Use the `--bin` option to specify a binary.\n\
available binaries: {}",
names.join(", ")
); // , or the `default-run` manifest key // TODO: work out best way to get this / behave same as cargo run
process::exit(1);
} else if bin.is_empty() {
println!("a bin target must be available for `cargo deploy`");
process::exit(1);
}
let path = bin.into_iter().next().unwrap().1;
let args: Vec<OsString> = iter::once(OsString::from(&path))
.chain(forward_args.into_iter().map(ToOwned::to_owned))
.collect();
let vars: Vec<(OsString, OsString)> = env::vars_os().collect();
let format = Format::Human;
constellation::deploy(host, &path, format, args, vars);
}
fn cli<'a, 'b>() -> App<'a, 'b> {
// https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277
// https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs
App::new("cargo")
.bin_name("cargo")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::SubcommandRequired,
])
.arg(
Arg::opt(
"verbose",
"Use verbose output (-vv very verbose/build.rs output)",
)
.short("v")
.multiple(true)
.global(true),
)
.arg(
Arg::opt("color", "Coloring: auto, always, never")
.value_name("WHEN")
.global(true),
)
.arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
.arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true))
.arg(Arg::opt("offline", "Run without accessing the network").global(true))
.arg(
Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value")
.global(true)
.hidden(true),
)
.arg(
Arg::with_name("unstable-features")
.help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
.short("Z")
.value_name("FLAG")
.multiple(true)
.number_of_values(1)
.global(true),
)
.subcommand(
SubCommand::with_name("deploy")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::DontCollapseArgsInUsage,
AppSettings::TrailingVarArg,
])
.version(crate_version!())
.about("Run a binary or example of the local package on a constellation cluster")
// .arg(Arg::opt("quiet", "No output printed to stdout").short("q"))
.arg(
Arg::with_name("host")
.help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)")
.required(true)
.validator(|host| {
host.parse::<SocketAddr>()
.map(drop)
.map_err(|err| err.to_string())
}),
)
.arg(Arg::with_name("args").multiple(true))
.args(&Arg::targets_bin_example(
"Name of the bin target to run",
"Name of the example target to run",
))
.arg(Arg::package("Package with the target to run"))
.arg(Arg::jobs())
.arg(Arg::release(
"Build artifacts in release mode, with optimizations",
))
.arg(Arg::profile("Build artifacts with the specified profile"))
.args(&Arg::features())
.arg(Arg::target_triple("Build for the target triple"))
.arg(Arg::target_dir())
.arg(Arg::manifest_path())
// .arg(Arg::message_format())
.after_help(
"\
If neither `--bin` nor `--example` are given, then if the package only has one
bin target it will be run. Otherwise `--bin` specifies the bin target to run,
and `--example` specifies the example target to run. At most one of `--bin` or
`--example` can be provided.
All the arguments following the two dashes (`--`) are passed to the binary to
run. If you're passing arguments to both Cargo and the binary, the ones after
`--` go to the binary, the ones before go to Cargo.
",
),
)
}
fn cargo(args: &ArgMatches) -> process::Command {
let verbose: u64 = args.occurrences_of("verbose");
let color: Option<&str> = args.value_of("color");
let frozen: bool = args.is_present("frozen");
let locked: bool = args.is_present("locked");
let offline: bool = args.is_present("offline");
let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect();
let unstable_features: Vec<&OsStr> = args
.values_of_os("unstable-features")
.unwrap_or_default()
.collect();
let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect();
let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect();
let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect();
let jobs: Option<&str> = args.value_of("jobs");
let release: bool = args.is_present("release");
let profile: Option<&str> = args.value_of("profile");
let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect();
let all_features: bool = args.is_present("all-features");
let no_default_features: bool = args.is_present("no-default-features");
let target: Option<&str> = args.value_of("target");
let target_dir: Option<&str> = args.value_of("target-dir");
let manifest_path: Option<&str> = args.value_of("manifest-path");
// let mut args: Vec<String> = Vec::new();
let mut cargo = process::Command::new("cargo");
let _ = cargo.arg("build");
let _ = cargo.arg("--message-format=json");
if verbose > 0 {
let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap())));
}
if let Some(color) = color {
let _ = cargo.arg(format!("--color={}", color));
}
if frozen {
let _ = cargo.arg("--frozen");
}
if locked {
let _ = cargo.arg("--locked");
}
if offline {
let _ = cargo.arg("--offline");
}
for config in config {
let _ = cargo.arg(format!("--config={}", config));
}
for unstable_features in unstable_features {
let mut arg = OsString::from("-Z");
arg.push(unstable_features);
let _ = cargo.arg(arg);
}
for bin in bin {
let _ = cargo.arg(format!("--bin={}", bin));
}
for example in example {
let _ = cargo.arg(format!("--example={}", example));
}
for package in package {
let _ = cargo.arg(format!("--package={}", package));
}
if let Some(jobs) = jobs {
let _ = cargo.arg(format!("--jobs={}", jobs));
}
if release {
let _ = cargo.arg("--release");
}
if let Some(profile) = profile {
let _ = cargo.arg(format!("--profile={}", profile));
}
for features in features {
let _ = cargo.arg(format!("--features={}", features));
}
if all_features {
let _ = cargo.arg("--all-features");
}
if no_default_features {
let _ = cargo.arg("--no-default-features");
}
if let Some(target) = target {
let _ = cargo.arg(format!("--target={}", target));
}
if let Some(target_dir) = target_dir {
        let _ = cargo.arg(format!("--target-dir={}", target_dir));
    }
|
random_line_split
|
cargo-deploy.rs
|
.subcommand(
SubCommand::with_name("deploy")
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::DontCollapseArgsInUsage,
AppSettings::TrailingVarArg,
])
.version(crate_version!())
.about("Run a binary or example of the local package on a constellation cluster")
// .arg(Arg::opt("quiet", "No output printed to stdout").short("q"))
.arg(
Arg::with_name("host")
.help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)")
.required(true)
.validator(|host| {
host.parse::<SocketAddr>()
.map(drop)
.map_err(|err| err.to_string())
}),
)
.arg(Arg::with_name("args").multiple(true))
.args(&Arg::targets_bin_example(
"Name of the bin target to run",
"Name of the example target to run",
))
.arg(Arg::package("Package with the target to run"))
.arg(Arg::jobs())
.arg(Arg::release(
"Build artifacts in release mode, with optimizations",
))
.arg(Arg::profile("Build artifacts with the specified profile"))
.args(&Arg::features())
.arg(Arg::target_triple("Build for the target triple"))
.arg(Arg::target_dir())
.arg(Arg::manifest_path())
// .arg(Arg::message_format())
.after_help(
"\
If neither `--bin` nor `--example` are given, then if the package only has one
bin target it will be run. Otherwise `--bin` specifies the bin target to run,
and `--example` specifies the example target to run. At most one of `--bin` or
`--example` can be provided.
All the arguments following the two dashes (`--`) are passed to the binary to
run. If you're passing arguments to both Cargo and the binary, the ones after
`--` go to the binary, the ones before go to Cargo.
",
),
)
}
fn cargo(args: &ArgMatches) -> process::Command {
let verbose: u64 = args.occurrences_of("verbose");
let color: Option<&str> = args.value_of("color");
let frozen: bool = args.is_present("frozen");
let locked: bool = args.is_present("locked");
let offline: bool = args.is_present("offline");
let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect();
let unstable_features: Vec<&OsStr> = args
.values_of_os("unstable-features")
.unwrap_or_default()
.collect();
let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect();
let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect();
let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect();
let jobs: Option<&str> = args.value_of("jobs");
let release: bool = args.is_present("release");
let profile: Option<&str> = args.value_of("profile");
let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect();
let all_features: bool = args.is_present("all-features");
let no_default_features: bool = args.is_present("no-default-features");
let target: Option<&str> = args.value_of("target");
let target_dir: Option<&str> = args.value_of("target-dir");
let manifest_path: Option<&str> = args.value_of("manifest-path");
// let mut args: Vec<String> = Vec::new();
let mut cargo = process::Command::new("cargo");
let _ = cargo.arg("build");
let _ = cargo.arg("--message-format=json");
if verbose > 0 {
let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap())));
}
if let Some(color) = color {
let _ = cargo.arg(format!("--color={}", color));
}
if frozen {
let _ = cargo.arg("--frozen");
}
if locked {
let _ = cargo.arg("--locked");
}
if offline {
let _ = cargo.arg("--offline");
}
for config in config {
let _ = cargo.arg(format!("--config={}", config));
}
for unstable_features in unstable_features {
let mut arg = OsString::from("-Z");
arg.push(unstable_features);
let _ = cargo.arg(arg);
}
for bin in bin {
let _ = cargo.arg(format!("--bin={}", bin));
}
for example in example {
let _ = cargo.arg(format!("--example={}", example));
}
for package in package {
let _ = cargo.arg(format!("--package={}", package));
}
if let Some(jobs) = jobs {
let _ = cargo.arg(format!("--jobs={}", jobs));
}
if release {
let _ = cargo.arg("--release");
}
if let Some(profile) = profile {
let _ = cargo.arg(format!("--profile={}", profile));
}
for features in features {
let _ = cargo.arg(format!("--features={}", features));
}
if all_features {
let _ = cargo.arg("--all-features");
}
if no_default_features {
let _ = cargo.arg("--no-default-features");
}
if let Some(target) = target {
let _ = cargo.arg(format!("--target={}", target));
}
if let Some(target_dir) = target_dir {
let _ = cargo.arg(format!("--target-dir={}", target_dir));
}
if let Some(manifest_path) = manifest_path {
let _ = cargo.arg(format!("--manifest-path={}", manifest_path));
}
cargo
}
// https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/cargo/util/command_prelude.rs
trait ArgExt: Sized {
fn opt(name: &'static str, help: &'static str) -> Self;
fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str)
-> Self;
fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self;
fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2];
fn package(package: &'static str) -> Self;
fn jobs() -> Self;
fn release(release: &'static str) -> Self;
fn profile(profile: &'static str) -> Self;
fn features() -> [Self; 3];
fn target_triple(target: &'static str) -> Self;
fn target_dir() -> Self;
fn manifest_path() -> Self;
}
impl<'a, 'b> ArgExt for Arg<'a, 'b> {
fn opt(name: &'static str, help: &'static str) -> Self {
Arg::with_name(name).long(name).help(help)
}
fn optional_multi_opt(
name: &'static str, value_name: &'static str, help: &'static str,
) -> Self {
Self::opt(name, help)
.value_name(value_name)
.multiple(true)
.min_values(0)
.number_of_values(1)
}
fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self {
// Note that all `.multiple(true)` arguments in Cargo should specify
// `.number_of_values(1)` as well, so that `--foo val1 val2` is
// *not* parsed as `foo` with values ["val1", "val2"].
// `number_of_values` should become the default in clap 3.
Self::opt(name, help)
.value_name(value_name)
.multiple(true)
.number_of_values(1)
}
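    // Concretely: with `.multiple(true)` alone, `--config a b` would parse as
    // config = ["a", "b"] and swallow the positional `b`; adding
    // `.number_of_values(1)` parses it as config = ["a"], leaving `b` free to
    // match a positional argument (illustrative clap 2.x behaviour).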
fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2] {
[
Self::optional_multi_opt("bin", "NAME", bin),
Self::optional_multi_opt("example", "NAME", example),
]
}
fn package(package: &'static str) -> Self {
Self::opt("package", package).short("p").value_name("SPEC")
}
fn jobs() -> Self {
Self::opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
.short("j")
.value_name("N")
}
fn release(release: &'static str) -> Self {
Self::opt("release", release)
}
fn profile(profile: &'static str) -> Self {
Self::opt("profile", profile).value_name("PROFILE-NAME")
}
fn features() -> [Self; 3] {
[
Self::multi_opt(
"features",
"FEATURES",
"Space-separated list of features to activate",
),
Self::opt("all-features", "Activate all available features"),
Self::opt(
"no-default-features",
"Do not activate the `default` feature",
),
]
}
    fn target_triple(target: &'static str) -> Self {
|
identifier_name
|
|
generator.rs
|
if let Some(target_docs_dir) = target_docs_dir {
if !target_docs_dir.exists() {
fs::create_dir(&target_docs_dir)?;
}
transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir);
}
Ok(())
}
fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> {
let additional_include_dirs = opencv
.include_paths
.iter()
.map(|path| path.as_path())
.filter(|&include_path| include_path != opencv_header_dir)
.collect::<Vec<_>>();
let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR);
eprintln!("=== Clang: {}", gen.clang_version());
eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args());
let additional_include_dirs = Arc::new(
additional_include_dirs
.into_iter()
.map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string"))
.join(","),
);
let opencv_header_dir = Arc::new(opencv_header_dir.to_owned());
let job_server = build_job_server()?;
let mut join_handles = Vec::with_capacity(modules.len());
let start = Instant::now();
// todo use thread::scope when MSRV is 1.63
eprintln!("=== Generating {} modules", modules.len());
modules.iter().for_each(|module| {
let token = job_server.acquire().expect("Can't acquire token from job server");
let join_handle = thread::spawn({
let additional_include_dirs = Arc::clone(&additional_include_dirs);
let opencv_header_dir = Arc::clone(&opencv_header_dir);
let build_script_path = self.build_script_path.clone();
move || {
let module_start = Instant::now();
let mut bin_generator = Command::new(build_script_path);
bin_generator
.arg(&*opencv_header_dir)
.arg(&*SRC_CPP_DIR)
.arg(&*OUT_DIR)
.arg(module)
.arg(&*additional_include_dirs);
eprintln!("=== Running: {bin_generator:?}");
let res = bin_generator
.status()
.unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}"));
if !res.success() {
panic!("Failed to run the bindings generator for module: {module}");
}
eprintln!("=== Generated: {module} in {:?}", module_start.elapsed());
drop(token); // needed to move the token to the thread
}
});
join_handles.push(join_handle);
});
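	// Note: the `drop(token)` inside the closure is what forces the closure to
	// capture the token by move, so each spawned thread holds its job-server
	// slot until it finishes, keeping parallelism within the jobserver limit.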
for join_handle in join_handles {
join_handle.join().expect("Generator process panicked");
}
eprintln!("=== Total binding generation time: {:?}", start.elapsed());
Ok(())
}
}
fn is_type_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type") // ends with ".type"
})
}
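// E.g. for module "core", a stem such as "012-core-VectorOfMat.type" passes
// all five checks, while "core-VectorOfMat.type" fails the leading-digits
// check (illustrative file names).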
fn is_type_externs_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type.externs") // ends with ".type"
})
}
fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> {
let mut line = Vec::with_capacity(100);
while read.read_until(b'\n', &mut line)? != 0 {
write.write_all(indent.as_bytes())?;
write.write_all(&line)?;
line.clear();
}
Ok(())
}
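// Usage sketch: copy_indent(reader, writer, "\t") turns the input
// "foo\nbar\n" into "\tfoo\n\tbar\n", i.e. every line gains the indent prefix.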
fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> {
if !target_module_dir.exists() {
fs::create_dir(target_module_dir)?;
}
for path in files_with_extension(target_module_dir, "rs")? {
let _ = fs::remove_file(path);
}
fn write_has_module(mut write: impl Write, module: &str) -> Result<()> {
Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?)
}
	fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> {
		// Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well:
		// https://github.com/twistedfall/opencv-rust/issues/418
		// https://github.com/rust-lang/rust-analyzer/issues/11682
		Ok(writeln!(
			write,
			r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"#
		)?)
	}
let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> {
if manual_dir.join(format!("{module}.rs")).exists() {
writeln!(file, "pub use crate::manual::{module}::*;")?;
Ok(true)
} else {
Ok(false)
}
};
let start = Instant::now();
let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?);
let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?);
writeln!(types_rs)?;
let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?);
writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?;
writeln!(sys_rs)?;
for module in modules {
// merge multiple *-type.cpp files into a single module_types.hpp
let module_cpp = OUT_DIR.join(format!("{module}.cpp"));
if module_cpp.is_file() {
let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp"));
let mut module_types_file = BufWriter::new(
OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(module_types_cpp)?,
);
let mut type_files = files_with_extension(&OUT_DIR, "cpp")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?;
let _ = fs::remove_file(entry);
}
}
// add module entry to hub.rs and move the module file into opencv/
write_has_module(&mut hub_rs, module)?;
write_module_include(&mut hub_rs, module)?;
let module_filename = format!("{module}.rs");
let module_src_file = OUT_DIR.join(&module_filename);
let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?);
// Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when
// module file is include!d (as opposed to connecting the module with `mod` from the parent module).
// The same doesn't apply to `sys` and `types` below because they don't contain top-level comments.
writeln!(module_rs, "pub mod {module} {{")?;
copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?;
add_manual(&mut module_rs, module)?;
writeln!(module_rs, "}}")?;
let _ = fs::remove_file(module_src_file);
// merge multiple *-.type.rs files into a single types.rs
let mut header_written = false;
let mut type_files = files_with_extension(&OUT_DIR, "rs")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 {
if !header_written {
write_has_module(&mut types_rs, module)?;
writeln!(types_rs, "mod {module}_types {{")?;
writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types
|
|
identifier_body
|
generator.rs
|
if let Some(target_docs_dir) = target_docs_dir {
if !target_docs_dir.exists() {
fs::create_dir(&target_docs_dir)?;
}
transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir);
}
Ok(())
}
fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> {
let additional_include_dirs = opencv
.include_paths
.iter()
.map(|path| path.as_path())
.filter(|&include_path| include_path != opencv_header_dir)
.collect::<Vec<_>>();
let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR);
eprintln!("=== Clang: {}", gen.clang_version());
eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args());
let additional_include_dirs = Arc::new(
additional_include_dirs
.into_iter()
.map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string"))
.join(","),
);
let opencv_header_dir = Arc::new(opencv_header_dir.to_owned());
let job_server = build_job_server()?;
let mut join_handles = Vec::with_capacity(modules.len());
let start = Instant::now();
// todo use thread::scope when MSRV is 1.63
eprintln!("=== Generating {} modules", modules.len());
modules.iter().for_each(|module| {
let token = job_server.acquire().expect("Can't acquire token from job server");
let join_handle = thread::spawn({
let additional_include_dirs = Arc::clone(&additional_include_dirs);
let opencv_header_dir = Arc::clone(&opencv_header_dir);
let build_script_path = self.build_script_path.clone();
move || {
let module_start = Instant::now();
let mut bin_generator = Command::new(build_script_path);
bin_generator
.arg(&*opencv_header_dir)
.arg(&*SRC_CPP_DIR)
.arg(&*OUT_DIR)
.arg(module)
.arg(&*additional_include_dirs);
eprintln!("=== Running: {bin_generator:?}");
let res = bin_generator
.status()
.unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}"));
if !res.success() {
panic!("Failed to run the bindings generator for module: {module}");
}
eprintln!("=== Generated: {module} in {:?}", module_start.elapsed());
drop(token); // needed to move the token to the thread
}
});
join_handles.push(join_handle);
});
for join_handle in join_handles {
join_handle.join().expect("Generator process panicked");
}
eprintln!("=== Total binding generation time: {:?}", start.elapsed());
Ok(())
}
}
fn is_type_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type") // ends with ".type"
})
}
fn is_type_externs_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type.externs") // ends with ".type"
})
}
fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> {
let mut line = Vec::with_capacity(100);
while read.read_until(b'\n', &mut line)? != 0 {
write.write_all(indent.as_bytes())?;
write.write_all(&line)?;
line.clear();
}
Ok(())
}
fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> {
if !target_module_dir.exists() {
fs::create_dir(target_module_dir)?;
}
for path in files_with_extension(target_module_dir, "rs")? {
let _ = fs::remove_file(path);
}
	fn write_has_module(mut write: impl Write, module: &str) -> Result<()> {
Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?)
}
fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> {
// Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well:
// https://github.com/twistedfall/opencv-rust/issues/418
// https://github.com/rust-lang/rust-analyzer/issues/11682
Ok(writeln!(
write,
r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"#
)?)
}
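	// For module "imgproc" this emits the line:
	// include!(concat!(env!("OUT_DIR"), "/opencv/imgproc.rs"));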
let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> {
if manual_dir.join(format!("{module}.rs")).exists() {
writeln!(file, "pub use crate::manual::{module}::*;")?;
Ok(true)
} else {
Ok(false)
}
};
let start = Instant::now();
let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?);
let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?);
writeln!(types_rs)?;
let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?);
writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?;
writeln!(sys_rs)?;
for module in modules {
// merge multiple *-type.cpp files into a single module_types.hpp
let module_cpp = OUT_DIR.join(format!("{module}.cpp"));
if module_cpp.is_file() {
let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp"));
let mut module_types_file = BufWriter::new(
OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(module_types_cpp)?,
);
let mut type_files = files_with_extension(&OUT_DIR, "cpp")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?;
let _ = fs::remove_file(entry);
}
}
// add module entry to hub.rs and move the module file into opencv/
write_has_module(&mut hub_rs, module)?;
write_module_include(&mut hub_rs, module)?;
let module_filename = format!("{module}.rs");
let module_src_file = OUT_DIR.join(&module_filename);
let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?);
// Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when
// module file is include!d (as opposed to connecting the module with `mod` from the parent module).
// The same doesn't apply to `sys` and `types` below because they don't contain top-level comments.
writeln!(module_rs, "pub mod {module} {{")?;
copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?;
add_manual(&mut module_rs, module)?;
writeln!(module_rs, "}}")?;
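		// Sketch of the resulting wrapper file for a module named "imgproc":
		// pub mod imgproc {
		//     //! top-level docs are legal here because the file is include!d
		//     ...module bindings, indented one tab...
		//     pub use crate::manual::imgproc::*; // only if a manual file exists
		// }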
let _ = fs::remove_file(module_src_file);
// merge multiple *-.type.rs files into a single types.rs
let mut header_written = false;
let mut type_files = files_with_extension(&OUT_DIR, "rs")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 {
if !header_written {
write_has_module(&mut types_rs, module)?;
writeln!(types_rs, "mod {module}_types {{")?;
writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types,
|
write_has_module
|
identifier_name
|
generator.rs
|
if let Some(target_docs_dir) = target_docs_dir {
if !target_docs_dir.exists() {
fs::create_dir(&target_docs_dir)?;
}
transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir);
}
Ok(())
}
fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> {
let additional_include_dirs = opencv
.include_paths
.iter()
.map(|path| path.as_path())
.filter(|&include_path| include_path != opencv_header_dir)
.collect::<Vec<_>>();
let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR);
eprintln!("=== Clang: {}", gen.clang_version());
eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args());
let additional_include_dirs = Arc::new(
additional_include_dirs
.into_iter()
.map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string"))
.join(","),
);
let opencv_header_dir = Arc::new(opencv_header_dir.to_owned());
let job_server = build_job_server()?;
let mut join_handles = Vec::with_capacity(modules.len());
let start = Instant::now();
// todo use thread::scope when MSRV is 1.63
eprintln!("=== Generating {} modules", modules.len());
modules.iter().for_each(|module| {
let token = job_server.acquire().expect("Can't acquire token from job server");
let join_handle = thread::spawn({
let additional_include_dirs = Arc::clone(&additional_include_dirs);
let opencv_header_dir = Arc::clone(&opencv_header_dir);
let build_script_path = self.build_script_path.clone();
move || {
let module_start = Instant::now();
let mut bin_generator = Command::new(build_script_path);
bin_generator
.arg(&*opencv_header_dir)
.arg(&*SRC_CPP_DIR)
.arg(&*OUT_DIR)
.arg(module)
.arg(&*additional_include_dirs);
eprintln!("=== Running: {bin_generator:?}");
let res = bin_generator
.status()
.unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}"));
if !res.success() {
panic!("Failed to run the bindings generator for module: {module}");
}
eprintln!("=== Generated: {module} in {:?}", module_start.elapsed());
drop(token); // needed to move the token to the thread
}
});
join_handles.push(join_handle);
});
for join_handle in join_handles {
join_handle.join().expect("Generator process panicked");
}
eprintln!("=== Total binding generation time: {:?}", start.elapsed());
Ok(())
}
}
fn is_type_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type") // ends with ".type"
})
}
fn is_type_externs_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type.externs") // ends with ".type"
})
}
fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> {
let mut line = Vec::with_capacity(100);
while read.read_until(b'\n', &mut line)? != 0 {
write.write_all(indent.as_bytes())?;
write.write_all(&line)?;
line.clear();
}
Ok(())
}
fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> {
if !target_module_dir.exists() {
fs::create_dir(target_module_dir)?;
}
for path in files_with_extension(target_module_dir, "rs")? {
let _ = fs::remove_file(path);
}
fn write_has_module(mut write: impl Write, module: &str) -> Result<()> {
Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?)
}
fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> {
// Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well:
// https://github.com/twistedfall/opencv-rust/issues/418
// https://github.com/rust-lang/rust-analyzer/issues/11682
Ok(writeln!(
write,
r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"#
)?)
}
let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> {
if manual_dir.join(format!("{module}.rs")).exists() {
writeln!(file, "pub use crate::manual::{module}::*;")?;
Ok(true)
} else {
Ok(false)
}
};
let start = Instant::now();
let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?);
let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?);
writeln!(types_rs)?;
let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?);
writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?;
writeln!(sys_rs)?;
for module in modules {
// merge multiple *-type.cpp files into a single module_types.hpp
let module_cpp = OUT_DIR.join(format!("{module}.cpp"));
		if module_cpp.is_file() {
			let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp"));
			let mut module_types_file = BufWriter::new(
				OpenOptions::new()
					.create(true)
					.truncate(true)
					.write(true)
					.open(module_types_cpp)?,
			);
			let mut type_files = files_with_extension(&OUT_DIR, "cpp")?
				.filter(|f| is_type_file(f, module))
				.collect::<Vec<_>>();
			type_files.sort_unstable();
			for entry in type_files {
				io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?;
				let _ = fs::remove_file(entry);
			}
		}
// add module entry to hub.rs and move the module file into opencv/
write_has_module(&mut hub_rs, module)?;
write_module_include(&mut hub_rs, module)?;
let module_filename = format!("{module}.rs");
let module_src_file = OUT_DIR.join(&module_filename);
let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?);
// Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when
// module file is include!d (as opposed to connecting the module with `mod` from the parent module).
// The same doesn't apply to `sys` and `types` below because they don't contain top-level comments.
writeln!(module_rs, "pub mod {module} {{")?;
copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?;
add_manual(&mut module_rs, module)?;
writeln!(module_rs, "}}")?;
let _ = fs::remove_file(module_src_file);
// merge multiple *-.type.rs files into a single types.rs
let mut header_written = false;
let mut type_files = files_with_extension(&OUT_DIR, "rs")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 {
if !header_written {
write_has_module(&mut types_rs, module)?;
writeln!(types_rs, "mod {module}_types {{")?;
writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types
|
|
conditional_block
|
generator.rs
|
if let Some(target_docs_dir) = target_docs_dir {
if !target_docs_dir.exists() {
fs::create_dir(&target_docs_dir)?;
}
transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir);
}
Ok(())
}
fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> {
let additional_include_dirs = opencv
.include_paths
.iter()
.map(|path| path.as_path())
.filter(|&include_path| include_path != opencv_header_dir)
.collect::<Vec<_>>();
let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR);
eprintln!("=== Clang: {}", gen.clang_version());
eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args());
let additional_include_dirs = Arc::new(
additional_include_dirs
.into_iter()
.map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string"))
.join(","),
);
let opencv_header_dir = Arc::new(opencv_header_dir.to_owned());
let job_server = build_job_server()?;
let mut join_handles = Vec::with_capacity(modules.len());
let start = Instant::now();
// todo use thread::scope when MSRV is 1.63
eprintln!("=== Generating {} modules", modules.len());
modules.iter().for_each(|module| {
let token = job_server.acquire().expect("Can't acquire token from job server");
let join_handle = thread::spawn({
let additional_include_dirs = Arc::clone(&additional_include_dirs);
let opencv_header_dir = Arc::clone(&opencv_header_dir);
let build_script_path = self.build_script_path.clone();
move || {
let module_start = Instant::now();
let mut bin_generator = Command::new(build_script_path);
bin_generator
.arg(&*opencv_header_dir)
.arg(&*SRC_CPP_DIR)
.arg(&*OUT_DIR)
.arg(module)
.arg(&*additional_include_dirs);
eprintln!("=== Running: {bin_generator:?}");
let res = bin_generator
.status()
.unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}"));
if !res.success() {
panic!("Failed to run the bindings generator for module: {module}");
}
eprintln!("=== Generated: {module} in {:?}", module_start.elapsed());
drop(token); // needed to move the token to the thread
}
});
join_handles.push(join_handle);
});
for join_handle in join_handles {
join_handle.join().expect("Generator process panicked");
}
eprintln!("=== Total binding generation time: {:?}", start.elapsed());
Ok(())
}
}
fn is_type_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type") // ends with ".type"
})
}
fn is_type_externs_file(path: &Path, module: &str) -> bool {
path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| {
let mut stem_chars = stem.chars();
(&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits
matches!(stem_chars.next(), Some('-')) && // dash
module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name
matches!(stem_chars.next(), Some('-')) && // dash
stem.ends_with(".type.externs") // ends with ".type"
})
}
fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> {
let mut line = Vec::with_capacity(100);
while read.read_until(b'\n', &mut line)? != 0 {
write.write_all(indent.as_bytes())?;
write.write_all(&line)?;
line.clear();
}
Ok(())
}
fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> {
if !target_module_dir.exists() {
fs::create_dir(target_module_dir)?;
}
for path in files_with_extension(target_module_dir, "rs")? {
let _ = fs::remove_file(path);
}
fn write_has_module(mut write: impl Write, module: &str) -> Result<()> {
Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?)
}
fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> {
// Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well:
// https://github.com/twistedfall/opencv-rust/issues/418
// https://github.com/rust-lang/rust-analyzer/issues/11682
Ok(writeln!(
write,
r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"#
)?)
}
let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> {
if manual_dir.join(format!("{module}.rs")).exists() {
writeln!(file, "pub use crate::manual::{module}::*;")?;
Ok(true)
} else {
Ok(false)
}
};
let start = Instant::now();
let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?);
let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?);
writeln!(types_rs)?;
let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?);
writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?;
writeln!(sys_rs)?;
for module in modules {
// merge multiple *-type.cpp files into a single module_types.hpp
let module_cpp = OUT_DIR.join(format!("{module}.cpp"));
if module_cpp.is_file() {
let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp"));
let mut module_types_file = BufWriter::new(
OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(module_types_cpp)?,
);
let mut type_files = files_with_extension(&OUT_DIR, "cpp")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?;
let _ = fs::remove_file(entry);
}
}
// add module entry to hub.rs and move the module file into opencv/
write_has_module(&mut hub_rs, module)?;
write_module_include(&mut hub_rs, module)?;
let module_filename = format!("{module}.rs");
let module_src_file = OUT_DIR.join(&module_filename);
let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?);
// Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when
// module file is include!d (as opposed to connecting the module with `mod` from the parent module).
// The same doesn't apply to `sys` and `types` below because they don't contain top-level comments.
writeln!(module_rs, "pub mod {module} {{")?;
copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?;
add_manual(&mut module_rs, module)?;
writeln!(module_rs, "}}")?;
let _ = fs::remove_file(module_src_file);
// merge multiple *-.type.rs files into a single types.rs
let mut header_written = false;
let mut type_files = files_with_extension(&OUT_DIR, "rs")?
.filter(|f| is_type_file(f, module))
.collect::<Vec<_>>();
type_files.sort_unstable();
for entry in type_files {
if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 {
if !header_written {
write_has_module(&mut types_rs, module)?;
writeln!(types_rs, "mod {module}_types {{")?;
writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types, sys
|
random_line_split
|
|
Mission_Util_V01.py
|
", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"]
if (self.iden.upper() in illegalFileNames):
showinfo(title="Error", message="Illegal file name.")
return False
for chara in illegalFileNameChars:
if chara in self.iden:
showinfo(title="Error", message="Illegal character in mission ID.")
return False
        # the next four checks reject non-numeric values in the numeric inputs: time limit, strikes, needy activation time, and widgets
if not self.time_limit.isnumeric():
showinfo(title="Error", message="Illegal character in Time Limit.")
return False
if not self.strikes.isnumeric():
showinfo(title="Error", message="Illegal character in Strikes.")
return False
if not self.needy_activation_time.isnumeric():
showinfo(title="Error", message="Illegal character in Needy Activation Time.")
return False
if not self.widgets.isnumeric():
showinfo(title="Error", message="Illegal character in Widgets.")
return False
# TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable
# currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity
return True
class Gui(tk.Tk):
    # currently everything except createMission() runs in __init__; while not best practice, it makes the most sense for a GUI
def __init__(self):
super().__init__()
self.title("Mission Asset Utility")
self.geometry('900x750')
iden = tk.StringVar()
name = tk.StringVar()
description = tk.StringVar()
time_limit = tk.StringVar()
strikes = tk.StringVar()
needy_activation_time = tk.StringVar()
self.front_only = tk.StringVar(value=0)
widgets = tk.StringVar()
#modules = tk.StringVar()
separator = tk.StringVar()
self.pacing = tk.StringVar(value=0)
# using tk's grid functionality as it's very nice compared to other options
# column 0 is for names while 1 is for inputs
# row 8 is where module list goes
self.columnconfigure(0, weight = 0)
self.columnconfigure(1, weight = 20)
self.rowconfigure(8, weight = 5)
# sticky="WE" means it fills it's whole grid location left to right (west to east)
iden_label = ttk.Label(self, text="Mission ID:")
iden_label.grid(column=0, row=0, padx=10, pady=5)
self.iden_box = ttk.Entry(self, textvariable=iden)
self.iden_box.grid(column=1, row=0, sticky="WE", padx=10, pady=5)
name_label = ttk.Label(self, text="Name:")
name_label.grid(column=0, row=1, padx=10, pady=5)
self.name_box = ttk.Entry(self, textvariable=name)
self.name_box.grid(column=1, row=1, sticky="WE", padx=10, pady=5)
description_label = ttk.Label(self, text="Description:")
description_label.grid(column=0, row=2, padx=10, pady=5)
self.description_box = ttk.Entry(self, textvariable=description)
self.description_box.grid(column=1, row=2, sticky="WE", padx=10, pady=5)
time_limit_label = ttk.Label(self, text="Time Limit:")
time_limit_label.grid(column=0, row=3, padx=10, pady=5)
self.time_limit_box = ttk.Entry(self, textvariable=time_limit)
self.time_limit_box.grid(column=1, row=3, sticky="WE", padx=10, pady=5)
strikes_label = ttk.Label(self, text="Strikes:")
strikes_label.grid(column=0, row=4, padx=10, pady=5)
self.strikes_box = ttk.Entry(self, textvariable=strikes)
self.strikes_box.grid(column=1, row=4, sticky="WE", padx=10, pady=5)
needy_activation_time_label = ttk.Label(self, text="Needy Activation Time:")
needy_activation_time_label.grid(column=0, row=5, padx=10, pady=5)
self.needy_activation_time_box = ttk.Entry(self, textvariable=needy_activation_time)
self.needy_activation_time_box.grid(column=1, row=5, sticky="WE", padx=10, pady=5)
widgets_label = ttk.Label(self, text="Widget Amount:")
widgets_label.grid(column=0, row=6, padx=10, pady=5)
self.widgets_box = ttk.Entry(self, textvariable=widgets)
self.widgets_box.grid(column=1, row=6, sticky="WE", padx=10, pady=5)
# TODO fix spaghetti code that was made in an attempt to make these look nice, don't ask what I was going for
front_only_check = ttk.Checkbutton(self, text="Front Only", variable=self.front_only, onvalue=1, offvalue=0).grid(column=1, row=7, padx=10, pady=10)
pacing_check = ttk.Checkbutton(self, text="Pacing Events", variable=self.pacing, onvalue=1, offvalue=0).grid(column=1, row=7, sticky="E", padx=10, pady=10, ipadx=100)
# ScrolledText comes with a scrollbar, but it's only for up and down, so I add a Scrollbar to go left and right in case you are using spaces/tabs to separate
modules_label = ttk.Label(self, text="Module List:")
modules_label.grid(column=0, row=8, padx=10, pady=5)
self.modules_box = ScrolledText(self, width=10, height=10, wrap=tk.NONE)
self.modules_box.grid(column=1, row=8, sticky="NSEW", padx=10, pady=0)
modules_scrollbar = ttk.Scrollbar(self, orient='horizontal', command=self.modules_box.xview)
modules_scrollbar.grid(column=1, row=9, sticky="EW", padx=10)
self.modules_box["xscrollcommand"] = modules_scrollbar.set
# TODO make the sheet usable *before* you select this, no idea why this happens
separator_label = ttk.Label(self, text="Separator:")
separator_label.grid(column=0, row=10, padx=10, pady=5)
separator_box = ttk.Combobox(self, textvariable=separator)
separator_box['values'] = ["newlines", "spaces", "tabs"]
separator_box['state'] = 'readonly'
separator_box.grid(column=1, row=10, sticky="W", padx=10, pady=5)
open_dmg_button = ttk.Button(self, text = "Open DMG mission", command = self.parse_dmg).grid(column=1, row=11, padx=10, pady=5)
# runs createMission() with all of the info in the other boxes when pressed
enter_button = ttk.Button(self, text="Create Asset File", command=lambda: self.createMission(iden.get(), name.get(), description.get(), time_limit.get(), strikes.get(), needy_activation_time.get(), self.front_only.get(), widgets.get(), self.modules_box.get("1.0", tk.END), separator.get(), self.pacing.get())).grid(column=1, row=12, padx=10, pady=5)
# shows you the defaults and some important notes before you start
showinfo(title="Info", message="Defaults:\n ID: mission\n Name: Mission\n Description: a mission\n Time Limit: 300\n Strikes: 3\n Needy Activation Time: 90\n Widgets: 5\n\nNote: All times are in seconds.\n\nThe module list should use the module ID's, which can be found at ktane.timwi.de")
        # mainloop() starts Tk's event loop; nothing after this runs until the window is closed
self.mainloop()
#Select DMG mission file and parse its values into the GUI inputs
def parse_dmg(self):
default_values = AssetFile()
#Select and open file
filename = askopenfilename(title = "Select DMG mission")
default_values.iden = path.basename(filename).split(".")[0] #Set iden to be the name of the selected file
lines = []
try:
with open(filename, "r") as mission_file:
lines = mission_file.readlines()
except Exception as e:
print(format_exc())
showerror(title="Mission read error", message="Couldn't read the selected file. Are you sure it's a DMG mission? ({})".format(type(e).__name__))
return
#For each line, detect which property of a mission it matches, and change the value of the mission according to that
for line in lines:
line = line.strip()
skip = False
for pattern in DMG_IGNORE:
if line.startswith(pattern):
|
skip = True
break
|
conditional_block
|
|
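The `enter()` comments above describe the module-list syntax: an optional `count*` prefix (e.g. `2*module`) and optional `[a,b]` brackets for pools, with plain entries treated as one-module pools. A small sketch of that parse, re-expressed in Rust with a hypothetical `parse_pool` helper (note the original keeps the count as a string, so non-numeric counts pass through unchecked; this sketch defaults them to 1):

```rust
/// Parse one module-list entry: optional `count*` prefix, optional
/// `[a,b,...]` pool brackets, e.g. "2*[ModA,ModB]" or just "Wires".
fn parse_pool(entry: &str) -> (u32, Vec<&str>) {
    let (count, rest) = match entry.split_once('*') {
        Some((n, rest)) => (n.trim().parse().unwrap_or(1), rest),
        None => (1, entry),
    };
    let rest = rest.trim();
    // strip the surrounding [] if this entry is a pool
    let inner = rest
        .strip_prefix('[')
        .and_then(|r| r.strip_suffix(']'))
        .unwrap_or(rest);
    (count, inner.split(',').map(str::trim).collect())
}

fn main() {
    assert_eq!(parse_pool("2*[ModA,ModB]"), (2, vec!["ModA", "ModB"]));
    assert_eq!(parse_pool("Wires"), (1, vec!["Wires"])); // one-module pool
    println!("ok");
}
```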
Mission_Util_V01.py
|
(self):
self.iden = "mission"
self.name = "Mission"
self.description = "a mission"
self.time_limit = "300"
self.strikes = "3"
self.needy_activation_time = "90"
self.front_only = 0
self.widgets = "5"
self.modules = ""
self.separator = "\n"
self.pacing = 1
    # run when the create button is pressed; returns True or False based on sanity()
def enter(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing):
# takes all of the inputted info and puts it into AssetFile's variables if they weren't left blank
self.iden = iden.strip() if iden.strip() != "" else self.iden
self.name = name.strip() if name.strip() != "" else self.name
        self.description = description.strip() if description.strip() != "" else self.description
self.time_limit = time_limit.strip() if time_limit.strip() != "" else self.time_limit
self.strikes = strikes.strip() if strikes.strip() != "" else self.strikes
self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time
self.front_only = front_only
self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets
self.modules = modules.strip() if modules.strip() != "" else self.modules
switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"}
self.separator = switcher.get(separator, "\n")
self.pacing = pacing
# stops the process if any of the variables don't pass the sanity check
if not self.sanity():
return False
# string of pain, the whole asset file before the modlist is written here
retstring = "%YAML 1.1\n%TAG !u! tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets)
modlist = self.modules.split(self.separator)
for i in range(len(modlist)):
            count = 1 # how many of the current module there are
            vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded
            component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so a separate string goes under ComponentTypes
modstring = " []" # this goes under ModTypes, default without mods is here
# this counts modules if more than one
# example 2*module would set count to 2 and then continue with everything after 2*
if "*" in modlist[i]:
ind = modlist[i].index("*")
count = modlist[i][:ind]
modlist[i] = modlist[i][ind+1:]
# this removes the formatting of [] around pools
if "[" in modlist[i]:
modlist[i] = modlist[i][1:-1]
# even non-pooled modules are treated like a pool with one module, since they are formatted the same
pooled = modlist[i].split(",")
for module in pooled:
# vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string
if module in VANILLAS:
temp = str(hex(VANILLAS.index(module)))[2:]
component += "0{}000000".format(temp)
else:
vann = 0 # set to zero because if there are mods, the default of " []" is wrong
modstring += "\n - {}".format(module)
# removes default " []"
if vann == 0:
modstring = modstring[3:]
            # finally adds the count, component, and modstring to what goes in the file below the string of pain
retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring)
retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason
retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging
        # creates a .asset file with the name of the ID, since that's what Unity does as well
f = open("{}.asset".format(self.iden), "w")
f.write(retstring)
f.close()
#print(retstring)
return True # returns True to show it passes the sanity check, important for createMission() in the Gui class
    # makes sure neither Unity nor your OS gets mad at you for your asset file, runs within enter()
def sanity(self):
        # the first list holds characters illegal in Win/Mac file names; the second holds Windows reserved filenames
illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."]
illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"]
if (self.iden.upper() in illegalFileNames):
showinfo(title="Error", message="Illegal file name.")
return False
for chara in illegalFileNameChars:
if chara in self.iden:
showinfo(title="Error", message="Illegal character in mission ID.")
return False
# next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets
if not self.time_limit.isnumeric():
showinfo(title="Error", message="Illegal character in Time Limit.")
return False
if not self.strikes.isnumeric():
showinfo(title="Error", message="Illegal character in Strikes.")
return False
if not self.needy_activation_time.isnumeric():
showinfo(title="Error", message="Illegal character in Needy Activation Time.")
return False
if not self.widgets.isnumeric():
showinfo(title="Error", message="Illegal character in Widgets.")
return False
# TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable
# currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity
return True
class Gui(tk.Tk):
    # currently everything except createMission() runs in __init__; while not best practice, it makes the most sense for a GUI
def __init__(self):
super().__init__()
self.title("Mission Asset Utility")
self.geometry('900x750')
iden = tk.StringVar()
name = tk.StringVar()
description = tk.StringVar()
time_limit = tk.StringVar()
strikes = tk.StringVar()
needy_activation_time = tk.StringVar()
self.front_only = tk.StringVar(value=0)
widgets = tk.StringVar()
#modules = tk.StringVar()
separator = tk.StringVar()
self.pacing = tk.StringVar(value=0)
# using tk's grid functionality as it's very nice compared to other options
# column 0 is for names while 1 is for inputs
# row 8 is where module list goes
self.columnconfigure(0, weight = 0)
self.columnconfigure(1, weight = 20)
self.rowconfigure(8, weight = 5)
# sticky="WE" means it fills it's whole grid location left to right (west to east)
iden_label = ttk.Label(self, text="Mission ID:")
iden_label.grid(column=0, row=0, padx=10, pady=5)
self.iden_box = ttk.Entry(self,
|
__init__
|
identifier_name
|
|
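Per the comments above, each vanilla module contributes `0{x}000000` to `ComponentTypes`, where `{x}` is its hex index in the `VANILLAS` table. A sketch with an illustrative three-entry table (the real table isn't shown in these rows; also note the original's `str(hex(i))[2:]` yields two hex digits from index 16 up, so the fixed-width pattern only holds for small indices):

```rust
/// Illustrative stand-in for the utility's VANILLAS table.
const VANILLAS: [&str; 3] = ["Wires", "TheButton", "Keypads"];

/// Vanilla-module encoding described in the comments: `0{x}000000`,
/// where `{x}` is the module's hex index; None for modded modules.
fn component_code(module: &str) -> Option<String> {
    VANILLAS
        .iter()
        .position(|&m| m == module)
        .map(|i| format!("0{:x}000000", i))
}

fn main() {
    assert_eq!(component_code("Keypads").as_deref(), Some("02000000"));
    assert_eq!(component_code("SomeMod"), None); // goes under ModTypes instead
    println!("ok");
}
```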
Mission_Util_V01.py
|
.strikes
self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time
self.front_only = front_only
self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets
self.modules = modules.strip() if modules.strip() != "" else self.modules
switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"}
self.separator = switcher.get(separator, "\n")
self.pacing = pacing
# stops the process if any of the variables don't pass the sanity check
if not self.sanity():
return False
# string of pain, the whole asset file before the modlist is written here
retstring = "%YAML 1.1\n%TAG !u! tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets)
modlist = self.modules.split(self.separator)
for i in range(len(modlist)):
            count = 1 # how many of the current module there are
            vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded
            component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so a separate string goes under ComponentTypes
modstring = " []" # this goes under ModTypes, default without mods is here
# this counts modules if more than one
# example 2*module would set count to 2 and then continue with everything after 2*
if "*" in modlist[i]:
ind = modlist[i].index("*")
count = modlist[i][:ind]
modlist[i] = modlist[i][ind+1:]
# this removes the formatting of [] around pools
if "[" in modlist[i]:
modlist[i] = modlist[i][1:-1]
# even non-pooled modules are treated like a pool with one module, since they are formatted the same
pooled = modlist[i].split(",")
for module in pooled:
# vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string
if module in VANILLAS:
temp = str(hex(VANILLAS.index(module)))[2:]
component += "0{}000000".format(temp)
else:
vann = 0 # set to zero because if there are mods, the default of " []" is wrong
modstring += "\n - {}".format(module)
# removes default " []"
if vann == 0:
modstring = modstring[3:]
            # finally adds the count, component, and modstring to what goes in the file below the string of pain
retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring)
retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason
retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging
        # creates a .asset file with the name of the ID, since that's what Unity does as well
f = open("{}.asset".format(self.iden), "w")
f.write(retstring)
f.close()
#print(retstring)
return True # returns True to show it passes the sanity check, important for createMission() in the Gui class
    # makes sure neither Unity nor your OS gets mad at you for your asset file, runs within enter()
def sanity(self):
        # the first list holds characters illegal in Win/Mac file names; the second holds Windows reserved filenames
illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."]
illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"]
if (self.iden.upper() in illegalFileNames):
showinfo(title="Error", message="Illegal file name.")
return False
for chara in illegalFileNameChars:
if chara in self.iden:
showinfo(title="Error", message="Illegal character in mission ID.")
return False
# next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets
if not self.time_limit.isnumeric():
showinfo(title="Error", message="Illegal character in Time Limit.")
return False
if not self.strikes.isnumeric():
showinfo(title="Error", message="Illegal character in Strikes.")
return False
if not self.needy_activation_time.isnumeric():
showinfo(title="Error", message="Illegal character in Needy Activation Time.")
return False
if not self.widgets.isnumeric():
|
showinfo(title="Error", message="Illegal character in Widgets.")
return False
# TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable
# currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity
return True
class Gui(tk.Tk):
    # currently everything except createMission() runs in __init__; while not best practice, it makes the most sense for a GUI
def __init__(self):
super().__init__()
self.title("Mission Asset Utility")
self.geometry('900x750')
iden = tk.StringVar()
name = tk.StringVar()
description = tk.StringVar()
time_limit = tk.StringVar()
strikes = tk.StringVar()
needy_activation_time = tk.StringVar()
self.front_only = tk.StringVar(value=0)
widgets = tk.StringVar()
#modules = tk.StringVar()
separator = tk.StringVar()
self.pacing = tk.StringVar(value=0)
# using tk's grid functionality as it's very nice compared to other options
# column 0 is for names while 1 is for inputs
# row 8 is where module list goes
self.columnconfigure(0, weight = 0)
self.columnconfigure(1, weight = 20)
self.rowconfigure(8, weight = 5)
# sticky="WE" means it fills it's whole grid location left to right (west to east)
iden_label = ttk.Label(self, text="Mission ID:")
iden_label.grid(column=0, row=0, padx=10, pady=5)
self.iden_box = ttk.Entry(self, textvariable=iden)
self.iden_box.grid(column=1, row=0, sticky="WE", padx=10, pady=5)
name_label = ttk.Label(self, text="Name:")
name_label.grid(column=0, row=1, padx=10, pady=5)
self.name_box = ttk.Entry(self, textvariable=name)
self.name_box.grid(column=1, row=1, sticky="WE", padx=10, pady=5)
description_label = ttk.Label(self, text="Description:")
description_label.grid(column=0, row=2, padx=10, pady=5)
self.description_box = ttk.Entry(self, textvariable=description)
self.description_box.grid(column=1, row=2, sticky="WE", padx=10, pady=5)
time_limit_label = ttk.Label(self, text="Time Limit:")
time_limit_label.grid(column=0, row=3, padx=10, pady=5)
self.time_limit_box = ttk.Entry(self, textvariable=time_limit)
self.time_limit_box.grid(column=1, row=3, sticky="WE", padx=10, pady=5)
strikes_label =
|
random_line_split
|
|
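The `sanity()` method above rejects Windows reserved device names and characters that are illegal in Windows/macOS file names (including `.` and space, which the author bans from IDs as well). The same rules as a runnable Rust sketch:

```rust
/// Sketch of the filename sanity rules: reject Windows reserved device
/// names and characters illegal in Windows/macOS file names.
fn is_sane_id(id: &str) -> bool {
    const RESERVED: [&str; 22] = [
        "CON", "PRN", "AUX", "NUL",
        "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
        "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
    ];
    const ILLEGAL: &str = "<>:\"/\\|?* .";
    !RESERVED.contains(&id.to_uppercase().as_str())
        && !id.chars().any(|c| ILLEGAL.contains(c))
}

fn main() {
    assert!(is_sane_id("mission"));
    assert!(!is_sane_id("COM1"));       // reserved device name
    assert!(!is_sane_id("my.mission")); // '.' is rejected, as in the original
    println!("ok");
}
```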
Mission_Util_V01.py
|
class AssetFile:
# default settings/variable init
def __init__(self):
self.iden = "mission"
self.name = "Mission"
self.description = "a mission"
self.time_limit = "300"
self.strikes = "3"
self.needy_activation_time = "90"
self.front_only = 0
self.widgets = "5"
self.modules = ""
self.separator = "\n"
self.pacing = 1
    # run when the create button is pressed; returns True or False based on sanity()
def enter(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing):
# takes all of the inputted info and puts it into AssetFile's variables if they weren't left blank
self.iden = iden.strip() if iden.strip() != "" else self.iden
self.name = name.strip() if name.strip() != "" else self.name
        self.description = description.strip() if description.strip() != "" else self.description
self.time_limit = time_limit.strip() if time_limit.strip() != "" else self.time_limit
self.strikes = strikes.strip() if strikes.strip() != "" else self.strikes
self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time
self.front_only = front_only
self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets
self.modules = modules.strip() if modules.strip() != "" else self.modules
switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"}
self.separator = switcher.get(separator, "\n")
self.pacing = pacing
# stops the process if any of the variables don't pass the sanity check
if not self.sanity():
return False
# string of pain, the whole asset file before the modlist is written here
retstring = "%YAML 1.1\n%TAG !u! tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets)
modlist = self.modules.split(self.separator)
for i in range(len(modlist)):
            count = 1 # how many of the current module there are
            vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded
            component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so a separate string goes under ComponentTypes
modstring = " []" # this goes under ModTypes, default without mods is here
# this counts modules if more than one
# example 2*module would set count to 2 and then continue with everything after 2*
if "*" in modlist[i]:
ind = modlist[i].index("*")
count = modlist[i][:ind]
modlist[i] = modlist[i][ind+1:]
# this removes the formatting of [] around pools
if "[" in modlist[i]:
modlist[i] = modlist[i][1:-1]
# even non-pooled modules are treated like a pool with one module, since they are formatted the same
pooled = modlist[i].split(",")
for module in pooled:
# vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string
if module in VANILLAS:
temp = str(hex(VANILLAS.index(module)))[2:]
component += "0{}000000".format(temp)
else:
vann = 0 # set to zero because if there are mods, the default of " []" is wrong
modstring += "\n - {}".format(module)
# removes default " []"
if vann == 0:
modstring = modstring[3:]
            # finally adds the count, component, and modstring to what goes in the file below the string of pain
retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring)
retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason
retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging
        # creates a .asset file with the name of the ID, since that's what Unity does as well
f = open("{}.asset".format(self.iden), "w")
f.write(retstring)
f.close()
#print(retstring)
return True # returns True to show it passes the sanity check, important for createMission() in the Gui class
    # makes sure neither Unity nor your OS gets mad at you for your asset file, runs within enter()
def sanity(self):
        # the first list holds characters illegal in Win/Mac file names; the second holds Windows reserved filenames
illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."]
illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"]
if (self.iden.upper() in illegalFileNames):
showinfo(title="Error", message="Illegal file name.")
return False
for chara in illegalFileNameChars:
if chara in self.iden:
showinfo(title="Error", message="Illegal character in mission ID.")
return False
# next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets
if not self.time_limit.isnumeric():
showinfo(title="Error", message="Illegal character in Time Limit.")
return False
if not self.strikes.isnumeric():
showinfo(title="Error", message="Illegal character in Strikes.")
return False
if not self.needy_activation_time.isnumeric():
showinfo(title="Error", message="Illegal character in Needy Activation Time.")
return False
if not self.widgets.isnumeric():
showinfo(title="Error", message="Illegal character in Widgets.")
return False
# TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable
# currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity
return True
class Gui(tk.Tk):
    # currently everything except createMission() runs in __init__; while not best practice, it makes the most sense for a GUI
def __init__(self):
super().__init__()
self.title("Mission Asset Utility")
self.geometry('900x750')
iden = tk.StringVar()
name = tk.StringVar()
description = tk.StringVar()
time_limit = tk.StringVar()
strikes = tk.StringVar()
needy_activation_time = tk.StringVar()
self.front_only = tk.StringVar(value=0)
widgets = tk.StringVar()
#modules = tk.StringVar()
separator = tk.StringVar()
self.pacing = tk.StringVar(value=0)
# using tk's grid functionality as it's very nice compared to other options
# column 0 is for names while 1 is for inputs
# row 8 is where module list goes
self.columnconfigure(0, weight = 0)
self.columnconfigure(1, weight = 20)
self.rowconfigure(
|
try: #ScrolledText
widget.delete("1.0", tk.END)
widget.insert("1.0", text)
except tk.TclError: #Bad entry index - Entry
widget.delete(0, tk.END)
widget.insert(0, text)
|
identifier_body
|
|
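The `enter()` method builds the whole Unity `.asset` text by hand, starting from the `%YAML 1.1` / `%TAG !u!` header in the "string of pain". An abbreviated sketch of that skeleton (field names copied from the format string above; several `m_*` fields are omitted here for brevity, so this is not a complete, Unity-loadable file):

```rust
/// Abbreviated Unity .asset skeleton; values are illustrative.
fn asset_header(id: &str, name: &str, time_limit: u32, strikes: u32) -> String {
    format!(
        "%YAML 1.1\n%TAG !u! tag:unity3d.com,2011:\n--- !u!114 &11400000\n\
         MonoBehaviour:\n  m_Name: {id}\n  DisplayName: {name}\n\
         GeneratorSetting:\n    TimeLimit: {time_limit}\n    NumStrikes: {strikes}\n\
         ComponentPools:"
    )
}

fn main() {
    // one pool entry per module-list line would follow ComponentPools:
    print!("{}", asset_header("mission", "Mission", 300, 3));
}
```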
flood_order.rs
|
,
"Tool run with no parameters.",
));
}
for i in 0..args.len() {
let mut arg = args[i].replace("\"", "");
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
if vec[0].to_lowercase() == "-i"
|| vec[0].to_lowercase() == "--input"
|| vec[0].to_lowercase() == "--dem"
{
if keyval {
input_file = vec[1].to_string();
} else {
input_file = args[i + 1].to_string();
}
} else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" {
if keyval {
output_file = vec[1].to_string();
} else {
output_file = args[i + 1].to_string();
}
}
}
if verbose {
let tool_name = self.get_tool_name();
let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28);
            // 28 = length of the 'Powered by' statement.
println!("{}", "*".repeat(welcome_len));
println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len()));
println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28));
println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23));
println!("{}", "*".repeat(welcome_len));
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
if !input_file.contains(&sep) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
if verbose {
println!("Reading data...")
};
let input = Raster::new(&input_file, "r")?;
let start = Instant::now();
let rows = input.configs.rows as isize;
let columns = input.configs.columns as isize;
let num_cells = rows * columns;
let nodata = input.configs.nodata;
// let min_val = input.configs.minimum;
// let elev_digits = ((input.configs.maximum - min_val) as i64).to_string().len();
// let elev_multiplier = 10.0_f64.powi((7 - elev_digits) as i32);
// let mut small_num = 0f64; //1.0 / elev_multiplier as f64;
let background_val = (i32::min_value() + 1) as f64;
let mut filled_dem: Array2D<f64> = Array2D::new(rows, columns, background_val, nodata)?;
let mut output = Raster::initialize_using_file(&output_file, &input);
/*
Find the data edges. This is complicated by the fact that DEMs frequently
have nodata edges, whereby the DEM does not occupy the full extent of
the raster. One approach to doing this would be simply to scan the
raster, looking for cells that neighbour nodata values. However, this
assumes that there are no interior nodata holes in the dataset. Instead,
the approach used here is to perform a region-growing operation, looking
for nodata values along the raster's edges.
*/
let mut queue: VecDeque<(isize, isize)> =
VecDeque::with_capacity((rows * columns) as usize);
for row in 0..rows {
/*
Note that this is only possible because Whitebox rasters
allow you to address cells beyond the raster extent but
return the nodata value for these regions.
*/
queue.push_back((row, -1));
queue.push_back((row, columns));
}
for col in 0..columns {
queue.push_back((-1, col));
queue.push_back((rows, col));
}
/*
minheap is the priority queue. Note that I've tested using integer-based
priority values, by multiplying the elevations, but this didn't result
in a significant performance gain over the use of f64s.
*/
let mut minheap = BinaryHeap::with_capacity((rows * columns) as usize);
let mut num_solved_cells = 0;
let mut zin_n: f64; // value of neighbour of row, col in input raster
let mut zout: f64; // value of row, col in output raster
let mut zout_n: f64; // value of neighbour of row, col in output raster
let dx = [1, 1, 1, 0, -1, -1, -1, 0];
let dy = [-1, 0, 1, 1, 1, 0, -1, -1];
let (mut row, mut col): (isize, isize);
let (mut row_n, mut col_n): (isize, isize);
while !queue.is_empty() {
let cell = queue.pop_front().unwrap();
row = cell.0;
col = cell.1;
for n in 0..8 {
row_n = row + dy[n];
col_n = col + dx[n];
zin_n = input[(row_n, col_n)];
zout_n = filled_dem[(row_n, col_n)];
if zout_n == background_val {
if zin_n == nodata {
filled_dem[(row_n, col_n)] = nodata;
output[(row_n, col_n)] = nodata;
queue.push_back((row_n, col_n));
} else {
filled_dem[(row_n, col_n)] = zin_n;
// Push it onto the priority queue for the priority flood operation
minheap.push(GridCell {
row: row_n,
column: col_n,
priority: zin_n,
});
}
num_solved_cells += 1;
}
}
if verbose {
progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize;
if progress != old_progress {
println!("progress: {}%", progress);
old_progress = progress;
}
}
}
// Perform the priority flood operation.
let mut order_val = 1f64;
while !minheap.is_empty() {
let cell = minheap.pop().expect("Error during pop operation.");
row = cell.row;
col = cell.column;
zout = filled_dem[(row, col)];
output[(row, col)] = order_val;
order_val += 1f64;
for n in 0..8 {
row_n = row + dy[n];
col_n = col + dx[n];
zout_n = filled_dem[(row_n, col_n)];
if zout_n == background_val {
zin_n = input[(row_n, col_n)];
if zin_n != nodata {
if zin_n < zout {
zin_n = zout;
} // We're in a depression. Raise the elevation.
filled_dem[(row_n, col_n)] = zin_n;
minheap.push(GridCell {
row: row_n,
column: col_n,
priority: zin_n,
});
} else {
// Interior nodata cells are still treated as nodata and are not filled.
output[(row_n, col_n)] = nodata;
num_solved_cells += 1;
}
}
}
if verbose {
num_solved_cells += 1;
progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize;
if progress != old_progress {
println!("Progress: {}%", progress);
old_progress = progress;
}
}
}
let elapsed_time = get_formatted_elapsed_time(start);
output.add_metadata_entry(format!(
"Created by whitebox_tools\' {} tool",
self.get_tool_name()
));
output.add_metadata_entry(format!("Input file: {}", input_file));
output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time));
if verbose {
println!("Saving data...")
};
let _ = match output.write() {
Ok(_) => {
if verbose {
println!("Output file written")
}
}
Err(e) => return Err(e),
};
if verbose {
println!(
"{}",
&format!("Elapsed Time (excluding I/O): {}", elapsed_time)
);
}
Ok(())
}
}
#[derive(PartialEq, Debug)]
struct GridCell {
row: isize,
column: isize,
// priority: usize,
priority: f64,
}
impl Eq for GridCell {}
impl PartialOrd for GridCell {
fn
|
partial_cmp
|
identifier_name
|
|
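The comments above describe the tool's two phases: seed a priority queue with the (data) edge cells, then repeatedly pop the lowest cell, stamp its visit order into the output, and push unvisited neighbours raised to at least the popped elevation, which fills depressions on the fly. A self-contained sketch of that ordering on a tiny DEM, using 4-connectivity and no nodata handling (the tool uses 8 neighbours and a custom `GridCell` ordering; `Reverse` over an integer-scaled key stands in for it here):

```rust
use std::cmp::Reverse;
use std::collections::BinaryHeap;

/// Priority-flood visit order: seed the heap with border cells, pop the
/// lowest, number it, and push unvisited neighbours raised to at least the
/// popped elevation (filling depressions on the fly).
fn flood_order(dem: &[Vec<f64>]) -> Vec<Vec<u32>> {
    let (rows, cols) = (dem.len(), dem[0].len());
    let mut order = vec![vec![0u32; cols]; rows];
    let mut seen = vec![vec![false; cols]; rows];
    let mut heap = BinaryHeap::new(); // max-heap; Reverse makes it a min-heap
    let key = |z: f64| Reverse((z * 1e6) as i64); // f64 isn't Ord, so scale
    for r in 0..rows {
        for c in 0..cols {
            if r == 0 || c == 0 || r == rows - 1 || c == cols - 1 {
                seen[r][c] = true;
                heap.push((key(dem[r][c]), r, c));
            }
        }
    }
    let mut next = 1u32;
    while let Some((Reverse(z), r, c)) = heap.pop() {
        order[r][c] = next;
        next += 1;
        for (dr, dc) in [(-1i64, 0i64), (1, 0), (0, -1), (0, 1)] {
            let (nr, nc) = (r as i64 + dr, c as i64 + dc);
            if nr < 0 || nc < 0 || nr as usize >= rows || nc as usize >= cols {
                continue;
            }
            let (nr, nc) = (nr as usize, nc as usize);
            if !seen[nr][nc] {
                seen[nr][nc] = true;
                // depressions are raised to the spill elevation, as in the tool
                let zn = ((dem[nr][nc] * 1e6) as i64).max(z);
                heap.push((Reverse(zn), nr, nc));
            }
        }
    }
    order
}

fn main() {
    let dem = vec![
        vec![3.0, 3.0, 3.0],
        vec![3.0, 1.0, 3.0], // interior depression
        vec![3.0, 3.0, 3.0],
    ];
    for row in flood_order(&dem) {
        println!("{row:?}");
    }
}
```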
flood_order.rs
|
sep);
FloodOrder {
name: name,
description: description,
toolbox: toolbox,
parameters: parameters,
example_usage: usage,
}
}
}
impl WhiteboxTool for FloodOrder {
fn get_source_file(&self) -> String {
String::from(file!())
}
fn get_tool_name(&self) -> String {
self.name.clone()
}
fn get_tool_description(&self) -> String {
self.description.clone()
}
fn get_tool_parameters(&self) -> String {
match serde_json::to_string(&self.parameters) {
Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str),
Err(err) => return format!("{:?}", err),
}
}
fn get_example_usage(&self) -> String {
self.example_usage.clone()
}
fn get_toolbox(&self) -> String {
self.toolbox.clone()
}
fn run<'a>(
&self,
args: Vec<String>,
working_directory: &'a str,
verbose: bool,
) -> Result<(), Error> {
let mut input_file = String::new();
let mut output_file = String::new();
if args.len() == 0 {
return Err(Error::new(
ErrorKind::InvalidInput,
"Tool run with no parameters.",
));
}
for i in 0..args.len() {
let mut arg = args[i].replace("\"", "");
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
if vec[0].to_lowercase() == "-i"
|| vec[0].to_lowercase() == "--input"
|| vec[0].to_lowercase() == "--dem"
{
if keyval {
input_file = vec[1].to_string();
} else {
input_file = args[i + 1].to_string();
}
} else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" {
if keyval {
output_file = vec[1].to_string();
} else {
output_file = args[i + 1].to_string();
}
}
}
if verbose {
let tool_name = self.get_tool_name();
let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28);
            // 28 = length of the 'Powered by' statement.
println!("{}", "*".repeat(welcome_len));
println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len()));
println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28));
println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23));
println!("{}", "*".repeat(welcome_len));
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
if !input_file.contains(&sep) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
if verbose {
println!("Reading data...")
};
let input = Raster::new(&input_file, "r")?;
let start = Instant::now();
let rows = input.configs.rows as isize;
let columns = input.configs.columns as isize;
let num_cells = rows * columns;
let nodata = input.configs.nodata;
// let min_val = input.configs.minimum;
// let elev_digits = ((input.configs.maximum - min_val) as i64).to_string().len();
// let elev_multiplier = 10.0_f64.powi((7 - elev_digits) as i32);
// let mut small_num = 0f64; //1.0 / elev_multiplier as f64;
let background_val = (i32::min_value() + 1) as f64;
let mut filled_dem: Array2D<f64> = Array2D::new(rows, columns, background_val, nodata)?;
let mut output = Raster::initialize_using_file(&output_file, &input);
/*
Find the data edges. This is complicated by the fact that DEMs frequently
have nodata edges, whereby the DEM does not occupy the full extent of
the raster. One approach to doing this would be simply to scan the
raster, looking for cells that neighbour nodata values. However, this
assumes that there are no interior nodata holes in the dataset. Instead,
the approach used here is to perform a region-growing operation, looking
for nodata values along the raster's edges.
*/
let mut queue: VecDeque<(isize, isize)> =
VecDeque::with_capacity((rows * columns) as usize);
for row in 0..rows {
/*
Note that this is only possible because Whitebox rasters
allow you to address cells beyond the raster extent but
return the nodata value for these regions.
*/
queue.push_back((row, -1));
queue.push_back((row, columns));
}
for col in 0..columns {
queue.push_back((-1, col));
queue.push_back((rows, col));
}
/*
minheap is the priority queue. Note that I've tested using integer-based
priority values, by multiplying the elevations, but this didn't result
in a significant performance gain over the use of f64s.
*/
let mut minheap = BinaryHeap::with_capacity((rows * columns) as usize);
let mut num_solved_cells = 0;
let mut zin_n: f64; // value of neighbour of row, col in input raster
let mut zout: f64; // value of row, col in output raster
let mut zout_n: f64; // value of neighbour of row, col in output raster
let dx = [1, 1, 1, 0, -1, -1, -1, 0];
let dy = [-1, 0, 1, 1, 1, 0, -1, -1];
let (mut row, mut col): (isize, isize);
let (mut row_n, mut col_n): (isize, isize);
while !queue.is_empty() {
let cell = queue.pop_front().unwrap();
row = cell.0;
col = cell.1;
for n in 0..8 {
row_n = row + dy[n];
col_n = col + dx[n];
zin_n = input[(row_n, col_n)];
zout_n = filled_dem[(row_n, col_n)];
if zout_n == background_val {
if zin_n == nodata {
filled_dem[(row_n, col_n)] = nodata;
output[(row_n, col_n)] = nodata;
queue.push_back((row_n, col_n));
} else {
filled_dem[(row_n, col_n)] = zin_n;
// Push it onto the priority queue for the priority flood operation
minheap.push(GridCell {
row: row_n,
column: col_n,
priority: zin_n,
});
}
num_solved_cells += 1;
}
}
if verbose {
progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize;
if progress != old_progress {
println!("progress: {}%", progress);
old_progress = progress;
}
}
}
// Perform the priority flood operation.
let mut order_val = 1f64;
while !minheap.is_empty() {
let cell = minheap.pop().expect("Error during pop operation.");
row = cell.row;
col = cell.column;
zout = filled_dem[(row, col)];
output[(row, col)] = order_val;
order_val += 1f64;
for n in 0..8 {
row_n = row + dy[n];
col_n = col + dx[n];
zout_n = filled_dem[(row_n, col_n)];
if zout_n == background_val {
zin_n = input[(row_n, col_n)];
if zin_n != nodata {
if zin_n < zout {
zin_n = zout;
} // We're in a depression. Raise the elevation.
filled_dem[(row_n, col_n)] = zin_n;
minheap.push(GridCell {
row: row_n,
column: col_n,
priority: zin_n,
});
} else {
// Interior nodata cells are still treated as nodata and are not filled.
output[(row_n, col_n)] = nodata;
num_solved_cells += 1;
}
}
}
if verbose {
num_solved_cells += 1;
|
random_line_split
|
||
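The long comment block explains the edge-finding pass: rather than scanning for cells that touch nodata (which would misclassify interior nodata holes), the tool region-grows from a virtual ring of cells just outside the raster, relying on out-of-extent reads returning nodata. A sketch of that pass with a bounds-checked `get` closure playing the role of Whitebox's out-of-extent addressing:

```rust
use std::collections::VecDeque;

/// Mark nodata cells connected to the raster edge by region-growing from a
/// virtual ring just outside the grid; interior nodata holes stay unmarked
/// unless they connect to the edge.
fn edge_connected_nodata(grid: &[Vec<f64>], nodata: f64) -> Vec<Vec<bool>> {
    let (rows, cols) = (grid.len() as i64, grid[0].len() as i64);
    // out-of-range reads return nodata, like Whitebox's raster addressing
    let get = |r: i64, c: i64| {
        if r < 0 || c < 0 || r >= rows || c >= cols {
            nodata
        } else {
            grid[r as usize][c as usize]
        }
    };
    let mut marked = vec![vec![false; cols as usize]; rows as usize];
    let mut queue: VecDeque<(i64, i64)> = VecDeque::new();
    for r in -1..=rows {
        queue.push_back((r, -1));
        queue.push_back((r, cols));
    }
    for c in 0..cols {
        queue.push_back((-1, c));
        queue.push_back((rows, c));
    }
    while let Some((r, c)) = queue.pop_front() {
        for (dr, dc) in [(-1, 0), (1, 0), (0, -1), (0, 1)] {
            let (nr, nc) = (r + dr, c + dc);
            if get(nr, nc) == nodata
                && (0..rows).contains(&nr)
                && (0..cols).contains(&nc)
                && !marked[nr as usize][nc as usize]
            {
                marked[nr as usize][nc as usize] = true;
                queue.push_back((nr, nc));
            }
        }
    }
    marked
}

fn main() {
    let nd = -32768.0;
    let grid = vec![
        vec![nd, nd, nd],
        vec![nd, 5.0, nd], // the data cell stays unmarked
        vec![nd, nd, nd],
    ];
    println!("{:?}", edge_connected_nodata(&grid, nd));
}
```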
flood_order.rs
|
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
if vec[0].to_lowercase() == "-i"
|| vec[0].to_lowercase() == "--input"
|| vec[0].to_lowercase() == "--dem"
{
if keyval {
input_file = vec[1].to_string();
} else {
input_file = args[i + 1].to_string();
}
} else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" {
if keyval {
output_file = vec[1].to_string();
} else {
output_file = args[i + 1].to_string();
}
}
}
if verbose {
let tool_name = self.get_tool_name();
let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28);
            // 28 = length of the 'Powered by' statement.
println!("{}", "*".repeat(welcome_len));
println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len()));
println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28));
println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23));
println!("{}", "*".repeat(welcome_len));
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
if !input_file.contains(&sep) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
if verbose {
println!("Reading data...")
};
let input = Raster::new(&input_file, "r")?;
let start = Instant::now();
let rows = input.configs.rows as isize;
let columns = input.configs.columns as isize;
let num_cells = rows * columns;
let nodata = input.configs.nodata;
// let min_val = input.configs.minimum;
// let elev_digits = ((input.configs.maximum - min_val) as i64).to_string().len();
// let elev_multiplier = 10.0_f64.powi((7 - elev_digits) as i32);
// let mut small_num = 0f64; //1.0 / elev_multiplier as f64;
let background_val = (i32::min_value() + 1) as f64;
let mut filled_dem: Array2D<f64> = Array2D::new(rows, columns, background_val, nodata)?;
let mut output = Raster::initialize_using_file(&output_file, &input);
/*
Find the data edges. This is complicated by the fact that DEMs frequently
have nodata edges, whereby the DEM does not occupy the full extent of
the raster. One approach to doing this would be simply to scan the
raster, looking for cells that neighbour nodata values. However, this
assumes that there are no interior nodata holes in the dataset. Instead,
the approach used here is to perform a region-growing operation, looking
for nodata values along the raster's edges.
*/
let mut queue: VecDeque<(isize, isize)> =
VecDeque::with_capacity((rows * columns) as usize);
for row in 0..rows {
/*
Note that this is only possible because Whitebox rasters
allow you to address cells beyond the raster extent but
return the nodata value for these regions.
*/
queue.push_back((row, -1));
queue.push_back((row, columns));
}
for col in 0..columns {
queue.push_back((-1, col));
queue.push_back((rows, col));
}
/*
minheap is the priority queue. Note that I've tested using integer-based
priority values, by multiplying the elevations, but this didn't result
in a significant performance gain over the use of f64s.
*/
let mut minheap = BinaryHeap::with_capacity((rows * columns) as usize);
let mut num_solved_cells = 0;
let mut zin_n: f64; // value of neighbour of row, col in input raster
let mut zout: f64; // value of row, col in output raster
let mut zout_n: f64; // value of neighbour of row, col in output raster
let dx = [1, 1, 1, 0, -1, -1, -1, 0];
let dy = [-1, 0, 1, 1, 1, 0, -1, -1];
let (mut row, mut col): (isize, isize);
let (mut row_n, mut col_n): (isize, isize);
while !queue.is_empty() {
let cell = queue.pop_front().unwrap();
row = cell.0;
col = cell.1;
for n in 0..8 {
row_n = row + dy[n];
col_n = col + dx[n];
zin_n = input[(row_n, col_n)];
zout_n = filled_dem[(row_n, col_n)];
if zout_n == background_val {
if zin_n == nodata {
filled_dem[(row_n, col_n)] = nodata;
output[(row_n, col_n)] = nodata;
queue.push_back((row_n, col_n));
} else {
filled_dem[(row_n, col_n)] = zin_n;
// Push it onto the priority queue for the priority flood operation
minheap.push(GridCell {
row: row_n,
column: col_n,
priority: zin_n,
});
}
num_solved_cells += 1;
}
}
if verbose {
progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize;
if progress != old_progress {
println!("progress: {}%", progress);
old_progress = progress;
}
}
}
// Perform the priority flood operation.
let mut order_val = 1f64;
while !minheap.is_empty() {
let cell = minheap.pop().expect("Error during pop operation.");
row = cell.row;
col = cell.column;
zout = filled_dem[(row, col)];
output[(row, col)] = order_val;
order_val += 1f64;
for n in 0..8 {
row_n = row + dy[n];
col_n = col + dx[n];
zout_n = filled_dem[(row_n, col_n)];
if zout_n == background_val {
zin_n = input[(row_n, col_n)];
if zin_n != nodata {
if zin_n < zout {
zin_n = zout;
} // We're in a depression. Raise the elevation.
filled_dem[(row_n, col_n)] = zin_n;
minheap.push(GridCell {
row: row_n,
column: col_n,
priority: zin_n,
});
} else {
// Interior nodata cells are still treated as nodata and are not filled.
output[(row_n, col_n)] = nodata;
num_solved_cells += 1;
}
}
}
if verbose {
num_solved_cells += 1;
progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize;
if progress != old_progress {
println!("Progress: {}%", progress);
old_progress = progress;
}
}
}
let elapsed_time = get_formatted_elapsed_time(start);
output.add_metadata_entry(format!(
"Created by whitebox_tools\' {} tool",
self.get_tool_name()
));
output.add_metadata_entry(format!("Input file: {}", input_file));
output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time));
if verbose {
println!("Saving data...")
};
let _ = match output.write() {
Ok(_) => {
if verbose {
println!("Output file written")
}
}
Err(e) => return Err(e),
};
if verbose {
println!(
"{}",
&format!("Elapsed Time (excluding I/O): {}", elapsed_time)
);
}
Ok(())
}
}
#[derive(PartialEq, Debug)]
struct GridCell {
row: isize,
column: isize,
// priority: usize,
priority: f64,
}
impl Eq for GridCell {}
impl PartialOrd for GridCell {
fn partial_cmp(&self, other: &Self) -> Option<Ordering>
|
{
// Some(other.priority.cmp(&self.priority))
other.priority.partial_cmp(&self.priority)
}
|
identifier_body
|
|
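The `partial_cmp` body above compares `other.priority` against `self.priority`, i.e. the order is deliberately reversed: `BinaryHeap` is a max-heap, so inverting the comparison makes `pop()` return the *lowest* elevation, which is exactly what priority flood needs. The matching `Ord` impl isn't shown in these rows; a sketch of the same trick using `f64::total_cmp` (one safe way to get a total order out of floats):

```rust
use std::cmp::Ordering;
use std::collections::BinaryHeap;

/// Min-heap trick used by GridCell: BinaryHeap is a max-heap, so comparing
/// `other` against `self` inverts the order and `pop()` yields the lowest
/// priority. f64 is only PartialOrd, so Ord must be supplied by hand.
#[derive(PartialEq, Debug)]
struct Cell {
    priority: f64,
}
impl Eq for Cell {}
impl PartialOrd for Cell {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Cell {
    fn cmp(&self, other: &Self) -> Ordering {
        other.priority.total_cmp(&self.priority) // reversed: min-heap behaviour
    }
}

fn main() {
    let mut heap = BinaryHeap::new();
    for p in [3.0, 1.0, 2.0] {
        heap.push(Cell { priority: p });
    }
    assert_eq!(heap.pop().unwrap().priority, 1.0); // lowest comes out first
    println!("ok");
}
```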
main.js
|
this._childs.text.align = 'center';
this._childs.text.fontWeight = 'normal';
this._childs.text.fontSize = 24;
this._childs.text.fill = '#fff';
},
preload: function(){
console.log('loading.preload');
var _this = this;
game.load.onFileComplete.add(function(p){
_this._childs.text.text = p+'%';
_this._childs.line2.moveTo(game.width*0.2, game.height*0.5);
_this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5);
});
game.load.onLoadComplete.addOnce(function(){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){
game.state.start('home');
});
});
game.load.image('loading', 'img/loading.png');
game.load.image('bg', 'img/bg.jpg');
game.load.image('btn', 'img/btn.png');
game.load.spritesheet('food', 'img/food.png', 32, 32, 3);
game.load.image('title', 'img/title.png');
game.load.image('snake-head', 'img/snake-head.png');
game.load.image('snake-body', 'img/snake-body.png');
game.load.spritesheet('result', 'img/result.png', 32, 32, 3);
},
shutdown: function(){
game.world.alpha=1;
}
},
home: {
init: function(data){
this._result = data;
},
create: function(){
console.log('home.create');
var temp, _this = this;
this._childs = {};
            //background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
            //title
this._childs.title = game.add.sprite(0, 100, 'title');
this._childs.title.anchor.set(0.5);
this._childs.title.position.set(game.width*0.5, game.height*0.2);
            //button
this._childs.btn = game.add.button(0,0,'btn');
this._childs.btn.anchor.set(0.5);
this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75);
            temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); // "Play again" : "Start game"
temp.align = 'center';
temp.font = 'arial';
temp.fontWeight = 'normal';
temp.fontSize = 48;
temp.fill = '#fff';
temp.anchor.setTo(0.5, 0.45);
this._childs.btn.addChild(temp);
this._childs.btn.onInputDown.add(function(el, e){
el.scale.set(0.97);
el.getChildAt(0).fill = '#cfc';
});
this._childs.btn.onInputUp.add(function(el, e){
el.scale.set(1);
el.getChildAt(0).fill = '#fff';
if(e.timeUp-e.timeDown<500){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
game.state.start('play');
});
}
});
            //animations
game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700);
            //results
if(this._result){
this._childs.point = game.add.text(0,0,'0');
this._childs.point.align = 'center';
this._childs.point.fontSize = 72;
this._childs.point.fill = '#fff';
this._childs.point.anchor.set(0.5);
this._childs.point.position.set(game.width*0.5, this._childs.title.y+200);
this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60);
this._childs.point_line.lineStyle(3, 0xffffff, 0.75);
this._childs.point_line.moveTo(-game.width*0.3, 0);
this._childs.point_line.lineTo(game.width*0.3, 0);
for(var i=1; i<=3; i++){
temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' ');
temp.font = 'arial';
temp.fontSize = 30;
temp.fill = '#fff';
temp.fontWeight = 'normal';
temp.addChild(game.add.image(-50, 0, 'result', i-1));
this._childs['point'+i] = temp;
}
                this._childs.point1.text = '身长 '+this._result.size+'米'; // "Length <size> m"
                this._childs.point2.text = '吃掉 '+this._result.food+'个'; // "Ate <food>"
                this._childs.point3.text = '用时 '+this._result.time+'秒'; // "Time <time> s"
game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000);
game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200);
game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500);
game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750);
game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000);
game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){
_this._childs.point.text = Math.floor(tween.target.point);
|
_this._childs.point.text = _this._result.point;
});
}
},
update: function(){
this._childs.bg.tilePosition.y+=1;
},
shutdown: function(){
game.world.alpha=1;
}
},
play: {
create: function(){
console.log('play.create');
var _this=this, temp;
this._childs = {};
this._gdata = {
is_end: false,
time_begin: game.time.now,
food_get: 0,
snake_speed: 400,
snake_path: []
}
game.physics.startSystem(Phaser.Physics.ARCADE);
            //background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
            //food
this._childs.foods = game.add.group(undefined, 'foods');
this._childs.foods.enableBody = true;
this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE;
function addFood(){
if(_this._gdata.is_end || _this._childs.foods.length>10){ return; }
var type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2);
var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type);
                temp.name = 'food'+type;
temp.anchor.set(0.5);
temp.body.enable = false;
game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
if(_this._gdata.is_end){ return; }
temp = 0.7;
this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5);
this.body.enable = true;
game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood);
}, temp);
if(type>0){
game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){
e
|
}).onComplete.addOnce(function(){
|
random_line_split
|
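The `onFileComplete` handler above redraws the loading bar from 20% of the screen width to `0.2*w + 0.6*w*p/100`, so the bar occupies the middle 60% of the screen and scales linearly with the load percentage p. A quick check of those endpoints:

```rust
/// End x-coordinate of the loading bar for a given load percentage:
/// the bar spans the middle 60% of the screen width.
fn bar_end_x(width: f64, percent: f64) -> f64 {
    width * 0.2 + width * 0.6 * percent * 0.01
}

fn main() {
    let w = 1000.0;
    for p in [0.0, 50.0, 100.0] {
        println!("{p:>5}% -> x = {}", bar_end_x(w, p)); // 200, 500, 800
    }
}
```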
main.js
|
this._childs.text.align = 'center';
this._childs.text.fontWeight = 'normal';
this._childs.text.fontSize = 24;
this._childs.text.fill = '#fff';
},
preload: function(){
console.log('loading.preload');
var _this = this;
game.load.onFileComplete.add(function(p){
_this._childs.text.text = p+'%';
_this._childs.line2.moveTo(game.width*0.2, game.height*0.5);
_this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5);
});
game.load.onLoadComplete.addOnce(function(){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){
game.state.start('home');
});
});
game.load.image('loading', 'img/loading.png');
game.load.image('bg', 'img/bg.jpg');
game.load.image('btn', 'img/btn.png');
game.load.spritesheet('food', 'img/food.png', 32, 32, 3);
game.load.image('title', 'img/title.png');
game.load.image('snake-head', 'img/snake-head.png');
game.load.image('snake-body', 'img/snake-body.png');
game.load.spritesheet('result', 'img/result.png', 32, 32, 3);
},
shutdown: function(){
game.world.alpha=1;
}
},
home: {
init: function(data){
this._result = data;
},
create: function(){
console.log('home.create');
var temp, _this = this;
this._childs = {};
            //background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
            //title
this._childs.title = game.add.sprite(0, 100, 'title');
this._childs.title.anchor.set(0.5);
this._childs.title.position.set(game.width*0.5, game.height*0.2);
            //button
this._childs.btn = game.add.button(0,0,'btn');
this._childs.btn.anchor.set(0.5);
this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75);
            temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); // "Play again" : "Start game"
temp.align = 'center';
temp.font = 'arial';
temp.fontWeight = 'normal';
temp.fontSize = 48;
temp.fill = '#fff';
temp.anchor.setTo(0.5, 0.45);
this._childs.btn.addChild(temp);
this._childs.btn.onInputDown.add(function(el, e){
el.scale.set(0.97);
el.getChildAt(0).fill = '#cfc';
});
this._childs.btn.onInputUp.add(function(el, e){
el.scale.set(1);
el.getChildAt(0).fill = '#fff';
if(e.timeUp-e.timeDown<500){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
game.state.start('play');
});
}
});
            //animations
game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700);
            //results
if(this._result){
this._childs.point = game.add.text(0,0,'0');
this._childs.point.align = 'center';
this._childs.point.fontSize = 72;
this._childs.point.fill = '#fff';
this._childs.point.anchor.set(0.5);
this._childs.point.position.set(game.width*0.5, this._childs.title.y+200);
this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60);
this._childs.point_line.lineStyle(3, 0xffffff, 0.75);
this._childs.point_line.moveTo(-game.width*0.3, 0);
this._childs.point_line.lineTo(game.width*0.3, 0);
for(var i=1; i<=3; i++){
temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' ');
temp.font = 'arial';
temp.fontSize = 30;
temp.fill = '#fff';
temp.fontWeight = 'normal';
temp.addChild(game.add.image(-50, 0, 'result', i-1));
this._childs['point'+i] = temp;
}
                this._childs.point1.text = '身长 '+this._result.size+'米'; // "Length <size> m"
                this._childs.point2.text = '吃掉 '+this._result.food+'个'; // "Ate <food>"
                this._childs.point3.text = '用时 '+this._result.time+'秒'; // "Time <time> s"
game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000);
game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200);
game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500);
game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750);
game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000);
game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){
_this._childs.point.text = Math.floor(tween.target.point);
}).onComplete.addOnce(function(){
_this._childs.point.text = _this._result.point;
});
}
},
update: function(){
this._childs.bg.tilePosition.y+=1;
},
shutdown: function(){
game.world.alpha=1;
}
},
play: {
create: function(){
console.log('play.create');
var _this=this, temp;
this._childs = {};
this._gdata = {
is_end: false,
time_begin: game.time.now,
food_get: 0,
snake_speed: 400,
snake_path: []
}
game.physics.startSystem(Phaser.Physics.ARCADE);
// background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
// food
this._childs.foods = game.add.group(undefined, 'foods');
this._childs.foods.enableBody = true;
this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE;
function addFood(){
if(_this._gdata.is_end || _this._childs.foods.length>10){ return; }
var type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2);
|
var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type);
temp.name = 'food'+type;
temp.anchor.set(0.5);
temp.body.enable = false;
game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
if(_this._gdata.is_end){ return; }
temp = 0.7; // reuse temp as a shrink ratio; 'this' is the food sprite (passed as the addOnce context argument)
this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5);
this.body.enable = true;
game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood);
}, temp);
if(type>0){
game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){
|
var t
|
conditional_block
|
main.js
|
this._childs.text.align = 'center';
this._childs.text.fontWeight = 'normal';
this._childs.text.fontSize = 24;
this._childs.text.fill = '#fff';
},
preload: function(){
console.log('loading.preload');
var _this = this;
game.load.onFileComplete.add(function(p){
_this._childs.text.text = p+'%';
_this._childs.line2.moveTo(game.width*0.2, game.height*0.5);
_this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5);
});
game.load.onLoadComplete.addOnce(function(){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){
game.state.start('home');
});
});
game.load.image('loading', 'img/loading.png');
game.load.image('bg', 'img/bg.jpg');
game.load.image('btn', 'img/btn.png');
game.load.spritesheet('food', 'img/food.png', 32, 32, 3);
game.load.image('title', 'img/title.png');
game.load.image('snake-head', 'img/snake-head.png');
game.load.image('snake-body', 'img/snake-body.png');
game.load.spritesheet('result', 'img/result.png', 32, 32, 3);
},
shutdown: function(){
game.world.alpha=1;
}
},
home: {
init: function(data){
this._result = data;
},
create: function(){
console.log('home.create');
var temp, _this = this;
this._childs = {};
// background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
// title
this._childs.title = game.add.sprite(0, 100, 'title');
this._childs.title.anchor.set(0.5);
this._childs.title.position.set(game.width*0.5, game.height*0.2);
// button
this._childs.btn = game.add.button(0,0,'btn');
this._childs.btn.anchor.set(0.5);
this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75);
temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); // "Play again" : "Start game"
temp.align = 'center';
temp.font = 'arial';
temp.fontWeight = 'normal';
temp.fontSize = 48;
temp.fill = '#fff';
temp.anchor.setTo(0.5, 0.45);
this._childs.btn.addChild(temp);
this._childs.btn.onInputDown.add(function(el, e){
el.scale.set(0.97);
el.getChildAt(0).fill = '#cfc';
});
this._childs.btn.onInputUp.add(function(el, e){
el.scale.set(1);
el.getChildAt(0).fill = '#fff';
if(e.timeUp-e.timeDown<500){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
game.state.start('play');
});
}
});
// animations
game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700);
// results
if(this._result){
this._childs.point = game.add.text(0,0,'0');
this._childs.point.align = 'center';
this._childs.point.fontSize = 72;
this._childs.point.fill = '#fff';
this._childs.point.anchor.set(0.5);
this._childs.point.position.set(game.width*0.5, this._childs.title.y+200);
this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60);
this._childs.point_line.lineStyle(3, 0xffffff, 0.75);
this._childs.point_line.moveTo(-game.width*0.3, 0);
this._childs.point_line.lineTo(game.width*0.3, 0);
for(var i=1; i<=3; i++){
temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' ');
temp.font = 'arial';
temp.fontSize = 30;
temp.fill = '#fff';
temp.fontWeight = 'normal';
temp.addChild(game.add.image(-50, 0, 'result', i-1));
this._childs['point'+i] = temp;
}
this._childs.point1.text = '身长 '+this._result.size+'米'; // "Length <size> m"
this._childs.point2.text = '吃掉 '+this._result.food+'个'; // "Ate <food>"
this._childs.point3.text = '用时 '+this._result.time+'秒'; // "Time <time> s"
game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000);
game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200);
game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500);
game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750);
game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000);
game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){
_this._childs.point.text = Math.floor(tween.target.point);
}).onComplete.addOnce(function(){
_this._childs.point.text = _this._result.point;
});
}
},
update: function(){
this._childs.bg.tilePosition.y+=1;
},
shutdown: function(){
game.world.alpha=1;
}
},
play: {
create: function(){
console.log('play.create');
var _this=this, temp;
this._childs = {};
this._gdata = {
is_end: false,
time_begin: game.time.now,
food_get: 0,
snake_speed: 400,
snake_path: []
}
game.physics.startSystem(Phaser.Physics.ARCADE);
// background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
// food
this._childs.foods = game.add.group(undefined, 'foods');
this._childs.foods.enableBody = true;
this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE;
function addFood(){
if(_this._gdata.is_end || _this._childs.foods.length>10){ return; }
|
var type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2);
var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type);
temp.name = 'food'+type;
temp.anchor.set(0.5);
temp.body.enable = false;
game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
if(_this._gdata.is_end){ return; }
temp = 0.7; // reuse temp as a shrink ratio; 'this' is the food sprite (passed as the addOnce context argument)
this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5);
this.body.enable = true;
game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood);
}, temp);
if(type>0){
game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){
e.parent.remove(e, true);
|
va
|
identifier_name
|
main.js
|
this._childs.text.fill = '#fff';
},
preload: function(){
console.log('loading.preload');
var _this = this;
game.load.onFileComplete.add(function(p){
_this._childs.text.text = p+'%';
_this._childs.line2.moveTo(game.width*0.2, game.height*0.5);
_this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5);
});
game.load.onLoadComplete.addOnce(function(){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){
game.state.start('home');
});
});
game.load.image('loading', 'img/loading.png');
game.load.image('bg', 'img/bg.jpg');
game.load.image('btn', 'img/btn.png');
game.load.spritesheet('food', 'img/food.png', 32, 32, 3);
game.load.image('title', 'img/title.png');
game.load.image('snake-head', 'img/snake-head.png');
game.load.image('snake-body', 'img/snake-body.png');
game.load.spritesheet('result', 'img/result.png', 32, 32, 3);
},
shutdown: function(){
game.world.alpha=1;
}
},
home: {
init: function(data){
this._result = data;
},
create: function(){
console.log('home.create');
var temp, _this = this;
this._childs = {};
// background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
// title
this._childs.title = game.add.sprite(0, 100, 'title');
this._childs.title.anchor.set(0.5);
this._childs.title.position.set(game.width*0.5, game.height*0.2);
// button
this._childs.btn = game.add.button(0,0,'btn');
this._childs.btn.anchor.set(0.5);
this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75);
temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); // "Play again" : "Start game"
temp.align = 'center';
temp.font = 'arial';
temp.fontWeight = 'normal';
temp.fontSize = 48;
temp.fill = '#fff';
temp.anchor.setTo(0.5, 0.45);
this._childs.btn.addChild(temp);
this._childs.btn.onInputDown.add(function(el, e){
el.scale.set(0.97);
el.getChildAt(0).fill = '#cfc';
});
this._childs.btn.onInputUp.add(function(el, e){
el.scale.set(1);
el.getChildAt(0).fill = '#fff';
if(e.timeUp-e.timeDown<500){
game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
game.state.start('play');
});
}
});
// animations
game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true);
game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700);
// results
if(this._result){
this._childs.point = game.add.text(0,0,'0');
this._childs.point.align = 'center';
this._childs.point.fontSize = 72;
this._childs.point.fill = '#fff';
this._childs.point.anchor.set(0.5);
this._childs.point.position.set(game.width*0.5, this._childs.title.y+200);
this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60);
this._childs.point_line.lineStyle(3, 0xffffff, 0.75);
this._childs.point_line.moveTo(-game.width*0.3, 0);
this._childs.point_line.lineTo(game.width*0.3, 0);
for(var i=1; i<=3; i++){
temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' ');
temp.font = 'arial';
temp.fontSize = 30;
temp.fill = '#fff';
temp.fontWeight = 'normal';
temp.addChild(game.add.image(-50, 0, 'result', i-1));
this._childs['point'+i] = temp;
}
this._childs.point1.text = '身长 '+this._result.size+'米'; // "Length <size> m"
this._childs.point2.text = '吃掉 '+this._result.food+'个'; // "Ate <food>"
this._childs.point3.text = '用时 '+this._result.time+'秒'; // "Time <time> s"
game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000);
game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200);
game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500);
game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750);
game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000);
game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){
_this._childs.point.text = Math.floor(tween.target.point);
}).onComplete.addOnce(function(){
_this._childs.point.text = _this._result.point;
});
}
},
update: function(){
this._childs.bg.tilePosition.y+=1;
},
shutdown: function(){
game.world.alpha=1;
}
},
play: {
create: function(){
console.log('play.create');
var _this=this, temp;
this._childs = {};
this._gdata = {
is_end: false,
time_begin: game.time.now,
food_get: 0,
snake_speed: 400,
snake_path: []
}
game.physics.startSystem(Phaser.Physics.ARCADE);
// background
this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg');
// food
this._childs.foods = game.add.group(undefined, 'foods');
this._childs.foods.enableBody = true;
this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE;
function addFood(){
if(_this._gdata.is_end || _this._childs.foods.length>10){ return; }
var
|
type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2);
var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type);
temp.name = 'food'+type;
temp.anchor.set(0.5);
temp.body.enable = false;
game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){
if(_this._gdata.is_end){ return; }
temp = 0.7; // reuse temp as a shrink ratio; 'this' is the food sprite (passed as the addOnce context argument)
this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5);
this.body.enable = true;
game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood);
}, temp);
if(type>0){
game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){
e.parent.remove(e, true);
});
}
}
game.time.events.add(800, addFood);
// snake head
|
identifier_body
|
|
worldcup.js
|
// * resize radius of circles from each team per category
teamG
.select("circle")
.transition()
.duration(1000)
.attr('r', function(p) {
return radiusScale(p[datapoint]);
});
}
*/
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on("mouseover", highlightRegion);
function highlightRegion(d) {
teamG // = d3.selectAll("g.overallG")
.select("circle")
.style('fill', function(p) { // changed to p because d already defined
return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red)
});
}
// ** add interactivity on mouseout
teamG.on('mouseout', function() {
teamG
.select("circle")
.style("fill", "pink");
});
*/
// --------------------------------------------------------------------
// \ access dom element with 'this' (only in inline function) or '.node()'
// \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element
d3.select("circle").each(function(d,i) { // select one circle so first team
console.log(this); // this: <circle r="3.6.."></circle>
});
d3.select("circle").node() // <circle r="3.6.."></circle>
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
d3.select(this) // this = <circle>
.select("text")
.classed("active", true)
.attr('y', 60); // move text down by 60 px
d3.selectAll("g.overallG")
.select('circle')
.each(function(p,i) {
p.region == d.region ?
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active
d3.select(this).classed("inactive", true);
})
}
// ** add interactivity on mouseout
teamG.on("mouseout", unHighlight)
function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger
// \ you can disable like this:
// teamG.select("text").style("pointer-events","none");
teamG
.select("circle")
.attr("class", ""); // remove active class
teamG
.select("text")
.classed("highlight", false)
.classed("active", false) // remove active class
.attr("y", 30);
};
// */
// --------------------------------------------------------------------
// *** use color
// /*
// ** add interactivity on mouseover
// \ use css when possible, d3 functions are inline (ex: dynamic colors and transparency)
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
// \ colors in rgb get muddy, unless you break the color ramp in multiple stops
teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)")
d3.select(this)
.select('text')
.classed('highlight', true)
.attr('y', 10)
teamG // = d3.selectAll('g.overallG')
.select('circle')
.style('fill', function(p) {
return p.region == d.region?
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten()
})
this.parentElement.appendChild(this);
}
// ** add interactivity on buttonClick
function buttonClick(datapoint) {
var maxValue = d3.max(incomingData, function(d) {
return parseFloat(d[datapoint]);
});
var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]);
// var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']);
// \ use interpolate to use any other scale than rgb - hsl, hcl
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green
var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying)
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige
var tenColorScale = d3.scale.category10().domain(["UEFA", "CONMEBOL", "CAF", "AFC"]); // category10 takes no arguments, so set the domain explicitly
// \ colorbrewer: designed for qualitative data separated into categories: use quantize! - so we need to sort the numerical data into ranges
var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value
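// a worked sketch, assuming maxValue = 90: quantize splits [0,90] into
// three equal buckets with breaks at 30 and 60, so
//   colorQuantize(10) -> colorbrewer.Reds[3][0] (lightest red)
//   colorQuantize(45) -> colorbrewer.Reds[3][1]
//   colorQuantize(80) -> colorbrewer.Reds[3][2] (darkest red)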
teamG
.select("circle")
.transition()
.duration(1000)
.attr('r', function(p) {
return radiusScale(p[datapoint]);
})
// .style('fill', function(p) {
// return ybRamp(p[datapoint]) // adds color for data values (magnitude)
// });
// .style('fill', function(p) {
// return tenColorScale(p.region) // adds color per region
// })
.style('fill', function(p) {
return colorQuantize(p[datapoint]); // each category has a different shade of red
})
}
// */
// *** use images
// \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc)
teamG
.insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images
.attr('xlink:href', function(d) {
return "img/" + d.team + ".png";
})
.attr('width', '45px') // you need to set width & height for svg images to show!
.attr('height', '20px')
.attr('x', '-22') // - 1/2 x value to center image
.attr('y', '40'); // - 1/2 y value to center image = -10
// */
// --------------------------------------------------------------------
// *** modal with stats per team - uses modal.html
// ** use d3.text() with .html()
d3.text('modal.html', function(data) {
d3.select('body')
.append('div') // creates a new div
.attr('id', 'modal') // with id as in main.css
.html(data); // and fills it with html content from modal.html
});
teamG.on('click', teamClick);
function teamClick(d) { //selects and updates td.data as you click on a team
d3.selectAll('td.data') // td with class data from modal.html
.data(d3.values(d))
.html(function(p) {
return p
});
}
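// a sketch of the binding above: d3.values flattens the clicked team's
// datum into an array (one entry per td.data cell), e.g.
//   d3.values({team: 'Brazil', region: 'CONMEBOL', win: '5'})
//     -> ['Brazil', 'CONMEBOL', '5']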
// --------------------------------------------------------------------
// *** pregenerated .svg
// * add with 'xlink:href' (note: the SVG attribute is 'xlink:href', not 'x-link:href')
teamG // add svg molecule image to each team
.insert('image', 'text')
.attr('xlink:href', 'img/molecule.svg')
.attr('width', '60')
.attr('height', '60')
.attr('x', '-30')
.attr('y', '-80');
// ** use d3.html() so you can do more manipulation on HTML nodes with:
// d3.html('img/football.svg', function(data) {
// console.log(data); // contains <svg> > <g> > <path>, but we only want <p>
// })
// ----- when you don't add svg to data
d3.html('img/football.svg', loadSVG1);
function loadSVG1(svgData) { // load svg into the fragment
// \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg
// \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment
while (!d3.select(svgData).selectAll('path').empty()) {
|
d3.select('svg').node().appendChild( // use .node() to access dom elements
d3.select(svgData).select('path').node());
}
d3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0)
|
conditional_block
|
|
worldcup.js
|
/*
// ** add interactivity on mouseover
teamG.on("mouseover", highlightRegion);
function highlightRegion(d) {
teamG // = d3.selectAll("g.overallG")
.select("circle")
.style('fill', function(p) { // changed to p because d already defined
return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red)
});
}
// ** add interactivity on mouseout
teamG.on('mouseout', function() {
teamG
.select("circle")
.style("fill", "pink");
});
*/
// --------------------------------------------------------------------
// \ access dom element with 'this' (only in inline function) or '.node()'
// \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element
d3.select("circle").each(function(d,i) { // select one circle so first team
console.log(this); // this: <circle r="3.6.."></circle>
});
d3.select("circle").node() // <circle r="3.6.."></circle>
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
d3.select(this) // this = <circle>
.select("text")
.classed("active", true)
.attr('y', 60); // move text down by 60 px
d3.selectAll("g.overallG")
.select('circle')
.each(function(p,i) {
p.region == d.region ?
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active
d3.select(this).classed("inactive", true);
})
}
// ** add interactivity on mouseout
teamG.on("mouseout", unHighlight)
function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger
// \ you can disable like this:
// teamG.select("text").style("pointer-events","none");
teamG
.select("circle")
.attr("class", ""); // remove active class
teamG
.select("text")
.classed("highlight", false)
.classed("active", false) // remove active class
.attr("y", 30);
};
// */
// --------------------------------------------------------------------
// *** use color
// /*
// ** add interactivity on mouseover
// \ use css when possible, d3 functions are inline (ex: dynamic colors and transparency)
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
// \ colors in rgb get muddy, unless you break the color ramp in multiple stops
teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)")
d3.select(this)
.select('text')
.classed('highlight', true)
.attr('y', 10)
teamG // = d3.selectAll('g.overallG')
.select('circle')
.style('fill', function(p) {
return p.region == d.region?
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten()
})
this.parentElement.appendChild(this);
}
// ** add interactivity on buttonClick
function buttonClick(datapoint) {
var maxValue = d3.max(incomingData, function(d) {
return parseFloat(d[datapoint]);
});
var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]);
// var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']);
// \ use interpolate to use any other scale than rgb - hsl, hcl
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green
var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying)
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige
var tenColorScale = d3.scale.category10().domain(["UEFA", "CONMEBOL", "CAF", "AFC"]); // category10 takes no arguments, so set the domain explicitly
// \ colorbrewer: designed for qualitative data separated into categories: use quantize! - so we need to sort the numerical data into ranges
var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value
teamG
.select("circle")
.transition()
.duration(1000)
.attr('r', function(p) {
return radiusScale(p[datapoint]);
})
// .style('fill', function(p) {
// return ybRamp(p[datapoint]) // adds color for data values (magnitude)
// });
// .style('fill', function(p) {
// return tenColorScale(p.region) // adds color per region
// })
.style('fill', function(p) {
return colorQuantize(p[datapoint]); // each category has a different shade of red
})
}
// */
// *** use images
// \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc)
teamG
.insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images
.attr('xlink:href', function(d) {
return "img/" + d.team + ".png";
})
.attr('width', '45px') // you need to set width & height for svg images to show!
.attr('height', '20px')
.attr('x', '-22') // - 1/2 x value to center image
.attr('y', '40'); // - 1/2 y value to center image = -10
// */
// --------------------------------------------------------------------
// *** modal with stats per team - uses modal.html
// ** use d3.text() with .html()
d3.text('modal.html', function(data) {
d3.select('body')
.append('div') // creates a new div
.attr('id', 'modal') // with id as in main.css
.html(data); // and fills it with html content from modal.html
});
teamG.on('click', teamClick);
function teamClick(d) { //selects and updates td.data as you click on a team
d3.selectAll('td.data') // td with class data from modal.html
.data(d3.values(d))
.html(function(p) {
return p
});
}
// --------------------------------------------------------------------
// *** pregenerated .svg
// * add with 'xlink:href' (note: the SVG attribute is 'xlink:href', not 'x-link:href')
teamG // add svg molecule image to each team
.insert('image', 'text')
.attr('xlink:href', 'img/molecule.svg')
.attr('width', '60')
.attr('height', '60')
.attr('x', '-30')
.attr('y', '-80');
// ** use d3.html() so you can do more manipulation on HTML nodes with:
// d3.html('img/football.svg', function(data) {
// console.log(data); // contains <svg> > <g> > <path>, but we only want <p>
// })
// ----- when you don't add svg to data
d3.html('img/football.svg', loadSVG1);
function loadSVG1(svgData) { // load svg into the fragment
// \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg
// \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment
while (!d3.select(svgData).selectAll('path').empty()) {
d3.select('svg').node().appendChild( // use .node() to access dom elements
d3.select(svgData).select('path').node());
}
d3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0)
}
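// why the while/.empty() loop above terminates (a sketch): appendChild()
// moves a node instead of copying it, so each pass relocates one <path>
// from the loaded fragment into the live <svg> until the fragment's
// path selection is empty.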
// ----- when you add svg to data
d3.html('img/football.svg', loadSVG2);
function loadSVG2(svgData) {
|
SVG2(svg
|
identifier_name
|
|
worldcup.js
|
.style('text-anchor', 'middle') // how the label aligns compared to the position you give it
.attr('y', 30)
// .style('font-size', '10px')
.text(function(d) { return d.team;
})
// --------------------------------------------------------------------
// ** add buttons to filter / adjust the chart
// \ creating buttons dynamically (like this) is scalable for different datasets
var dataKeys = d3.keys(incomingData[0]).filter(function(el) { // d3.keys returns the names of the attr of an object as an array ( labels are [0] - team, region, win, loss ... )
return el != "team" && el != "region"; // we want buttons for everything except team and region
});
d3.select("#controls")
.selectAll("button.teams") // select all buttons with the class 'team'
// \ dataKeys consists of an array of attr names, so the d corresponds to one of those names and makes a good button title
.data(dataKeys) // numerical data (all data except "team" and "region"(are both strings))
.enter()
.append('button')
.on("click", buttonClick) // gives onclick behaviour to each button. with a wrapper that gives access to the data that was bound to it when it was created
// .attr("onclick", "console.log('click')" // alternative for on('click') to access HTML mouse events - notice " " at console.log
// \ There’s a D3-specific reason to use the .on function: it sends the bound data to the function automatically and in the same format as the anonymous inline functions we’ve been using to set style and attribute.
.html(function(d) { return d; }); // shows text on buttons - dataKeys = incomingData[0]
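// a worked sketch, assuming incomingData[0] = {team:'a', region:'b', win:'1', loss:'2'}:
//   d3.keys(incomingData[0]) -> ['team', 'region', 'win', 'loss']
//   dataKeys                 -> ['win', 'loss']
// so the button bound to 'win' invokes buttonClick('win') on click,
// because .on() passes each element's bound datum as the first argument.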
/*
// ** add interactivity to button click
// \ We can create buttons based on the attributes of the data and dynamically measure the data based on the attribute bound to the button.
function buttonClick(datapoint) { // fires on button click - bound data is automatically sent as first argument
var maxValue = d3.max(incomingData, function(d) {
return parseFloat(d[datapoint]); // click on button sends datapoint
});
var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]);
// * resize radius of circles from each team per category
teamG
.select("circle")
.transition()
.duration(1000)
.attr('r', function(p) {
return radiusScale(p[datapoint]);
});
}
*/
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on("mouseover", highlightRegion);
function highlightRegion(d) {
teamG // = d3.selectAll("g.overallG")
.select("circle")
.style('fill', function(p) { // changed to p because d already defined
return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red)
});
}
// ** add interactivity on mouseout
teamG.on('mouseout', function() {
teamG
.select("circle")
|
// --------------------------------------------------------------------
// \ access dom element with 'this' (only in inline function) or '.node()'
// \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element
d3.select("circle").each(function(d,i) { // select one circle so first team
console.log(this); // this: <circle r="3.6.."></circle>
});
d3.select("circle").node() // <circle r="3.6.."></circle>
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
d3.select(this) // this = <circle>
.select("text")
.classed("active", true)
.attr('y', 60); // move text down by 60 px
d3.selectAll("g.overallG")
.select('circle')
.each(function(p,i) {
p.region == d.region ?
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active
d3.select(this).classed("inactive", true);
})
}
// ** add interactivity on mouseout
teamG.on("mouseout", unHighlight)
function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger
// \ you can disable like this:
// teamG.select("text").style("pointer-events","none");
teamG
.select("circle")
.attr("class", ""); // remove active class
teamG
.select("text")
.classed("highlight", false)
.classed("active", false) // remove active class
.attr("y", 30);
};
// */
// --------------------------------------------------------------------
// *** use color
// /*
// ** add interactivity on mouseover
// \ use css when possible, d3 functions are inline (ex: dynamic colors and transparency)
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
// \ colors in rgb get muddy, unless you break the color ramp in multiple stops
teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)")
d3.select(this)
.select('text')
.classed('highlight', true)
.attr('y', 10)
teamG // = d3.selectAll('g.overallG')
.select('circle')
.style('fill', function(p) {
return p.region == d.region?
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten()
})
this.parentElement.appendChild(this);
}
// ** add interactivity on buttonClick
function buttonClick(datapoint) {
var maxValue = d3.max(incomingData, function(d) {
return parseFloat(d[datapoint]);
});
var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]);
// var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']);
// \ use interpolate to use any other scale than rgb - hsl, hcl
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green
var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying)
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige
var tenColorScale = d3.scale.category10().domain(["UEFA", "CONMEBOL", "CAF", "AFC"]); // category10 takes no arguments, so set the domain explicitly
// \ colorbrewer: designed for qualitative data separated into categories: use quantize! - so we need to sort the numerical data into ranges
var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value
teamG
.select("circle")
.transition()
.duration(1000)
.attr('r', function(p) {
return radiusScale(p[datapoint]);
})
// .style('fill', function(p) {
// return ybRamp(p[datapoint]) // adds color for data values (magnitude)
// });
// .style('fill', function(p) {
// return tenColorScale(p.region) // adds color per region
// })
.style('fill', function(p) {
return colorQuantize(p[datapoint]); // each category has a different shade of red
})
}
// */
// *** use images
// \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc)
teamG
.insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images
.attr('xlink:href', function(d) {
return "img/" + d.team + ".png";
})
.attr('width', '45px') // you need to set width & height for svg images to show!
.attr('height', '20px')
.attr('x', '-22') // - 1/2 x value to center image
|
.style("fill", "pink");
});
*/
|
random_line_split
|
worldcup.js
|
.attr('r', function(p) {
return radiusScale(p[datapoint]);
});
}
*/
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on("mouseover", highlightRegion);
function highlightRegion(d) {
teamG // = d3.selectAll("g.overallG")
.select("circle")
.style('fill', function(p) { // changed to p because d already defined
return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red)
});
}
// ** add interactivity on mouseout
teamG.on('mouseout', function() {
teamG
.select("circle")
.style("fill", "pink");
});
*/
// --------------------------------------------------------------------
// \ access dom element with 'this' (only in inline function) or '.node()'
// \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element
d3.select("circle").each(function(d,i) { // select one circle so first team
console.log(this); // this: <circle r="3.6.."></circle>
});
d3.select("circle").node() // <circle r="3.6.."></circle>
// --------------------------------------------------------------------
/*
// ** add interactivity on mouseover
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
d3.select(this) // this = <circle>
.select("text")
.classed("active", true)
.attr('y', 60); // move text down by 60 px
d3.selectAll("g.overallG")
.select('circle')
.each(function(p,i) {
p.region == d.region ?
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active
d3.select(this).classed("inactive", true);
})
}
// ** add interactivity on mouseout
teamG.on("mouseout", unHighlight)
function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger
// \ you can disable like this:
// teamG.select("text").style("pointer-events","none");
teamG
.select("circle")
.attr("class", ""); // remove active class
teamG
.select("text")
.classed("highlight", false)
.classed("active", false) // remove active class
.attr("y", 30);
};
// */
// --------------------------------------------------------------------
// *** use color
// /*
// ** add interactivity on mouseover
// \ use css when possible, d3 functions are inline (ex: dynamic colors and transparency)
teamG.on('mouseover', highlightRegion2);
function highlightRegion2(d,i) {
// \ colors in rgb get muddy, unless you break the color ramp in multiple stops
teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)")
d3.select(this)
.select('text')
.classed('highlight', true)
.attr('y', 10)
teamG // = d3.selectAll('g.overallG')
.select('circle')
.style('fill', function(p) {
return p.region == d.region?
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten()
})
this.parentElement.appendChild(this);
}
// ** add interactivity on buttonClick
function buttonClick(datapoint) {
var maxValue = d3.max(incomingData, function(d) {
return parseFloat(d[datapoint]);
});
var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]);
// var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']);
// \ use interpolate to use any other scale than rgb - hsl, hcl
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green
var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying)
// var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige
var tenColorScale = d3.scale.category10().domain(["UEFA", "CONMEBOL", "CAF", "AFC"]); // category10 takes no arguments, so set the domain explicitly
// \ colorbrewer: designed for qualitative data separated into categories: use quantize! - so we need to sort the numerical data into ranges
var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value
teamG
.select("circle")
.transition()
.duration(1000)
.attr('r', function(p) {
return radiusScale(p[datapoint]);
})
// .style('fill', function(p) {
// return ybRamp(p[datapoint]) // adds color for data values (magnitude)
// });
// .style('fill', function(p) {
// return tenColorScale(p.region) // adds color per region
// })
.style('fill', function(p) {
return colorQuantize(p[datapoint]); // each category has a different shade of red
})
}
// */
// *** use images
// \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc)
teamG
.insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images
.attr('xlink:href', function(d) {
return "img/" + d.team + ".png";
})
.attr('width', '45px') // you need to set width & height for svg images to show!
.attr('height', '20px')
.attr('x', '-22') // - 1/2 x value to center image
.attr('y', '40'); // - 1/2 y value to center image = -10
// */
// --------------------------------------------------------------------
// *** modal with stats per team - uses modal.html
// ** use d3.text() with .html()
d3.text('modal.html', function(data) {
d3.select('body')
.append('div') // creates a new div
.attr('id', 'modal') // with id as in main.css
.html(data); // and fills it with html content from modal.html
});
teamG.on('click', teamClick);
function teamClick(d) { //selects and updates td.data as you click on a team
d3.selectAll('td.data') // td with class data from modal.html
.data(d3.values(d))
.html(function(p) {
return p
});
}
// --------------------------------------------------------------------
// *** pregenerated .svg
// * add with 'xlink:href' (note: the SVG attribute is 'xlink:href', not 'x-link:href')
teamG // add svg molecule image to each team
.insert('image', 'text')
.attr('xlink:href', 'img/molecule.svg')
.attr('width', '60')
.attr('height', '60')
.attr('x', '-30')
.attr('y', '-80');
// ** use d3.html() so you can do more manipulation on HTML nodes with:
// d3.html('img/football.svg', function(data) {
// console.log(data); // contains <svg> > <g> > <path>, but we only want <p>
// })
// ----- when you don't add svg to data
d3.html('img/football.svg', loadSVG1);
function loadSVG1(svgData) { //
|
load svg into the fragment
// \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg
// \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment
while (!d3.select(svgData).selectAll('path').empty()) {
d3.select('svg').node().appendChild( // use .node() to access dom elements
d3.select(svgData).select('path').node());
}
d3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0)
}
// ----- when you add svg to data
|
identifier_body
|
|
System.py
|
System` with parameters
# `pivotalLines = []` and `nonPivotalLines` as the previous result of
# the gluing of both sides of the equation.
# example: if the equation of the system is
# 0 1 2 | x 9
# 3 4 5 | y = 9
# 6 7 8 | z 9
# then `maybeSystemInit` would call:
# maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ])
# maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n)
def maybeSystemInit(matrix, rightSide):
def fuseToEnd(line, rightValue):
return line + [rightValue]
fusedLines = zipWith(fuseToEnd, matrix, rightSide)
return maybeSystem([], fusedLines)
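# a worked sketch of the gluing step, matching the example above:
#   zipWith(fuseToEnd, [[0,1,2],[3,4,5],[6,7,8]], [9,9,9])
#     == [[0,1,2,9], [3,4,5,9], [6,7,8,9]]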
# ==== next step:
# `maybeSystem` is a 'smart' constructor that returns Nothing if
# any line in either of its parameters is of the form [0,0,...,0,x]
# with x != 0, which would correspond to an equation 0 = x != 0`
# which would make the result of `systemSolution` be Nothing automatically,
# thanks to the magic of the type `Maybe`!
#
# the constructor `maybeSystem` will be called at each step of the
# algorithm, ensuring that if at any point, the system is found unsolvable,
# no further operation will be performed.
# maybeSystem : List (Row n) . List (Row n) -> Maybe (System n)
def maybeSystem(pivotalLines, nonPivotalLines):
if forall(pivotalLines + nonPivotalLines, isValidLine):
return Maybe(value = System(pivotalLines, nonPivotalLines))
else: return Nothing
# returns True if and only if the list
# is not a series of zeroes ended with one
# last non-zero value, as it would amount to
# an equation of the form 0 = x != 0
#
# isValidLine : List n -> Bool
def isValidLine(line):
if len(line) <= 1:
error(isValidLine, "input list is too short to be part of a `System n`")
leftSide = line[:-1]
rightSide = line[-1]
if forall(leftSide, isZero):
return isZero(rightSide)
else:
return True
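# a few sketched cases:
#   isValidLine([0,0,0,5]) -> False  (encodes the equation 0 = 5)
#   isValidLine([0,2,0,5]) -> True   (left side is not all zeroes)
#   isValidLine([0,0,0,0]) -> True   (0 = 0 is harmless)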
# (you'll notice i grew tired of mentioning the ever-present
# type class [Num n] of the parameter `n`...)
# ==== small interlude to introduce the type/class `System n`:
# class representing a system in the process of being solved.
# mostly just contains two attributes, `pivotalLines` and
# `nonPivotalLines`, each one being a list of vectors/lines.
#
# we'll search new pivots in the nonPivotalLines list, and
# every time we find a new pivot in a column, we'll move the corresponding
# line to the group of the "pivotalLines".
class System():
# System : List (Row n) . List (Row n) -> System n
def __init__(self, pivotalLines, nonPivotalLines):
self.pivotalLines = pivotalLines
self.nonPivotalLines = nonPivotalLines
allLines = pivotalLines + nonPivotalLines
if len(allLines) == 0:
error(System.__init__, "wrong input (two empty lists) "
+ "for System constructor")
self.leftSideWidth = len(allLines[0]) - 1
# number of columns of the leftside matrix of the equation.
# -1 because the last element of each line is
# part of the right side (the vector Y in AX = Y)
# this value will be used to avoid trying to find
# a pivot in the right side column vector
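# a sketch: for System([], [[0,1,2,9],[3,4,5,9]]) every line has length 4,
# so leftSideWidth == 3: columns 0..2 may hold pivots, while column 3
# (the right side) never will.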
# ==== next step of the algorithm:
#
# back in `systemSolution`, with `Maybe` a valid system. If it is so,
# the method `maybeDo` will call
# echelonized : System n . Index -> Maybe (System n)
# with this valid system as its first argument, and an additional parameter
# `colIndex = 0`.
# this function returns either `Just` an echelonized system,
# or Nothing if at some point we encounter `0 = x != 0`.
#
# this function is recursive (indirectly). colIndex represents an index of
# the column of the leftside matrix `A` in which we'll try
# to find a pivot. the recursion will thus go through each column index
# between 0 and `ncols(A)`.
# echelonized : System n . Index -> Maybe (System n)
def echelonized(system, colIndex):
#print "DBG"
#printMatrix(system.pivotalLines)
#printMatrix(system.nonPivotalLines)
maybePivotalLineIndex = findPivot(system, colIndex)
if maybePivotalLineIndex == Nothing:
# pivot not found => this column is filled with zeroes
# on the non pivotal lines, so we do nothing
maybeSystem = Just(system)
else:
pivotalLineIndex = maybePivotalLineIndex.justValue()
maybeSystem = usePivot(system, pivotalLineIndex, colIndex)
if maybeSystem == Nothing:
return Nothing
else:
newSystem = maybeSystem.justValue()
if colIndex >= newSystem.leftSideWidth - 1:
# we reached the end of recursion, having
# walked through all the columns of the leftside
# matrix of the equation
return Just(newSystem)
else:
# we repeat the process for the next column
return echelonized(newSystem, colIndex + 1)
# the previous function starts by calling
# findPivot : System n . Index -> Maybe Index
# which `Maybe` returns the index of the first non-pivotal line
# (that is the line which wasn't used previously for the pivot
# of another column) which
# contains a non-null element at the index column `colIndex`.
# returns Nothing if the whole column at that index is null,
# or if there aren't any non pivotal lines remaining.
|
# to isolate the line into which the pivot was found from the aforementioned list.
# findPivot : System n . Index -> Maybe Index
def findPivot(system, colIndex):
if len(system.nonPivotalLines) == 0:
return Nothing
col = columnAt(colIndex, system.nonPivotalLines)
maybeLineIndex = firstIndex(isNotZero, col)
#print "findPivot", x, colIndex
return maybeLineIndex
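# a sketch, assuming nonPivotalLines == [[0,1,2,9],[3,4,5,9]]:
#   findPivot(system, 0) -> Just(1)  (line 0 has a zero in column 0)
#   findPivot(system, 1) -> Just(0)  (line 0 already has 1 in column 1)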
# ==== next step:
# back in `echelonized`: if the index of the line of the pivot
# given by `findPivot` is Nothing, we do nothing with the system
# else, we call `usePivot` with the index of the soon-to-be pivotal line
# usePivot : System n . Index . Index -> Maybe (System n)
#
# in usePivot: we start by isolating the new pivotal lines
# from the rest of the still-not-yet pivotal lines.
# then we retrieve the value of the pivot, using the
# index of the column we're operating over.
#
# we create a function which will be used over all the lines
# in `system`, both the pivotal ones and the non pivotal ones,
# except the one that was just isolated, the one that contains
# the pivot.
#
# the operation consists in creating zeroes everywhere in the column
# except for the pivot. the core of the process is in the function
# `modifiedLine` (i know, the name is not very appropriate... it's
# the best i found though).
# usePivot : System n . Index . Index -> Maybe (System n)
def usePivot(system, pivotalLineIndex, colIndex):
(pivotalLine, nonPivotalLines) = \
isolateItem(system.nonPivotalLines, pivotalLineIndex)
pivot = pivotalLine[colIndex]
#print "pivot", pivot
def forEachLine(line):
val = line[colIndex]
coeff = val / pivot # that way, val - coeff * pivot == 0 (true division needed: with Python 2 int entries this would truncate)
return modifiedLine(line, pivotalLine, coeff)
newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine]
newNonPivotalLines = map(forEachLine, nonPivotalLines)
return maybeSystem(newPivotalLines, newNonPivotalLines)
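# a worked sketch of one elimination step: with pivotalLine = [1,2,3,9]
# and the pivot 1 in column 0, a line [2,5,8,9] gets coeff = 2/1 = 2 and
# modifiedLine yields [2-2*1, 5-2*2, 8-2*3, 9-2*9] == [0,1,2,-9],
# zeroing the pivot column as intended.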
# the function `modifiedLine` is straightforward:
# subtract each value of `line` by the multiplication
# of an appropriate coefficient with each value of
# `otherLine`.
# the appropriate coefficient is calculated so
# that the element at the column of the current pivot
# in the `line` become zero (cf `forEachLine` in usePivot)
# modifiedLine : List n . List n . n -> List n
def modifiedLine(line, otherLine, coeff):
def f(val, otherVal):
|
# that line index depends on the number of lines in system.nonPivotalLines,
# but that's not a problem because we'll use that index only
|
random_line_split
|
System.py
|
self.nonPivotalLines = nonPivotalLines
allLines = pivotalLines + nonPivotalLines
if len(allLines) == 0:
error(System.__init__, "wrong input (two empty lists) "
+ "for System constructor")
self.leftSideWidth = len(allLines[0]) - 1
# number of columns of the leftside matrix of the equation.
# -1 because the last element of each line is
# part of the right side (the vector Y in AX = Y)
# this value will be used to avoid trying to find
# a pivot in the right side column vector
# ==== next step of the algorithm:
#
# back in `systemSolution`, with `Maybe` a valid system. If it is so,
# the method `maybeDo` will call
# echelonized : System n . Index -> Maybe (System n)
# with this valid system as its first argument, and an additional parameter
# `colIndex = 0`.
# this function returns either `Just` an echelonized system,
# or Nothing if at some point we encounter `0 = x != 0`.
#
# this function is recursive (indirectly). colIndex represents an index of
# the column of the leftside matrix `A` in which we'll try
# to find a pivot. the recursion will thus go through each column index
# between 0 and `ncols(A)`.
# echelonized : System n . Index -> Maybe (System n)
def echelonized(system, colIndex):
#print "DBG"
#printMatrix(system.pivotalLines)
#printMatrix(system.nonPivotalLines)
maybePivotalLineIndex = findPivot(system, colIndex)
if maybePivotalLineIndex == Nothing:
# pivot not found => this column is filled with zeroes
# on the non pivotal lines, so we do nothing
maybeSystem = Just(system)
else:
pivotalLineIndex = maybePivotalLineIndex.justValue()
maybeSystem = usePivot(system, pivotalLineIndex, colIndex)
if maybeSystem == Nothing:
return Nothing
else:
newSystem = maybeSystem.justValue()
if colIndex >= newSystem.leftSideWidth - 1:
# we reached the end of recursion, having
# walked through all the columns of the leftside
# matrix of the equation
return Just(newSystem)
else:
# we repeat the process for the next column
return echelonized(newSystem, colIndex + 1)
# the previous function starts by calling
# findPivot : System n . Index -> Maybe Index
# which `Maybe` returns the index of the first non-pivotal line
# (that is the line which wasn't used previously for the pivot
# of another column) which
# contains a non-null element at the index column `colIndex`.
# returns Nothing if the whole column at that index is null,
# or if there aren't any non pivotal lines remaining.
# that line index depends on the number of lines in system.nonPivotalLines,
# but that's not a problem because we'll use that index only
# to isolate the line into which the pivot was found from the aforementioned list.
# findPivot : System n . Index -> Maybe Index
def findPivot(system, colIndex):
if len(system.nonPivotalLines) == 0:
return Nothing
col = columnAt(colIndex, system.nonPivotalLines)
maybeLineIndex = firstIndex(isNotZero, col)
#print "findPivot", x, colIndex
return maybeLineIndex
# ==== next step:
# back in `echelonized`: if the index of the line of the pivot
# given by `findPivot` is Nothing, we do nothing with the system
# else, we call `usePivot` with the index of the soon-to-be pivotal line
# usePivot : System n . Index . Index -> Maybe (System n)
#
# in usePivot: we start by isolating the new pivotal lines
# from the rest of the still-not-yet pivotal lines.
# then we retrieve the value of the pivot, using the
# index of the column we're operating over.
#
# we create a function which will be used over all the lines
# in `system`, both the pivotal ones and the non pivotal ones,
# except the one that was just isolated, the one that contains
# the pivot.
#
# the operation consists in creating zeroes everywhere in the column
# except for the pivot. the core of the process is in the function
# `modifiedLine` (i know, the name is not very appropriate... it's
# the best i found though).
# usePivot : System n . Index . Index -> Maybe (System n)
def usePivot(system, pivotalLineIndex, colIndex):
(pivotalLine, nonPivotalLines) = \
isolateItem(system.nonPivotalLines, pivotalLineIndex)
pivot = pivotalLine[colIndex]
#print "pivot", pivot
def forEachLine(line):
val = line[colIndex]
coeff = val / pivot # that way, val - coeff * pivot == 0 (true division needed: with Python 2 int entries this would truncate)
return modifiedLine(line, pivotalLine, coeff)
newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine]
newNonPivotalLines = map(forEachLine, nonPivotalLines)
return maybeSystem(newPivotalLines, newNonPivotalLines)
# the function `modifiedLine` is straightforward:
# subtract each value of `line` by the multiplication
# of an appropriate coefficient with each value of
# `otherLine`.
# the appropriate coefficient is calculated so
# that the element at the column of the current pivot
# in the `line` become zero (cf `forEachLine` in usePivot)
# modifiedLine : List n . List n . n -> List n
def modifiedLine(line, otherLine, coeff):
def f(val, otherVal):
return val - otherVal * coeff
return zipWith(f, line, otherLine)
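# a sketched call: modifiedLine([2,5,8], [1,2,3], 2)
#   == [2 - 2*1, 5 - 2*2, 8 - 2*3] == [0,1,2]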
# ==== next step
# back in usePivot:
# as previously mentioned, we used forEachLine over both
# the pivotal and non pivotal lines, except the currently
# pivotal line (the one one which we found the pivot we're
# actually using).
#
# from then on, we `Maybe` build a new system, not forgetting
# to stash the latest pivotal lines with all the old pivotal lines
#
# maybeSystem will thereafter test that no line is invalid (0 = x != 0).
# ==== next step
# back to echelonized: we get back the result of usePivot. if Nothing,
# we return Nothing and therefore break the recursion. if not,
# we check if we're at the last column of the matrix A in AX = Y, and
# if so, we return the result of usePivot and break the recursion.
# if not, we call `echelonized` recursively again with the justValue() of
# the result of `usePivot`, and an incremented column index.
# i deem the recursion acceptable even in Python because
# nobody will ever use this program to solve a system
# of several hundreds of variables, will they?
# ==== next step:
# we exited `echelonized`, so we're back in `systemSolution`, with either
# Nothing (in which case we directly return Nothing)
# or with `Just` an echelonized system.
# in which case: we have to take care of an extreme, special case:
# when both sides of the equation are null (a null matrix and vector).
# cf the body of `systemSolution` for more details.
#
# from now on we'll assume the leftside is not a null matrix,
# and therefore we at least found one pivot.
# we call keepPivotalLines over the echelonized system.
# keepPivotalLines : System n -> List (Row n)
#
# this function is extremely short and nearly useless, but is standing alone
# for the sake of clarity. its purpose is to mark the moment when
# we throw away the remaining nonPivotalLines, because they're
# necessarily just full of zeroes (otherwise they'd either be invalid,
# or would imply the Gauss Algorithm implemented here utterly failed somehow
# along the way).
# this, of course, should never happen.
# the result of keepPivotalLines is thus `system.pivotalLines : List (Row n)`
# keepPivotalLines : System n -> List (Row n)
def keepPivotalLines(system):
if not forall(system.nonPivotalLines, isNullVector):
error(keepPivotalLines, "somehow after successfully echelonizing, "
+ "one non pivotal line is not full of zeroes")
return system.pivotalLines
# ==== next step:
# once keepPivotalLines has been called, it's the turn of
# normalized : List (Row n) -> List (Row n)
# whose job is to normalize each pivotal line, aka to
# multiply each pivotal line by a coefficient
# so that each pivot (which is also the first non-zero value
# on each line) take the value of 1.
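# a sketch: a pivotal line [2,0,4,6], whose pivot is 2, is scaled by 1/2,
# giving [1,0,2,3], so its pivot becomes exactly 1.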
# normalized : List (Row n) -> List (Row n)
def normalized(pivotalLines):
|
return map(normalizedLine, pivotalLines)
|
identifier_body
|
|
System.py
|
System` with parameters
# `pivotalLines = []` and `nonPivotalLines` as the previous result of
# the gluing of both sides of the equation.
# example: if the equation of the system is
# 0 1 2 | x 9
# 3 4 5 | y = 9
# 6 7 8 | z 9
# then `maybeSystemInit` would call:
# maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ])
# maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n)
def maybeSystemInit(matrix, rightSide):
def fuseToEnd(line, rightValue):
return line + [rightValue]
fusedLines = zipWith(fuseToEnd, matrix, rightSide)
return maybeSystem([], fusedLines)
# ==== next step:
# `maybeSystem` is a 'smart' constructor that returns Nothing if
# any line in either of its parameters is of the form [0,0,...,0,x]
# with x != 0, which would correspond to an equation 0 = x != 0`
# which would make the result of `systemSolution` be Nothing automatically,
# thanks to the magic of the type `Maybe`!
#
# the constructor `maybeSystem` will be called at each step of the
# algorithm, ensuring that if at any point, the system is found unsolvable,
# no further operation will be performed.
# maybeSystem : List (Row n) . List (Row n) -> Maybe (System n)
def maybeSystem(pivotalLines, nonPivotalLines):
if forall(pivotalLines + nonPivotalLines, isValidLine):
return Maybe(value = System(pivotalLines, nonPivotalLines))
else: return Nothing
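# note (added): the `Maybe` machinery used throughout this file is defined
# elsewhere; this is a minimal sketch consistent with its uses here
# (`Maybe(value=...)`, `Just`, `Nothing`, `.justValue()`, `.maybeDo(...)`):
class Maybe():
    def __init__(self, value=None, nothing=False):
        self.value = value
        self.nothing = nothing
    def justValue(self):
        # extract the wrapped value; only meaningful when not Nothing
        return self.value
    def maybeDo(self, f, *args):
        # chain a computation: propagate Nothing, else apply f to the value
        return Nothing if self.nothing else f(self.value, *args)
    def __eq__(self, other):
        return self.nothing == other.nothing and self.value == other.value

Nothing = Maybe(nothing=True)

def Just(value):
    return Maybe(value=value)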
# returns True if and only if the list
# is not a series of zeroes ended with one
# last non-zero value, as it would amount to
# an equation of the form 0 = x != 0
#
# isValidLine : List n -> Bool
def isValidLine(line):
if len(line) <= 1:
error(isValidLine, "input list is too short to be part of a `System n`")
leftSide = line[:-1]
rightSide = line[-1]
if forall(leftSide, isZero):
return isZero(rightSide)
else:
return True
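# note (added): quick examples of the rule above:
# isValidLine([0,0,0,5]) == False   (encodes the equation 0 = 5)
# isValidLine([0,0,0,0]) == True    (encodes 0 = 0, harmless)
# isValidLine([0,2,0,5]) == True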
# (you'll notice i grew tired of mentioning the ever-present
# type class [Num n] of the parameter `n`...)
# ==== small interlude to introduce the type/class `System n`:
# class representing a system in the process of being solved.
# mostly just contains two attributes, `pivotalLines` and
# `nonPivotalLines`, each one being a list of vectors/lines.
#
# we'll search for new pivots in the nonPivotalLines list, and
# every time we find a new pivot in a column, we'll move the corresponding
# line to the group of the "pivotalLines".
class System():
# System : List (Row n) . List (Row n) -> System n
def __init__(self, pivotalLines, nonPivotalLines):
self.pivotalLines = pivotalLines
self.nonPivotalLines = nonPivotalLines
allLines = pivotalLines + nonPivotalLines
if len(allLines) == 0:
error(System.__init__, "wrong input (two empty lists) "
+ "for System constructor")
self.leftSideWidth = len(allLines[0]) - 1
# number of columns of the leftside matrix of the equation.
# -1 because the last element of each line is
# part of the right side (the vector Y in AX = Y)
# this value will be used to avoid trying to find
# a pivot in the right side column vector
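# note (added): a tiny worked example of the bookkeeping above:
# System([], [[0,1,2,9],[3,4,5,9]]).leftSideWidth == 3
# (four values per line, minus the one right-hand-side column)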
# ==== next step of the algorithm:
#
# back in `systemSolution`, with `Maybe` a valid system. If it is so,
# the method `maybeDo` will call
# echelonized : System n . Index -> Maybe (System n)
# with this valid system as its first argument, and an additional parameter
# `colIndex = 0`.
# this function returns either `Just` an echelonized system,
# or Nothing if at some point we encounter `0 = x != 0`.
#
# this function is recursive (indirectly). colIndex represents an index of
# the column of the leftside matrix `A` in which we'll try
# to find a pivot. the recursion will thus go through each column index
# between 0 and `ncols(A) - 1`.
# echelonized : System n . Index -> Maybe (System n)
def echelonized(system, colIndex):
#print "DBG"
#printMatrix(system.pivotalLines)
#printMatrix(system.nonPivotalLines)
maybePivotalLineIndex = findPivot(system, colIndex)
if maybePivotalLineIndex == Nothing:
# pivot not found => this column is filled with zeroes
# on the non pivotal lines, so we do nothing
maybeSystem = Just(system)
else:
pivotalLineIndex = maybePivotalLineIndex.justValue()
maybeSystem = usePivot(system, pivotalLineIndex, colIndex)
if maybeSystem == Nothing:
return Nothing
else:
newSystem = maybeSystem.justValue()
if colIndex >= newSystem.leftSideWidth - 1:
# we reached the end of recursion, having
# walked through all the columns of the leftside
# matrix of the equation
return Just(newSystem)
else:
# we repeat the process for the next column
|
# the previous function starts by calling
# findPivot : System n . Index -> Maybe Index
# which `Maybe` returns the index of the first non-pivotal line
# (that is, a line which wasn't previously used for the pivot
# of another column) that
# contains a non-null element in the column at index `colIndex`.
# returns Nothing if the whole column at that index is null,
# or if there aren't any non pivotal lines remaining.
# that line index is relative to system.nonPivotalLines,
# but that's not a problem because we'll use it only
# to isolate the line in which the pivot was found from that list.
# findPivot : System n . Index -> Maybe Index
def findPivot(system, colIndex):
if len(system.nonPivotalLines) == 0:
return Nothing
col = columnAt(colIndex, system.nonPivotalLines)
maybeLineIndex = firstIndex(isNotZero, col)
#print "findPivot", x, colIndex
return maybeLineIndex
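# note (added): `columnAt` and `firstIndex` are defined elsewhere in the file.
# minimal sketches consistent with their use in `findPivot` (reusing the
# `Just`/`Nothing` sketch given earlier):
def columnAt(colIndex, lines):
    # extract one column of a list-of-lists matrix
    return [line[colIndex] for line in lines]
def firstIndex(predicate, values):
    # `Maybe` the index of the first value satisfying the predicate
    for (i, v) in enumerate(values):
        if predicate(v):
            return Just(i)
    return Nothing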
# ==== next step:
# back in `echelonized`: if the index of the line of the pivot
# given by `findPivot` is Nothing, we do nothing with the system
# else, we call `usePivot` with the index of the soon-to-be pivotal line
# usePivot : System n . Index . Index -> Maybe (System n)
#
# in usePivot: we start by isolating the new pivotal lines
# from the rest of the still-not-yet pivotal lines.
# then we retrieve the value of the pivot, using the
# index of the column we're operating over.
#
# we create a function which will be used over all the lines
# in `system`, both the pivotal ones and the non pivotal ones,
# except the one that was just isolated, the one that contains
# the pivot.
#
# the operation consists in creating zeroes everywhere in the column
# except for the pivot. the core of the process is in the function
# `modifiedLine` (i know, the name is not very appropriate... it's
# the best i found though).
# usePivot : System n . Index . Index -> Maybe (System n)
def usePivot(system, pivotalLineIndex, colIndex):
(pivotalLine, nonPivotalLines) = \
isolateItem(system.nonPivotalLines, pivotalLineIndex)
pivot = pivotalLine[colIndex]
#print "pivot", pivot
def forEachLine(line):
val = line[colIndex]
coeff = float(val) / pivot # avoid integer division; val - coeff * pivot == 0
return modifiedLine(line, pivotalLine, coeff)
newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine]
newNonPivotalLines = map(forEachLine, nonPivotalLines)
return maybeSystem(newPivotalLines, newNonPivotalLines)
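# note (added): `isolateItem` is defined elsewhere in the file. a minimal
# sketch consistent with its use in `usePivot` (returns the chosen item and
# the remaining items, without mutating the input):
def isolateItem(items, index):
    return (items[index], items[:index] + items[index+1:])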
# the function `modifiedLine` is straightforward:
# subtract each value of `line` by the multiplication
# of an appropriate coefficient with each value of
# `otherLine`.
# the appropriate coefficient is calculated so
# that the element at the column of the current pivot
# in the `line` becomes zero (cf `forEachLine` in usePivot)
# modifiedLine : List n . List n . n -> List n
def modifiedLine(line, otherLine, coeff):
def
|
return echelonized(newSystem, colIndex + 1)
|
conditional_block
|
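# note (added): the body of `modifiedLine` is elided in this fragment. a
# minimal sketch matching the description above (subtract coeff times each
# value of `otherLine` from the corresponding value of `line`):
def modifiedLine(line, otherLine, coeff):
    return [value - coeff * other for (value, other) in zip(line, otherLine)]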
System.py
|
System` with parameters
# `pivotalLines = []` and `nonPivotalLines` as the previous result of
# the gluing of both sides of the equation.
# example: if the equation of the system is
# 0 1 2 | x 9
# 3 4 5 | y = 9
# 6 7 8 | z 9
# then `maybeSystemInit` would call:
# maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ])
# maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n)
def maybeSystemInit(matrix, rightSide):
def fuseToEnd(line, rightValue):
return line + [rightValue]
fusedLines = zipWith(fuseToEnd, matrix, rightSide)
return maybeSystem([], fusedLines)
# ==== next step:
# `maybeSystem` is a 'smart' constructor that returns Nothing if
# any line in either of its parameters is of the form [0,0,...,0,x]
# with x != 0, which would correspond to an equation 0 = x != 0,
# which would make the result of `systemSolution` be Nothing automatically,
# thanks to the magic of the type `Maybe`!
#
# the constructor `maybeSystem` will be called at each step of the
# algorithm, ensuring that if at any point, the system is found unsolvable,
# no further operation will be performed.
# maybeSystem : List (Row n) . List (Row n) -> Maybe (System n)
def
|
(pivotalLines, nonPivotalLines):
if forall(pivotalLines + nonPivotalLines, isValidLine):
return Maybe(value = System(pivotalLines, nonPivotalLines))
else: return Nothing
# returns True if and only if the list
# is not a series of zeroes ended with one
# last non-zero value, as it would amount to
# an equation of the form 0 = x != 0
#
# isValidLine : List n -> Bool
def isValidLine(line):
if len(line) <= 1:
error(isValidLine, "input list is too short to be part of a `System n`")
leftSide = line[:-1]
rightSide = line[-1]
if forall(leftSide, isZero):
return isZero(rightSide)
else:
return True
# (you'll notice i grew tired of mentioning the ever-present
# type class [Num n] of the parameter `n`...)
# ==== small interlude to introduce the type/class `System n`:
# class representing a system in the process of being solved.
# mostly just contains two attributes, `pivotalLines` and
# `nonPivotalLines`, each one being a list of vectors/lines.
#
# we'll search for new pivots in the nonPivotalLines list, and
# every time we find a new pivot in a column, we'll move the corresponding
# line to the group of the "pivotalLines".
class System():
# System : List (Row n) . List (Row n) -> System n
def __init__(self, pivotalLines, nonPivotalLines):
self.pivotalLines = pivotalLines
self.nonPivotalLines = nonPivotalLines
allLines = pivotalLines + nonPivotalLines
if len(allLines) == 0:
error(System.__init__, "wrong input (two empty lists) "
+ "for System constructor")
self.leftSideWidth = len(allLines[0]) - 1
# number of columns of the leftside matrix of the equation.
# -1 because the last element of each line is
# part of the right side (the vector Y in AX = Y)
# this value will be used to avoid trying to find
# a pivot in the right side column vector
# ==== next step of the algorithm:
#
# back in `systemSolution`, with `Maybe` a valid system. If it is so,
# the method `maybeDo` will call
# echelonized : System n . Index -> Maybe (System n)
# with this valid system as its first argument, and an additional parameter
# `colIndex = 0`.
# this function returns either `Just` an echelonized system,
# or Nothing if at some point we encounter `0 = x != 0`.
#
# this function is recursive (indirectly). colIndex represents an index of
# the column of the leftside matrix `A` in which we'll try
# to find a pivot. the recursion will thus go through each column index
# between 0 and `ncols(A) - 1`.
# echelonized : System n . Index -> Maybe (System n)
def echelonized(system, colIndex):
#print "DBG"
#printMatrix(system.pivotalLines)
#printMatrix(system.nonPivotalLines)
maybePivotalLineIndex = findPivot(system, colIndex)
if maybePivotalLineIndex == Nothing:
# pivot not found => this column is filled with zeroes
# on the non pivotal lines, so we do nothing
maybeSystem = Just(system)
else:
pivotalLineIndex = maybePivotalLineIndex.justValue()
maybeSystem = usePivot(system, pivotalLineIndex, colIndex)
if maybeSystem == Nothing:
return Nothing
else:
newSystem = maybeSystem.justValue()
if colIndex >= newSystem.leftSideWidth - 1:
# we reached the end of recursion, having
# walked through all the columns of the leftside
# matrix of the equation
return Just(newSystem)
else:
# we repeat the process for the next column
return echelonized(newSystem, colIndex + 1)
# the previous function starts by calling
# findPivot : System n . Index -> Maybe Index
# which `Maybe` returns the index of the first non-pivotal line
# (that is, a line which wasn't previously used for the pivot
# of another column) that
# contains a non-null element in the column at index `colIndex`.
# returns Nothing if the whole column at that index is null,
# or if there aren't any non pivotal lines remaining.
# that line index is relative to system.nonPivotalLines,
# but that's not a problem because we'll use it only
# to isolate the line in which the pivot was found from that list.
# findPivot : System n . Index -> Maybe Index
def findPivot(system, colIndex):
if len(system.nonPivotalLines) == 0:
return Nothing
col = columnAt(colIndex, system.nonPivotalLines)
maybeLineIndex = firstIndex(isNotZero, col)
#print "findPivot", x, colIndex
return maybeLineIndex
# ==== next step:
# back in `echelonized`: if the index of the line of the pivot
# given by `findPivot` is Nothing, we do nothing with the system
# else, we call `usePivot` with the index of the soon-to-be pivotal line
# usePivot : System n . Index . Index -> Maybe (System n)
#
# in usePivot: we start by isolating the new pivotal lines
# from the rest of the still-not-yet pivotal lines.
# then we retrieve the value of the pivot, using the
# index of the column we're operating over.
#
# we create a function which will be used over all the lines
# in `system`, both the pivotal ones and the non pivotal ones,
# except the one that was just isolated, the one that contains
# the pivot.
#
# the operation consists in creating zeroes everywhere in the column
# except for the pivot. the core of the process is in the function
# `modifiedLine` (i know, the name is not very appropriate... it's
# the best i found though).
# usePivot : System n . Index . Index -> Maybe (System n)
def usePivot(system, pivotalLineIndex, colIndex):
(pivotalLine, nonPivotalLines) = \
isolateItem(system.nonPivotalLines, pivotalLineIndex)
pivot = pivotalLine[colIndex]
#print "pivot", pivot
def forEachLine(line):
val = line[colIndex]
coeff = float(val) / pivot # avoid integer division; val - coeff * pivot == 0
return modifiedLine(line, pivotalLine, coeff)
newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine]
newNonPivotalLines = map(forEachLine, nonPivotalLines)
return maybeSystem(newPivotalLines, newNonPivotalLines)
# the function `modifiedLine` is straightforward:
# subtract each value of `line` by the multiplication
# of an appropriate coefficient with each value of
# `otherLine`.
# the appropriate coefficient is calculated so
# that the element at the column of the current pivot
# in the `line` becomes zero (cf `forEachLine` in usePivot)
# modifiedLine : List n . List n . n -> List n
def modifiedLine(line, otherLine, coeff):
def
|
maybeSystem
|
identifier_name
|
ecoliver.py
|
orrelation Function
+ve lags mean x leads y
-ve lags mean x lags y
'''
x = (x-np.mean(x))/(np.std(x)*len(x))
y = (y-np.mean(y))/np.std(y)
ccf = np.correlate(x, y, mode='full')
lags = np.arange(len(ccf)) - (len(x)-1)
return lags, ccf
def ttest_serialcorr(x, y):
'''
Calculates the t-test for the means of two samples under an assumption of serial
correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995)
'''
# Valid (non-NaN) data; return NaN if there is insufficient valid data
validx = ~np.isnan(x)
validy = ~np.isnan(y)
if (validx.sum() <= 1) or (validy.sum() <= 1):
return np.nan, np.nan
else:
# Sample lengths
nx = len(x[validx])
ny = len(y[validy])
# Autocorrelation Function (pad NaN values for an approximation)
rhox = acf(pad(x - np.nanmean(x)))
rhoy = acf(pad(y - np.nanmean(y)))
# Equivalent sample lengths
nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum())
ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum())
#if (nx < 30) or (ny < 30):
# print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution'
# Sample standard deviations
sx = np.sqrt(x[validx].var())
sy = np.sqrt(y[validy].var())
s = np.sqrt(sx**2/nx + sy**2/ny)
# t-statistic
t = (np.nanmean(x) - np.nanmean(y))/s
# Degrees of freedom
df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1))
# p-value
p = 1 - stats.t.cdf(t, df)
return t, p
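# note (added): illustrative usage only; `acf` is assumed to be defined
# earlier in this file (returning the sample autocorrelation function).
# names below are made up for the example:
#
# x = 0.5 + ar1_series(n=500, phi=0.6)   # hypothetical AR(1) generator
# y = 0.0 + ar1_series(n=500, phi=0.6)
# t, p = ttest_serialcorr(x, y)
#
# the equivalent sample lengths shrink as the lag-1 autocorrelation grows,
# so positive serial correlation widens the test compared with a naive
# two-sample t-test on the same data.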
def pad(data, maxPadLength=False):
'''
Linearly interpolate over missing data (NaNs) in a time series.
Inputs:
data Time series [1D numpy array]
maxPadLength Specifies the maximum length over which to interpolate,
i.e., any consecutive blocks of NaNs with length greater
than maxPadLength will be left as NaN. Set as an integer.
maxPadLength=False (default) interpolates over all NaNs.
Written by Eric Oliver, Institute for Marine and Antarctic Studies, University of Tasmania, Jun 2015
'''
data_padded = data.copy()
if len(data) == np.isnan(data).sum():
return np.nan*data_padded
else:
bad_indexes = np.isnan(data)
good_indexes = np.logical_not(bad_indexes)
good_data = data[good_indexes]
interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data)
data_padded[bad_indexes] = interpolated
if maxPadLength:
blocks, n_blocks = ndimage.label(np.isnan(data))
for bl in range(1, n_blocks+1):
if (blocks==bl).sum() > maxPadLength:
data_padded[blocks==bl] = np.nan
return data_padded
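# note (added): worked example of the behaviour above:
# >>> data = np.array([1.0, np.nan, 3.0, np.nan, np.nan, 6.0])
# >>> pad(data)                  # interpolates every NaN
# array([1., 2., 3., 4., 5., 6.])
# >>> pad(data, maxPadLength=1)  # blocks of >1 consecutive NaNs stay NaN
# array([ 1.,  2.,  3., nan, nan,  6.])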
def runavg_periodic(ts, w):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Assumes periodicity of ts.
'''
N = len(ts)
ts = np.append(ts, np.append(ts, ts))
ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same')
ts = ts_smooth[N:2*N]
return ts
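# note (added): worked example of the wrap-around behaviour (w = 3):
# >>> runavg_periodic(np.array([0., 0., 0., 3., 0., 0.]), 3)
# array([0., 0., 1., 1., 1., 0.])
# the series is tripled before convolving, so the window sees the
# periodic extension at both ends.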
def runavg(ts, w, mode='same'):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Pads with NaNs outside of valid range.
Option 'mode' specifies how the edges are handled: 'same' returns the full
zero-padded convolution, 'valid' fills the (w-1)//2 edge values with NaNs.
'''
if mode == 'same':
ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode)
elif mode == 'valid':
ts_smooth = np.append(np.append(np.nan*np.ones((w-1)//2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)//2))
return ts_smooth
def timevector(date_start, date_end):
'''
Generates a daily time vector, along with year, month, day, day-of-year,
and full date information, given start and end dates. Dates are 3-element
lists, so that a start date of 3 May 2005 is specified as date_start = [2005,5,3].
Note that day-of-year (doy) is [1 to 59, 61 to 366] for non-leap years and [1 to 366]
for leap years (doy 60 corresponds to 29 February).
returns: t, dates, T, year, month, day, doy
'''
# Time vector
t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1)
T = len(t)
# Date list
dates = [date.fromordinal(tt.astype(int)) for tt in t]
# Vectors for year, month, day-of-month
year = np.zeros((T))
month = np.zeros((T))
day = np.zeros((T))
for tt in range(T):
year[tt] = date.fromordinal(t[tt]).year
month[tt] = date.fromordinal(t[tt]).month
day[tt] = date.fromordinal(t[tt]).day
year = year.astype(int)
month = month.astype(int)
day = day.astype(int)
# Leap-year baseline for defining day-of-year values
year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366
t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1)
dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear]
month_leapYear = np.zeros((len(t_leapYear)))
day_leapYear = np.zeros((len(t_leapYear)))
doy_leapYear = np.zeros((len(t_leapYear)))
for tt in range(len(t_leapYear)):
month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month
day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day
doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1
# Calculate day-of-year values
doy = np.zeros((T))
for tt in range(T):
doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])]
doy = doy.astype(int)
return t, dates, T, year, month, day, doy
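# note (added): worked example of the outputs above:
# >>> t, dates, T, year, month, day, doy = timevector([2005,5,1], [2005,5,3])
# >>> T
# 3
# >>> day
# array([1, 2, 3])
# >>> doy   # doy is mapped through a leap-year baseline, so 1 May is 122
# array([122, 123, 124])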
def spatial_filter(field, res, cut_lon, cut_lat):
'''
Performs a spatial filter, removing all features with
wavelength scales larger than cut_lon in longitude and
cut_lat in latitude from field. Field has spatial
resolution of res and land identified by np.nan's
'''
field_filt = np.zeros(field.shape)
# see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5
sig_lon = (cut_lon/5.) / res
sig_lat = (cut_lat/5.) / res
land = np.isnan(field)
field[land] = nanmean(field)
field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon])
field_filt[land] = np.nan
return field_filt
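# note (added): worked example of the sigma arithmetic above: with
# res = 0.25 (degrees), cut_lon = 10 and cut_lat = 10, the Gaussian sigmas
# are (10/5)/0.25 = 8 grid points in each direction, so features broader
# than ~10 degrees are removed and the mesoscale field remains.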
def trend(x, y, alpha=0.05):
'''
Calculates the trend of y given the linear
independent variable x. Outputs the mean,
trend, and alpha-level (e.g., 0.05 for 95%)
confidence limit on the trend.
returns mean, trend, dtrend_95
'''
valid = ~np.isnan(y)
if valid.sum() <= 1:
|
else:
X = np.array([np.ones(len
|
return np.nan, np.nan, np.nan
|
conditional_block
|
ecoliver.py
|
orrelation Function
+ve lags mean x leads y
-ve lags mean x lags y
'''
x = (x-np.mean(x))/(np.std(x)*len(x))
y = (y-np.mean(y))/np.std(y)
ccf = np.correlate(x, y, mode='full')
lags = np.arange(len(ccf)) - (len(x)-1)
return lags, ccf
def ttest_serialcorr(x, y):
'''
Calculates the t-test for the means of two samples under an assumption of serial
correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995)
'''
# Valid (non-NaN) data; return NaN if there is insufficient valid data
validx = ~np.isnan(x)
validy = ~np.isnan(y)
if (validx.sum() <= 1) or (validy.sum() <= 1):
return np.nan, np.nan
else:
# Sample lengths
nx = len(x[validx])
ny = len(y[validy])
# Autocorrelation Function (pad NaN values for an approximation)
rhox = acf(pad(x - np.nanmean(x)))
rhoy = acf(pad(y - np.nanmean(y)))
# Equivalent sample lengths
nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum())
ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum())
#if (nx < 30) or (ny < 30):
# print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution'
# Sample standard deviations
sx = np.sqrt(x[validx].var())
|
# t-statistic
t = (np.nanmean(x) - np.nanmean(y))/s
# Degrees of freedom
df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1))
# p-value
p = 1 - stats.t.cdf(t, df)
return t, p
def pad(data, maxPadLength=False):
'''
Linearly interpolate over missing data (NaNs) in a time series.
Inputs:
data Time series [1D numpy array]
maxPadLength Specifies the maximum length over which to interpolate,
i.e., any consecutive blocks of NaNs with length greater
than maxPadLength will be left as NaN. Set as an integer.
maxPadLength=False (default) interpolates over all NaNs.
Written by Eric Oliver, Institute for Marine and Antarctic Studies, University of Tasmania, Jun 2015
'''
data_padded = data.copy()
if len(data) == np.isnan(data).sum():
return np.nan*data_padded
else:
bad_indexes = np.isnan(data)
good_indexes = np.logical_not(bad_indexes)
good_data = data[good_indexes]
interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data)
data_padded[bad_indexes] = interpolated
if maxPadLength:
blocks, n_blocks = ndimage.label(np.isnan(data))
for bl in range(1, n_blocks+1):
if (blocks==bl).sum() > maxPadLength:
data_padded[blocks==bl] = np.nan
return data_padded
def runavg_periodic(ts, w):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Assumes periodicity of ts.
'''
N = len(ts)
ts = np.append(ts, np.append(ts, ts))
ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same')
ts = ts_smooth[N:2*N]
return ts
def runavg(ts, w, mode='same'):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Pads with NaNs outside of valid range.
Option 'mode' specifies how the edges are handled: 'same' returns the full
zero-padded convolution, 'valid' fills the (w-1)//2 edge values with NaNs.
'''
if mode == 'same':
ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode)
elif mode == 'valid':
ts_smooth = np.append(np.append(np.nan*np.ones((w-1)//2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)//2))
return ts_smooth
def timevector(date_start, date_end):
'''
Generates a daily time vector, along with year, month, day, day-of-year,
and full date information, given start and end dates. Dates are 3-element
lists, so that a start date of 3 May 2005 is specified as date_start = [2005,5,3].
Note that day-of-year (doy) is [1 to 59, 61 to 366] for non-leap years and [1 to 366]
for leap years (doy 60 corresponds to 29 February).
returns: t, dates, T, year, month, day, doy
'''
# Time vector
t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1)
T = len(t)
# Date list
dates = [date.fromordinal(tt.astype(int)) for tt in t]
# Vectors for year, month, day-of-month
year = np.zeros((T))
month = np.zeros((T))
day = np.zeros((T))
for tt in range(T):
year[tt] = date.fromordinal(t[tt]).year
month[tt] = date.fromordinal(t[tt]).month
day[tt] = date.fromordinal(t[tt]).day
year = year.astype(int)
month = month.astype(int)
day = day.astype(int)
# Leap-year baseline for defining day-of-year values
year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366
t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1)
dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear]
month_leapYear = np.zeros((len(t_leapYear)))
day_leapYear = np.zeros((len(t_leapYear)))
doy_leapYear = np.zeros((len(t_leapYear)))
for tt in range(len(t_leapYear)):
month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month
day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day
doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1
# Calculate day-of-year values
doy = np.zeros((T))
for tt in range(T):
doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])]
doy = doy.astype(int)
return t, dates, T, year, month, day, doy
def spatial_filter(field, res, cut_lon, cut_lat):
'''
Performs a spatial filter, removing all features with
wavelength scales larger than cut_lon in longitude and
cut_lat in latitude from field. Field has spatial
resolution of res and land identified by np.nan's
'''
field_filt = np.zeros(field.shape)
# see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5
sig_lon = (cut_lon/5.) / res
sig_lat = (cut_lat/5.) / res
land = np.isnan(field)
field[land] = nanmean(field)
field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon])
field_filt[land] = np.nan
return field_filt
def trend(x, y, alpha=0.05):
'''
Calculates the trend of y given the linear
independent variable x. Outputs the mean,
trend, and alpha-level (e.g., 0.05 for 95%)
confidence limit on the trend.
returns mean, trend, dtrend_95
'''
valid = ~np.isnan(y)
if valid.sum() <= 1:
return np.nan, np.nan, np.nan
else:
X = np.array([np.ones(len
|
sy = np.sqrt(y[validy].var())
s = np.sqrt(sx**2/nx + sy**2/ny)
|
random_line_split
|
ecoliver.py
|
relation Function
+ve lags mean x leads y
-ve lags mean x lags y
'''
x = (x-np.mean(x))/(np.std(x)*len(x))
y = (y-np.mean(y))/np.std(y)
ccf = np.correlate(x, y, mode='full')
lags = np.arange(len(ccf)) - (len(x)-1)
return lags, ccf
def
|
(x, y):
'''
Calculates the t-test for the means of two samples under an assumption of serial
correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995)
'''
# Valid (non-NaN) data; return NaN if there is insufficient valid data
validx = ~np.isnan(x)
validy = ~np.isnan(y)
if (validx.sum() <= 1) or (validy.sum() <= 1):
return np.nan, np.nan
else:
# Sample lengths
nx = len(x[validx])
ny = len(y[validy])
# Autocorrelation Function (pad NaN values for an approximation)
rhox = acf(pad(x - np.nanmean(x)))
rhoy = acf(pad(y - np.nanmean(y)))
# Equivalent sample lengths
nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum())
ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum())
#if (nx < 30) or (ny < 30):
# print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution'
# Sample standard deviations
sx = np.sqrt(x[validx].var())
sy = np.sqrt(y[validy].var())
s = np.sqrt(sx**2/nx + sy**2/ny)
# t-statistic
t = (np.nanmean(x) - np.nanmean(y))/s
# Degrees of freedom
df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1))
# p-value
p = 1 - stats.t.cdf(t, df)
return t, p
def pad(data, maxPadLength=False):
'''
Linearly interpolate over missing data (NaNs) in a time series.
Inputs:
data Time series [1D numpy array]
maxPadLength Specifies the maximum length over which to interpolate,
i.e., any consecutive blocks of NaNs with length greater
than maxPadLength will be left as NaN. Set as an integer.
maxPadLength=False (default) interpolates over all NaNs.
Written by Eric Oliver, Institute for Marine and Antarctic Studies, University of Tasmania, Jun 2015
'''
data_padded = data.copy()
if len(data) == np.isnan(data).sum():
return np.nan*data_padded
else:
bad_indexes = np.isnan(data)
good_indexes = np.logical_not(bad_indexes)
good_data = data[good_indexes]
interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data)
data_padded[bad_indexes] = interpolated
if maxPadLength:
blocks, n_blocks = ndimage.label(np.isnan(data))
for bl in range(1, n_blocks+1):
if (blocks==bl).sum() > maxPadLength:
data_padded[blocks==bl] = np.nan
return data_padded
def runavg_periodic(ts, w):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Assumes periodicity of ts.
'''
N = len(ts)
ts = np.append(ts, np.append(ts, ts))
ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same')
ts = ts_smooth[N:2*N]
return ts
def runavg(ts, w, mode='same'):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Pads with NaNs outside of valid range.
Option 'mode' specifies how the edges are handled: 'same' returns the full
zero-padded convolution, 'valid' fills the (w-1)//2 edge values with NaNs.
'''
if mode == 'same':
ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode)
elif mode == 'valid':
ts_smooth = np.append(np.append(np.nan*np.ones((w-1)//2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)//2))
return ts_smooth
def timevector(date_start, date_end):
'''
Generates a daily time vector, along with year, month, day, day-of-year,
and full date information, given start and end dates. Dates are 3-element
lists, so that a start date of 3 May 2005 is specified as date_start = [2005,5,3].
Note that day-of-year (doy) is [1 to 59, 61 to 366] for non-leap years and [1 to 366]
for leap years (doy 60 corresponds to 29 February).
returns: t, dates, T, year, month, day, doy
'''
# Time vector
t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1)
T = len(t)
# Date list
dates = [date.fromordinal(tt.astype(int)) for tt in t]
# Vectors for year, month, day-of-month
year = np.zeros((T))
month = np.zeros((T))
day = np.zeros((T))
for tt in range(T):
year[tt] = date.fromordinal(t[tt]).year
month[tt] = date.fromordinal(t[tt]).month
day[tt] = date.fromordinal(t[tt]).day
year = year.astype(int)
month = month.astype(int)
day = day.astype(int)
# Leap-year baseline for defining day-of-year values
year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366
t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1)
dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear]
month_leapYear = np.zeros((len(t_leapYear)))
day_leapYear = np.zeros((len(t_leapYear)))
doy_leapYear = np.zeros((len(t_leapYear)))
for tt in range(len(t_leapYear)):
month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month
day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day
doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1
# Calculate day-of-year values
doy = np.zeros((T))
for tt in range(T):
doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])]
doy = doy.astype(int)
return t, dates, T, year, month, day, doy
def spatial_filter(field, res, cut_lon, cut_lat):
'''
Performs a spatial filter, removing all features with
wavelength scales larger than cut_lon in longitude and
cut_lat in latitude from field. Field has spatial
resolution of res and land identified by np.nan's
'''
field_filt = np.zeros(field.shape)
# see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5
sig_lon = (cut_lon/5.) / res
sig_lat = (cut_lat/5.) / res
land = np.isnan(field)
field[land] = nanmean(field)
field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon])
field_filt[land] = np.nan
return field_filt
def trend(x, y, alpha=0.05):
'''
Calculates the trend of y given the linear
independent variable x. Outputs the mean,
trend, and alpha-level (e.g., 0.05 for 95%)
confidence limit on the trend.
returns mean, trend, dtrend_95
'''
valid = ~np.isnan(y)
if valid.sum() <= 1:
return np.nan, np.nan, np.nan
else:
X = np.array([np.ones(len
|
ttest_serialcorr
|
identifier_name
|
ecoliver.py
|
95)
'''
# Valid (non-NaN) data; return NaN if there is insufficient valid data
validx = ~np.isnan(x)
validy = ~np.isnan(y)
if (validx.sum() <= 1) or (validy.sum() <= 1):
return np.nan, np.nan
else:
# Sample lengths
nx = len(x[validx])
ny = len(y[validy])
# Autocorrelation Function (pad NaN values for an approximation)
rhox = acf(pad(x - np.nanmean(x)))
rhoy = acf(pad(y - np.nanmean(y)))
# Equivalent sample lengths
nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum())
ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum())
#if (nx < 30) or (ny < 30):
# print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution'
# Sample standard deviations
sx = np.sqrt(x[validx].var())
sy = np.sqrt(y[validy].var())
s = np.sqrt(sx**2/nx + sy**2/ny)
# t-statistic
t = (np.nanmean(x) - np.nanmean(y))/s
# Degrees of freedom
df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1))
# p-value
p = 1 - stats.t.cdf(t, df)
return t, p
def pad(data, maxPadLength=False):
'''
Linearly interpolate over missing data (NaNs) in a time series.
Inputs:
data Time series [1D numpy array]
maxPadLength Specifies the maximum length over which to interpolate,
i.e., any consecutive blocks of NaNs with length greater
than maxPadLength will be left as NaN. Set as an integer.
maxPadLength=False (default) interpolates over all NaNs.
Written by Eric Oliver, Institute for Marine and Antarctic Studies, University of Tasmania, Jun 2015
'''
data_padded = data.copy()
if len(data) == np.isnan(data).sum():
return np.nan*data_padded
else:
bad_indexes = np.isnan(data)
good_indexes = np.logical_not(bad_indexes)
good_data = data[good_indexes]
interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data)
data_padded[bad_indexes] = interpolated
if maxPadLength:
blocks, n_blocks = ndimage.label(np.isnan(data))
for bl in range(1, n_blocks+1):
if (blocks==bl).sum() > maxPadLength:
data_padded[blocks==bl] = np.nan
return data_padded
def runavg_periodic(ts, w):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Assumes periodicity of ts.
'''
N = len(ts)
ts = np.append(ts, np.append(ts, ts))
ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same')
ts = ts_smooth[N:2*N]
return ts
def runavg(ts, w, mode='same'):
'''
Perform running average of ts (1D numpy array) using uniform window
of width w (w must be odd). Pads with NaNs outside of valid range.
Option 'mode' specifies how the edges are handled: 'same' returns the full
zero-padded convolution, 'valid' fills the (w-1)//2 edge values with NaNs.
'''
if mode == 'same':
ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode)
elif mode == 'valid':
ts_smooth = np.append(np.append(np.nan*np.ones((w-1)//2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)//2))
return ts_smooth
def timevector(date_start, date_end):
'''
Generates a daily time vector, along with year, month, day, day-of-year,
and full date information, given start and end dates. Dates are 3-element
lists, so that a start date of 3 May 2005 is specified as date_start = [2005,5,3].
Note that day-of-year (doy) is [1 to 59, 61 to 366] for non-leap years and [1 to 366]
for leap years (doy 60 corresponds to 29 February).
returns: t, dates, T, year, month, day, doy
'''
# Time vector
t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1)
T = len(t)
# Date list
dates = [date.fromordinal(tt.astype(int)) for tt in t]
# Vectors for year, month, day-of-month
year = np.zeros((T))
month = np.zeros((T))
day = np.zeros((T))
for tt in range(T):
year[tt] = date.fromordinal(t[tt]).year
month[tt] = date.fromordinal(t[tt]).month
day[tt] = date.fromordinal(t[tt]).day
year = year.astype(int)
month = month.astype(int)
day = day.astype(int)
# Leap-year baseline for defining day-of-year values
year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366
t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1)
dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear]
month_leapYear = np.zeros((len(t_leapYear)))
day_leapYear = np.zeros((len(t_leapYear)))
doy_leapYear = np.zeros((len(t_leapYear)))
for tt in range(len(t_leapYear)):
month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month
day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day
doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1
# Calculate day-of-year values
doy = np.zeros((T))
for tt in range(T):
doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])]
doy = doy.astype(int)
return t, dates, T, year, month, day, doy
def spatial_filter(field, res, cut_lon, cut_lat):
'''
Performs a spatial filter, removing all features with
wavelength scales larger than cut_lon in longitude and
cut_lat in latitude from field. Field has spatial
resolution of res and land identified by np.nan's
'''
field_filt = np.zeros(field.shape)
# see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5
sig_lon = (cut_lon/5.) / res
sig_lat = (cut_lat/5.) / res
land = np.isnan(field)
field[land] = nanmean(field)
field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon])
field_filt[land] = np.nan
return field_filt
def trend(x, y, alpha=0.05):
|
'''
Calculates the trend of y given the linear
independent variable x. Outputs the mean,
trend, and alpha-level (e.g., 0.05 for 95%)
confidence limit on the trend.
returns mean, trend, dtrend_95
'''
valid = ~np.isnan(y)
if valid.sum() <= 1:
return np.nan, np.nan, np.nan
else:
X = np.array([np.ones(len(x)), x-x.mean()])
beta = linalg.lstsq(X[:,valid].T, y[valid])[0]
yhat = np.sum(beta*X.T, axis=1)
t_stat = stats.t.isf(alpha/2, len(x[valid])-2)
s = np.sqrt(np.sum((y[valid] - yhat[valid])**2) / (len(x[valid])-2))
Sxx = np.sum(X[1,valid]**2) - (np.sum(X[1,valid])**2)/len(x[valid]) # np.var(X, axis=1)[1]
return beta[0], beta[1], t_stat * s / np.sqrt(Sxx)
|
identifier_body
|
|
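# note (added): illustrative usage of `trend` (synthetic data, made up for
# the example):
# x = np.arange(100.0)
# y = 0.5 + 0.02 * x + 0.1 * np.random.randn(100)
# mean, slope, dslope_95 = trend(x, y)
# `mean` is beta[0], the fitted value at the centre of x (x is centred before
# the least-squares fit), `slope` should come out near 0.02, and `dslope_95`
# is the half-width of the 95% confidence interval on the slope for the
# default alpha = 0.05.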
window.rs
|
);
// support f64 seconds by multiplying then using from_millis
let interval = std::time::Duration::from_millis((interval * 1000.0) as u64);
crossterm::terminal::enable_raw_mode()?;
// stdout().execute(crossterm::event::EnableMouseCapture)?
stdout().execute(cursor::Hide)?;
stdout().execute(terminal::EnterAlternateScreen)?;
stdout().execute(terminal::SetTitle("apachetop"))?;
loop {
self.redraw()?;
if crossterm::event::poll(interval)? && !self.handle_event()? {
break;
}
}
crossterm::terminal::disable_raw_mode()?;
stdout().execute(terminal::LeaveAlternateScreen)?;
stdout().execute(cursor::Show)?;
// stdout().execute(crossterm::event::DisableMouseCapture)?;
Ok(())
}
fn redraw(&mut self) -> Result<(), Error> {
let mut stdout = stdout();
stdout
.queue(terminal::Clear(terminal::ClearType::All))?
.queue(cursor::MoveTo(0, 0))?
.queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))?
.queue(cursor::MoveTo(self.cols / 2, 0))?
.queue(Print(self.started_at.to_string()))?
.queue(cursor::MoveTo(self.cols - 8 as u16, 0))?
.queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?;
{
let alltime_stats = self.alltime_stats.lock().unwrap();
let elapsed = self.started_at.elapsed().as_secs() as f64;
stdout
.queue(cursor::MoveTo(0, 1))?
.queue(Print(self.primary_stats_line(
&alltime_stats,
elapsed,
true,
)))?
.queue(cursor::MoveTo(0, 2))?
.queue(Print(self.per_code_line(&alltime_stats)))?;
} // mutex on alltime_stats
{
let mut ring_buffer = self.ring_buffer.lock().unwrap();
// TODO: better in another thread, not at display time?
ring_buffer.cleanup()?;
let elapsed = match ring_buffer.first() {
Some(f) => {
let first = chrono::DateTime::<chrono::Utc>::from(f.time);
(chrono::Utc::now() - first).num_seconds() as f64
}
None => 1.0, // avoid divide by zero later
};
stdout
.queue(cursor::MoveTo(0, 3))?
.queue(Print(self.primary_stats_line(
&ring_buffer.stats,
elapsed,
false,
)))?
.queue(cursor::MoveTo(0, 4))?
.queue(Print(self.per_code_line(&ring_buffer.stats)))?;
{
let options = self.options.lock().unwrap();
stdout.queue(cursor::MoveTo(0, 6))?.queue(Print(
format!(
"{:width$}",
&format!(
" REQS REQS/S SIZE SZ/S {}",
options.group.to_string()
),
width = self.cols as usize
)
.negative(),
))?;
} // read lock on options
if let Some(grouped) = &ring_buffer.grouped {
use lazysort::SortedBy;
// convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>,
// sort it by the RingBuffers, then lazy-sort the first n lines for display.
for (key, ring_buffer) in grouped
.iter()
.filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work
.collect::<Vec<(&GroupKey, &RingBuffer)>>()
.iter()
.sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer
.take((self.lines - 7/* lines used for header */) as usize)
{
stdout
.queue(cursor::MoveToNextLine(1))?
.queue(Print(self.table_line(key, ring_buffer, elapsed)))?;
}
}
} // mutex on ring_buffer
stdout.flush()?;
Ok(())
}
fn
|
(&mut self) -> Result<bool, Error> {
use crossterm::event::Event::{Key, Mouse, Resize};
use crossterm::event::KeyCode::Char;
use crossterm::event::{KeyEvent, KeyModifiers};
match crossterm::event::read()? {
Key(KeyEvent {
code: Char('q'), ..
})
| Key(KeyEvent {
modifiers: KeyModifiers::CONTROL,
code: Char('c'),
}) => return Ok(false),
Key(KeyEvent {
code: Char('o'), ..
}) => {
self.toggle_sort();
}
Key(KeyEvent {
code: Char('g'), ..
}) => {
self.toggle_group();
}
Key(event) => info!("{:?}", event),
Mouse(event) => info!("{:?}", event),
Resize(cols, lines) => {
self.lines = lines;
self.cols = cols;
}
}
Ok(true)
}
fn toggle_sort(&self) {
self.options.lock().unwrap().toggle_sort();
}
fn toggle_group(&self) {
let mut o = self.options.lock().unwrap();
let group_by = o.toggle_group();
drop(o);
self.ring_buffer.lock().unwrap().regroup(group_by);
}
fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String {
let reqs = rr.stats.global.requests as f64;
format!(
"{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}",
width = (self.cols - 30) as usize,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(rr.stats.global.bytes as f64),
hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed),
key = key
)
}
// All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req)
fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String {
let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64;
let reqs = stats.global.requests as f64;
let header = if alltime { "All:" } else { "R:" };
format!(
"{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req",
bold = Attribute::Bold,
reset = Attribute::Reset,
space = ((self.cols - 50) / 2) as usize,
header = header,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(stats.global.bytes as f64),
hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed),
hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero)
)
}
// 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%)
fn per_code_line(&self, stats: &Stats) -> String {
let stats_2 = &stats.by_status_code[2];
let stats_3 = &stats.by_status_code[3];
let stats_4 = &stats.by_status_code[4];
let stats_5 = &stats.by_status_code[5];
// closure to reduce some duplication for some munging below
let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) {
// avoid divide by zero if there's no requests yet
let pct = if stats.global.requests > 0 {
100.0 * (rb_stats.requests as f64 / stats.global.requests as f64)
} else {
0.0
};
// intelligent dp detection: eg 2.34%, 10.5%, 100%
let dp = if (pct - 100.0).abs() < f64::EPSILON {
0
} else if pct < 10.0 {
2
} else {
1
};
(pct, dp)
};
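// note (added): worked examples of the dp rule above:
//   pct =   2.345 -> dp = 2 (two decimal places, e.g. 2.35%)
//   pct =  10.5   -> dp = 1 (e.g. 10.5%)
//   pct = 100.0   -> dp = 0 (e.g. 100%)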
let (code_2_pct, code_2_dp) = c(stats_2);
let (code_3_pct, code_3_dp)
|
handle_event
|
identifier_name
|
window.rs
|
);
// support f64 seconds by multiplying then using from_millis
let interval = std::time::Duration::from_millis((interval * 1000.0) as u64);
crossterm::terminal::enable_raw_mode()?;
// stdout().execute(crossterm::event::EnableMouseCapture)?
stdout().execute(cursor::Hide)?;
stdout().execute(terminal::EnterAlternateScreen)?;
stdout().execute(terminal::SetTitle("apachetop"))?;
loop {
self.redraw()?;
if crossterm::event::poll(interval)? && !self.handle_event()? {
break;
}
}
crossterm::terminal::disable_raw_mode()?;
stdout().execute(terminal::LeaveAlternateScreen)?;
stdout().execute(cursor::Show)?;
// stdout().execute(crossterm::event::DisableMouseCapture)?;
Ok(())
}
fn redraw(&mut self) -> Result<(), Error>
|
elapsed,
true,
)))?
.queue(cursor::MoveTo(0, 2))?
.queue(Print(self.per_code_line(&alltime_stats)))?;
} // mutex on alltime_stats
{
let mut ring_buffer = self.ring_buffer.lock().unwrap();
// TODO: better in another thread, not at display time?
ring_buffer.cleanup()?;
let elapsed = match ring_buffer.first() {
Some(f) => {
let first = chrono::DateTime::<chrono::Utc>::from(f.time);
(chrono::Utc::now() - first).num_seconds() as f64
}
None => 1.0, // avoid divide by zero later
};
stdout
.queue(cursor::MoveTo(0, 3))?
.queue(Print(self.primary_stats_line(
&ring_buffer.stats,
elapsed,
false,
)))?
.queue(cursor::MoveTo(0, 4))?
.queue(Print(self.per_code_line(&ring_buffer.stats)))?;
{
let options = self.options.lock().unwrap();
stdout.queue(cursor::MoveTo(0, 6))?.queue(Print(
format!(
"{:width$}",
&format!(
" REQS REQS/S SIZE SZ/S {}",
options.group.to_string()
),
width = self.cols as usize
)
.negative(),
))?;
} // read lock on options
if let Some(grouped) = &ring_buffer.grouped {
use lazysort::SortedBy;
// convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>,
// sort it by the RingBuffers, then lazy-sort the first n lines for display.
for (key, ring_buffer) in grouped
.iter()
.filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work
.collect::<Vec<(&GroupKey, &RingBuffer)>>()
.iter()
.sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer
.take((self.lines - 7/* lines used for header */) as usize)
{
stdout
.queue(cursor::MoveToNextLine(1))?
.queue(Print(self.table_line(key, ring_buffer, elapsed)))?;
}
}
} // mutex on ring_buffer
stdout.flush()?;
Ok(())
}
fn handle_event(&mut self) -> Result<bool, Error> {
use crossterm::event::Event::{Key, Mouse, Resize};
use crossterm::event::KeyCode::Char;
use crossterm::event::{KeyEvent, KeyModifiers};
match crossterm::event::read()? {
Key(KeyEvent {
code: Char('q'), ..
})
| Key(KeyEvent {
modifiers: KeyModifiers::CONTROL,
code: Char('c'),
}) => return Ok(false),
Key(KeyEvent {
code: Char('o'), ..
}) => {
self.toggle_sort();
}
Key(KeyEvent {
code: Char('g'), ..
}) => {
self.toggle_group();
}
Key(event) => info!("{:?}", event),
Mouse(event) => info!("{:?}", event),
Resize(cols, lines) => {
self.lines = lines;
self.cols = cols;
}
}
Ok(true)
}
fn toggle_sort(&self) {
self.options.lock().unwrap().toggle_sort();
}
fn toggle_group(&self) {
let mut o = self.options.lock().unwrap();
let group_by = o.toggle_group();
drop(o);
self.ring_buffer.lock().unwrap().regroup(group_by);
}
fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String {
let reqs = rr.stats.global.requests as f64;
format!(
"{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}",
width = (self.cols - 30) as usize,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(rr.stats.global.bytes as f64),
hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed),
key = key
)
}
// All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req)
fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String {
let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64;
let reqs = stats.global.requests as f64;
let header = if alltime { "All:" } else { "R:" };
format!(
"{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req",
bold = Attribute::Bold,
reset = Attribute::Reset,
space = ((self.cols - 50) / 2) as usize,
header = header,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(stats.global.bytes as f64),
hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed),
hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero)
)
}
// 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%)
fn per_code_line(&self, stats: &Stats) -> String {
let stats_2 = &stats.by_status_code[2];
let stats_3 = &stats.by_status_code[3];
let stats_4 = &stats.by_status_code[4];
let stats_5 = &stats.by_status_code[5];
// closure to reduce some duplication for some munging below
let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) {
// avoid divide by zero if there's no requests yet
let pct = if stats.global.requests > 0 {
100.0 * (rb_stats.requests as f64 / stats.global.requests as f64)
} else {
0.0
};
// intelligent dp detection: eg 2.34%, 10.5%, 100%
let dp = if (pct - 100.0).abs() < f64::EPSILON {
0
} else if pct < 10.0 {
2
} else {
1
};
(pct, dp)
};
let (code_2_pct, code_2_dp) = c(stats_2);
let (code_3_pct, code_3_dp)
|
{
let mut stdout = stdout();
stdout
.queue(terminal::Clear(terminal::ClearType::All))?
.queue(cursor::MoveTo(0, 0))?
.queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))?
.queue(cursor::MoveTo(self.cols / 2, 0))?
.queue(Print(self.started_at.to_string()))?
.queue(cursor::MoveTo(self.cols - 8 as u16, 0))?
.queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?;
{
let alltime_stats = self.alltime_stats.lock().unwrap();
let elapsed = self.started_at.elapsed().as_secs() as f64;
stdout
.queue(cursor::MoveTo(0, 1))?
.queue(Print(self.primary_stats_line(
&alltime_stats,
|
identifier_body
|
window.rs
|
);
// support f64 seconds by multiplying then using from_millis
let interval = std::time::Duration::from_millis((interval * 1000.0) as u64);
crossterm::terminal::enable_raw_mode()?;
// stdout().execute(crossterm::event::EnableMouseCapture)?
stdout().execute(cursor::Hide)?;
stdout().execute(terminal::EnterAlternateScreen)?;
stdout().execute(terminal::SetTitle("apachetop"))?;
loop {
self.redraw()?;
if crossterm::event::poll(interval)? && !self.handle_event()? {
break;
}
}
crossterm::terminal::disable_raw_mode()?;
stdout().execute(terminal::LeaveAlternateScreen)?;
stdout().execute(cursor::Show)?;
// stdout().execute(crossterm::event::DisableMouseCapture)?;
Ok(())
}
fn redraw(&mut self) -> Result<(), Error> {
let mut stdout = stdout();
stdout
.queue(terminal::Clear(terminal::ClearType::All))?
.queue(cursor::MoveTo(0, 0))?
.queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))?
.queue(cursor::MoveTo(self.cols / 2, 0))?
.queue(Print(self.started_at.to_string()))?
.queue(cursor::MoveTo(self.cols - 8 as u16, 0))?
.queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?;
{
let alltime_stats = self.alltime_stats.lock().unwrap();
let elapsed = self.started_at.elapsed().as_secs() as f64;
stdout
.queue(cursor::MoveTo(0, 1))?
.queue(Print(self.primary_stats_line(
&alltime_stats,
elapsed,
true,
)))?
.queue(cursor::MoveTo(0, 2))?
.queue(Print(self.per_code_line(&alltime_stats)))?;
} // mutex on alltime_stats
{
let mut ring_buffer = self.ring_buffer.lock().unwrap();
// TODO: better in another thread, not at display time?
ring_buffer.cleanup()?;
let elapsed = match ring_buffer.first() {
Some(f) => {
let first = chrono::DateTime::<chrono::Utc>::from(f.time);
(chrono::Utc::now() - first).num_seconds() as f64
}
None => 1.0, // avoid divide by zero later
};
stdout
.queue(cursor::MoveTo(0, 3))?
.queue(Print(self.primary_stats_line(
&ring_buffer.stats,
elapsed,
false,
)))?
.queue(cursor::MoveTo(0, 4))?
.queue(Print(self.per_code_line(&ring_buffer.stats)))?;
{
let options = self.options.lock().unwrap();
stdout.queue(cursor::MoveTo(0, 6))?.queue(Print(
format!(
"{:width$}",
&format!(
" REQS REQS/S SIZE SZ/S {}",
options.group.to_string()
),
width = self.cols as usize
)
.negative(),
))?;
} // read lock on options
if let Some(grouped) = &ring_buffer.grouped {
use lazysort::SortedBy;
// convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>,
// sort it by the RingBuffers, then lazy-sort the first n lines for display.
for (key, ring_buffer) in grouped
.iter()
.filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work
.collect::<Vec<(&GroupKey, &RingBuffer)>>()
.iter()
.sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer
.take((self.lines - 7/* lines used for header */) as usize)
{
stdout
.queue(cursor::MoveToNextLine(1))?
.queue(Print(self.table_line(key, ring_buffer, elapsed)))?;
}
}
} // mutex on ring_buffer
stdout.flush()?;
Ok(())
}
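// note (added): key bindings handled below, as implemented in the match arms:
// 'q' or Ctrl-C quits (returns Ok(false)), 'o' toggles the sort order, 'g'
// cycles the grouping (which also regroups the ring buffer); a terminal
// resize updates the cached dimensions. Other key and mouse events are
// only logged.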
fn handle_event(&mut self) -> Result<bool, Error> {
use crossterm::event::Event::{Key, Mouse, Resize};
use crossterm::event::KeyCode::Char;
use crossterm::event::{KeyEvent, KeyModifiers};
match crossterm::event::read()? {
Key(KeyEvent {
code: Char('q'), ..
})
| Key(KeyEvent {
modifiers: KeyModifiers::CONTROL,
code: Char('c'),
}) => return Ok(false),
Key(KeyEvent {
code: Char('o'), ..
}) => {
self.toggle_sort();
}
Key(KeyEvent {
code: Char('g'), ..
}) => {
self.toggle_group();
}
Key(event) => info!("{:?}", event),
Mouse(event) => info!("{:?}", event),
Resize(cols, lines) => {
self.lines = lines;
self.cols = cols;
}
}
Ok(true)
}
fn toggle_sort(&self) {
self.options.lock().unwrap().toggle_sort();
}
fn toggle_group(&self) {
let mut o = self.options.lock().unwrap();
let group_by = o.toggle_group();
drop(o);
self.ring_buffer.lock().unwrap().regroup(group_by);
}
fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String {
let reqs = rr.stats.global.requests as f64;
format!(
"{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}",
width = (self.cols - 30) as usize,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(rr.stats.global.bytes as f64),
hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed),
key = key
)
}
// All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req)
fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String {
let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64;
let reqs = stats.global.requests as f64;
let header = if alltime { "All:" } else { "R:" };
format!(
"{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req",
bold = Attribute::Bold,
reset = Attribute::Reset,
space = ((self.cols - 50) / 2) as usize,
header = header,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(stats.global.bytes as f64),
hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed),
hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero)
)
}
// 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%)
fn per_code_line(&self, stats: &Stats) -> String {
let stats_2 = &stats.by_status_code[2];
let stats_3 = &stats.by_status_code[3];
let stats_4 = &stats.by_status_code[4];
let stats_5 = &stats.by_status_code[5];
// closure to reduce some duplication for some munging below
let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) {
// avoid divide by zero if there's no requests yet
let pct = if stats.global.requests > 0
|
else {
0.0
};
// intelligent dp detection: eg 2.34%, 10.5%, 100%
let dp = if (pct - 100.0).abs() < f64::EPSILON {
0
} else if pct < 10.0 {
2
} else {
1
};
(pct, dp)
};
let (code_2_pct, code_2_dp) = c(stats_2);
let (code_3_pct, code_3_dp)
|
{
100.0 * (rb_stats.requests as f64 / stats.global.requests as f64)
}
|
conditional_block
|
window.rs
|
(options);
// support f64 seconds by multiplying then using from_millis
let interval = std::time::Duration::from_millis((interval * 1000.0) as u64);
crossterm::terminal::enable_raw_mode()?;
// stdout().execute(crossterm::event::EnableMouseCapture)?
stdout().execute(cursor::Hide)?;
stdout().execute(terminal::EnterAlternateScreen)?;
stdout().execute(terminal::SetTitle("apachetop"))?;
loop {
self.redraw()?;
if crossterm::event::poll(interval)? && !self.handle_event()? {
break;
}
}
crossterm::terminal::disable_raw_mode()?;
stdout().execute(terminal::LeaveAlternateScreen)?;
stdout().execute(cursor::Show)?;
// stdout().execute(crossterm::event::DisableMouseCapture)?;
Ok(())
}
fn redraw(&mut self) -> Result<(), Error> {
let mut stdout = stdout();
stdout
.queue(terminal::Clear(terminal::ClearType::All))?
|
.queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))?
.queue(cursor::MoveTo(self.cols / 2, 0))?
.queue(Print(self.started_at.to_string()))?
.queue(cursor::MoveTo(self.cols - 8 as u16, 0))?
.queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?;
{
let alltime_stats = self.alltime_stats.lock().unwrap();
let elapsed = self.started_at.elapsed().as_secs() as f64;
stdout
.queue(cursor::MoveTo(0, 1))?
.queue(Print(self.primary_stats_line(
&alltime_stats,
elapsed,
true,
)))?
.queue(cursor::MoveTo(0, 2))?
.queue(Print(self.per_code_line(&alltime_stats)))?;
} // mutex on alltime_stats
{
let mut ring_buffer = self.ring_buffer.lock().unwrap();
// TODO: better in another thread, not at display time?
ring_buffer.cleanup()?;
let elapsed = match ring_buffer.first() {
Some(f) => {
let first = chrono::DateTime::<chrono::Utc>::from(f.time);
(chrono::Utc::now() - first).num_seconds() as f64
}
None => 1.0, // avoid divide by zero later
};
stdout
.queue(cursor::MoveTo(0, 3))?
.queue(Print(self.primary_stats_line(
&ring_buffer.stats,
elapsed,
false,
)))?
.queue(cursor::MoveTo(0, 4))?
.queue(Print(self.per_code_line(&ring_buffer.stats)))?;
{
let options = self.options.lock().unwrap();
stdout.queue(cursor::MoveTo(0, 6))?.queue(Print(
format!(
"{:width$}",
&format!(
" REQS REQS/S SIZE SZ/S {}",
options.group.to_string()
),
width = self.cols as usize
)
.negative(),
))?;
} // read lock on options
if let Some(grouped) = &ring_buffer.grouped {
use lazysort::SortedBy;
// convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>,
// sort it by the RingBuffers, then lazy-sort the first n lines for display.
for (key, ring_buffer) in grouped
.iter()
.filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work
.collect::<Vec<(&GroupKey, &RingBuffer)>>()
.iter()
.sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer
.take((self.lines - 7/* lines used for header */) as usize)
{
stdout
.queue(cursor::MoveToNextLine(1))?
.queue(Print(self.table_line(key, ring_buffer, elapsed)))?;
}
}
} // mutex on ring_buffer
stdout.flush()?;
Ok(())
}
fn handle_event(&mut self) -> Result<bool, Error> {
use crossterm::event::Event::{Key, Mouse, Resize};
use crossterm::event::KeyCode::Char;
use crossterm::event::{KeyEvent, KeyModifiers};
match crossterm::event::read()? {
Key(KeyEvent {
code: Char('q'), ..
})
| Key(KeyEvent {
modifiers: KeyModifiers::CONTROL,
code: Char('c'),
}) => return Ok(false),
Key(KeyEvent {
code: Char('o'), ..
}) => {
self.toggle_sort();
}
Key(KeyEvent {
code: Char('g'), ..
}) => {
self.toggle_group();
}
Key(event) => info!("{:?}", event),
Mouse(event) => info!("{:?}", event),
Resize(cols, lines) => {
self.lines = lines;
self.cols = cols;
}
}
Ok(true)
}
fn toggle_sort(&self) {
self.options.lock().unwrap().toggle_sort();
}
fn toggle_group(&self) {
let mut o = self.options.lock().unwrap();
let group_by = o.toggle_group();
drop(o);
self.ring_buffer.lock().unwrap().regroup(group_by);
}
fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String {
let reqs = rr.stats.global.requests as f64;
format!(
"{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}",
width = (self.cols - 30) as usize,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(rr.stats.global.bytes as f64),
hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed),
key = key
)
}
// All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req)
fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String {
let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64;
let reqs = stats.global.requests as f64;
let header = if alltime { "All:" } else { "R:" };
format!(
"{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req",
bold = Attribute::Bold,
reset = Attribute::Reset,
space = ((self.cols - 50) / 2) as usize,
header = header,
reqs = reqs,
reqs_per_sec = reqs / elapsed,
hb = Self::humansize(stats.global.bytes as f64),
hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed),
hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero)
)
}
// 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%)
fn per_code_line(&self, stats: &Stats) -> String {
let stats_2 = &stats.by_status_code[2];
let stats_3 = &stats.by_status_code[3];
let stats_4 = &stats.by_status_code[4];
let stats_5 = &stats.by_status_code[5];
// closure to reduce some duplication for some munging below
let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) {
// avoid divide by zero if there's no requests yet
let pct = if stats.global.requests > 0 {
100.0 * (rb_stats.requests as f64 / stats.global.requests as f64)
} else {
0.0
};
// intelligent dp detection: eg 2.34%, 10.5%, 100%
let dp = if (pct - 100.0).abs() < f64::EPSILON {
0
} else if pct < 10.0 {
2
} else {
1
};
(pct, dp)
};
let (code_2_pct, code_2_dp) = c(stats_2);
let (code_3_pct, code_3_dp) = c(stats_3);
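// The rest of this function is truncated in this excerpt. A hedged sketch of
// the presumable tail, using Rust's runtime `.*` precision so each percentage
// is printed with the dp chosen above (e.g. "2.34%", "10.5%", "100%"):
//   let (code_4_pct, code_4_dp) = c(stats_4);
//   let (code_5_pct, code_5_dp) = c(stats_5);
//   format!(
//       "2xx: {:7} ({:4.*}%) 3xx: {:7} ({:4.*}%) 4xx: {:7} ({:4.*}%) 5xx: {:7} ({:4.*}%)",
//       stats_2.requests, code_2_dp, code_2_pct,
//       stats_3.requests, code_3_dp, code_3_pct,
//       stats_4.requests, code_4_dp, code_4_pct,
//       stats_5.requests, code_5_dp, code_5_pct,
//   )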
util.js
function isNumeric(n)
{
return !isNaN(parseFloat(n)) && isFinite(n);
}
(function($) {
$.loading = function(show)
{
var hide = (show === false); // no parameter assumes 'show' ; false = 'hide'
if(!j('#waiting').size())
{
j('body').append("<div id='waiting' style='display:none;'><div class='XXui-widget-overlay'></div><div id='' class='progress ui-corner-all'>Loading...<div class='progressbar'></div></div></div>");
//hide = false;
}
if(hide)
{
j('#waiting').fadeOut();
} else {
j('#waiting .progressbar').progressbar({value: false});
if(!j('#waiting').is(":visible"))
{
j('#waiting').fadeIn();
}
}
};
$.fn.ignoreEnter = function()
{
$(this).keypress(function(e) {
if(e.keyCode == 10 || e.keyCode == 13)
{
e.preventDefault();
}
}
);
};
$.fn.changeup = function(callback, timeout) {
var target = this;
if(!timeout) { timeout = 500; }
$(this).keyup(function() {
clearTimeout($(this).data('keyup_timeout_id'));
$(this).data('keyup_timeout_id', setTimeout(function() { callback(target); }, timeout));
});
// also detect paste
$(this).bind('paste',function() {
clearTimeout($(this).data('keyup_timeout_id'));
callback($(this));
});
$(this).change(function() {
clearTimeout($(this).data('keyup_timeout_id'));
callback($(this));
});
};
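// Usage sketch (element id and renderPreview are hypothetical): debounce an
// expensive handler so it runs 500ms after typing stops, and immediately on
// paste or change:
//   j('#title').changeup(function(target) { renderPreview(j(target).val()); });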
$.alert = function(msg,title)
{
// hide pleasewait abruptly (not animated)
j.hidespin(true);
j('#alert').html(msg);
j('#alert').dialog({modal: true, title: title, resizable: false, width: 400, draggable: false, buttons: [ {text: "OK", click: function() { j('#alert').dialog('close'); } } ] });
};
$.fn.modalcenter = function() {
// Also change the height if it's too tall, ie content added.
j(this).unbind('dialogopen.modalcenter');
j(this).bind('dialogopen.modalcenter', function() {
// call after open. since math matters.
//
//var buffer = 10;
var dialogHeight = j(this).closest('.ui-dialog').height(); // if modal not opened yet.
// Reset modal height to 'auto' so there's no scrollbars. so height() is accurate to real height....
j(this).height('auto');
var windowHeight = $(window).height();
var modalHeight = j(this).height();
// consider titlebar
var titleHeight = j(this).parent().find('.ui-dialog-titlebar').height();
// consider any buttons, too.
var buttonsHeight = j(this).parent().find('.ui-dialog-buttonpane').height();
var buffer = 50;
if(titleHeight) { buffer += titleHeight; }
if(buttonsHeight) { buffer += buttonsHeight; }
if(windowHeight < modalHeight + buffer)
{
j(this).dialog({height: windowHeight - buffer });
} else {
j(this).dialog({height: 'auto'});
//j(this).height('auto');
}
j(this).modaloption('width', j(this).parent().width());
// ie7 title bar bug fix when width: auto;
//
//
// ONLY show vert scrollbar.
j(this).css({overflow: 'hidden', overflowY: 'auto'});
// Also adjust width so no horiz scrollbars, in case new content sticks over edge...
//ji(this).width'div.ui-dialog').width(j('div.ui-dialog').get(0).scrollWidth+5);
j(this).modaloption('position','center');
});
if($(this).modalopened()) { j(this).trigger('dialogopen.modalcenter'); }
};
$.fn.modalopened = function() // saner. initialized may not mean open. async lag.
{
return $(this).parent('.ui-dialog').size() && $(this).dialog('isOpen');
};
$.fn.modaloption = function(key, value)
{
//console.log("SETTING="+key+"="+value);
var container = this;
if($(container).modalopened())
{
//console.log("TRIGOPT="+key+", V="+value);
$(container).dialog('option',key,value);
} else {
// console.log("DELAYED OPT="+key+", V="+value);
}
j(container).bind('dialogopen', function() {
j(container).dialog('option', key, value);
});
};
$.fn.modal = function(e) // called on LINK, so we get url properly....
{
var title = j(this).prop('title');
if(!title) { title = j(this).text(); }
var href = j(this).prop('href');
e.stopPropagation();
j('#modal').load(href, null, function(response)
{
//console.log(response);
j('#modal').dialog({
width: 'auto', // should keep width w/o scrollbar?
title: title,
modal: true,
resizable: false,
draggable: false,
open: function(event, ui) {
j('#modal').modalcenter();
j('#modal').trigger('modalready');
},
buttons: null
});
//console.log(j('#modal .resizable'));
j('#modal .resizable').resize(function(e) { // need to put on inner container so scrollbar doesn't trigger.
//console.log("RESIZED!");
var top = j('#modal').scrollTop();
j('#modal').modalcenter();
// Keep scroll position the same.
j('#modal').scrollTop(top);
});
j('.ui-widget-overlay').click(function() {
j('#modal').dialog('close');
});
})
return false;
};
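// Usage sketch (selector is hypothetical): wire links so their href is loaded
// into the shared #modal container, returning false to cancel navigation:
//   j('a.popup').click(function(e) { return j(this).modal(e); });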
$.fn.modalready = function(callback) // Call if already opened, since won't get called otherwise.
{
if(typeof callback == 'function') // register at same time.
{
j(this).bind('modalready', callback);
}
if(j(this).modalopened())
{
j(this).trigger('modalready');
}
};
$.fn.closemodal = $.fn.modalclose = function(callback)
{
var container = this;
if(callback && !isNumeric(callback))
{
$(container).bind('dialogclose', function() {
if(typeof callback == 'function')
{
callback();
} else if (typeof callback == 'string') { // from controller.
eval(callback);
}
});
}
$(container).dialog('close');
$(container).dialog('destroy');
$(container).html(''); // clear content.
//e.stopPropagation();
return false;
};
$.fn.formerror = function(msg, before)
{
if(!msg) { msg = 'Missing Information'; }
var id = j(this).attr('id');
var errorid = id+"_error";
if(!j("#"+errorid).size())
{
var container = "<div id='"+errorid+"' class='formerror'></div>";
if(before)
{
j('#'+id).before(container);
} else {
// Place as last sibling, since may be stuff to right.
j('#'+id).parent().append(container);
}
}
//console.log(msg);
j('#'+errorid).html(msg).show();
return false;
};
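// Usage sketch (field id hypothetical): attach or refresh an inline error div
// beside a field; the false return lets callers abort a submit handler:
//   if(!j('#email').val()) { return j('#email').formerror('Email is required'); }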
$.fn.originalShow = $.fn.show;
$.fn.originalHide = $.fn.hide;
$.fn.show = function(speed, call)
{
$(this).trigger('show');
return $(this).originalShow(speed,call);
};
$.fn.hide = function(speed, call)
{
$(this).trigger('hide');
return $(this).originalHide(speed,call);
};
$.fn.ghostable = function(text)
{
var original = $(this);
var overlay = original.clone();
overlay.attr('id', original.attr('id')+"_clone");
overlay.attr('name', '');//original.attr('name')+"_ghostable");
overlay.addClass('ghost');
original.after(overlay);
overlay.val(text);
overlay.click(function() { overlay.hide(); original.show().change(); original.focus(); });
overlay.select(function() { overlay.hide(); original.show().change(); original.focus(); });
overlay.focus(function() { overlay.hide(); original.show().change(); original.focus(); });
original.blur(function() { if(!original.val()) { overlay.show().change(); original.hide(); } else { original.show(); overlay.hide(); } });
// IF YOU NEED TO CHANGE PROGRAMATICALLY, CALL blur(); SINCE ABOVE CALLS CHANGE() to trigger any previewing
//original.change(function() { if(!original.val()) { overlay.show(); original.hide(); } else { original.show(); overlay.hide(); } });
overlay.bind('paste', function() { overlay.val(''); setTimeout(function() { original.val(overlay.val()).show().change(); overlay.val(text).hide(); }, 100) });
//
if(original.val())
{
overlay.hide();
} else {
original.hide();
}
};
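// Usage sketch (selector and text are hypothetical): ghost placeholder text
// over an empty input; the clone swaps away on click/select/focus/paste:
//   j('input[name=email]').ghostable('you@example.com');
// Per the caveat above, change values programmatically via val() followed by
// blur() so the overlay state is recomputed.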
$.fn.ghostable2 = function(text) // Disabled element that won't submit or be gathered unless custom text.
{
$(this).wrap("<span class='ghostable_wrapper'/>");
var $original = $(this);
var $parent = $original.closest('span.ghostable_wrapper');
var $overlay = $(this).clone();
$overlay.val(text);
//$("<div class='ghostable_overlay'>"+text+"</div>");
// style the overlay
$overlay.css({
// position the overlay in the same real estate as the original parent element
position: "absolute"
, top: $parent.position().top
, left: $parent.position().left
, width: $parent.outerWidth()
, height: $parent.outerHeight()
, zIndex: 10000
// IE needs a color in order for the layer to respond to mouse events
, backgroundColor: "#fff"
// set the opacity to 0, so the element is transparent
, opacity: 0
})
// attach the click behavior
.click(function (){
$original.show(); // Show
$(this).hide();
// Hide me, focus on original.
// trigger the original event handler
//return $self.trigger("click");
});
// add the overlay to the page
$parent.append($overlay);
};
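// Usage sketch (field id hypothetical): like ghostable(), but the ghost is an
// absolutely positioned clone overlaid on the wrapped element, so its text is
// never gathered or submitted with the form:
//   j('#phone').ghostable2('555-0100');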
$.spin = function()
{
var container = $('body #spin');
if(!container.size())
{
container = $("<div id='spin' style='display: none;'></div>");
$('body').append(container);
$(container).click(function() {
$.hidespin();
});
}
$(container).show();
$(container).spin('large');
};
$.unspin = $.hidespin = function(immediate)
{
if(immediate)
{
$('body #spin').hide();
} else {
$('body #spin').fadeOut('slow');
}
};
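// Usage sketch (url is hypothetical): the pair typically brackets an async call:
//   $.spin();
//   j.ajax(url).always(function() { j.hidespin(); });
// $.fn.spin below does the actual drawing, via spin.js when it is loaded.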
$.fn.spin = function(opts, color) {
var defaults = {
color: '#FFF'
};
var presets = {
"tiny": { lines: 8, length: 2, width: 2, radius: 3 },
"small": { lines: 8, length: 4, width: 3, radius: 5 },
"large": { lines: 10, length: 8, width: 4, radius: 8 }
};
if (typeof Spinner != 'undefined') {
return this.each(function() {
var $this = $(this),
data = $this.data();
if (data.spinner) {
data.spinner.stop();
delete data.spinner;
}
if (opts !== false) {
if (typeof opts === "string") {
if (opts in presets) {
opts = $.extend(defaults, presets[opts]);
} else
{
opts = defaults;//{};
}
core.py
the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# @@-COPYRIGHT-END-@@
# =============================================================================
""" Set of core utilities shared between quantization and svd code """
import re
import tensorflow as tf
from aimet_tensorflow.utils import constants
from aimet_tensorflow.common import op_defs
from aimet_common.utils import AimetLogger
_BIAS_TYPES = ['Add', 'BiasAdd']
# Ops to skip quantization on, eg backprop, etc
_SKIPPED_PREFIXES = ('gradients/', 'RMSProp/', 'Adagrad/', 'Const_', 'HistogramSummary', 'ScalarSummary', 'save/', 'truncated_normal', 'Adam')
# Valid activation ops for quantization end points.
_ACTIVATION_OP_SUFFIXES = ['/Relu6', '/Relu', '/Identity']
# Regular expression for recognizing nodes that are part of batch norm group.
_BATCHNORM_RE = re.compile(r'^(.*)/BatchNorm/batchnorm')
_OP_MAP = op_defs.default_op_map
class OpQuery:
"""
Class for querying a graph's operations and related data.
"""
def __init__(self, graph, op_map=None, ops_to_ignore=None, strict=True):
"""
Constructor
:param graph: The graph to search
:param op_map: The map of operations used to identify op sequences as "one op".
The default op_map used is defined in op_defs.py. Please refer to
that format for passing a custom op_map.
:param ops_to_ignore: List of ops to ignore
:param strict: If strict mode is set to True queries will only return the last ops
at the end of well known "op layers" as defined by the op_map. When False,
queries will return ops at the end of well known layers and, in addition,
all ops which are not "known".
Eg If you have a list of ops in a graph like: Conv2D, BiasAdd, WeirdOp
Strict mode will return ["BiasAdd"] since it knows that Conv2D+BiasAdd are
one logical "layer". When strict mode is disabled it will return ["BiasAdd", "WeirdOp"]
"""
self._log = AimetLogger.get_area_logger(AimetLogger.LogAreas.Utils)
self._graph = graph
self._strict = strict
if op_map:
self._op_map = op_map
else:
self._op_map = _OP_MAP
if ops_to_ignore:
self._ops_to_ignore = ops_to_ignore
else:
self._ops_to_ignore = []
self._trained_vars = graph.get_collection(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
@staticmethod
def _is_op_with_weights(op):
"""
Checks if a given op has weights
:param op: TF op
:return: True, if op has weights, False otherwise
"""
return (op.type in constants.OP_WEIGHT_TYPES and
not op.name.startswith(_SKIPPED_PREFIXES))
@classmethod
def get_weights_for_op(cls, op):
"""
Get the weight tensor for a given op
:param op: TF op
:return: Weight tensor for the op
"""
weights = None
if cls._is_op_with_weights(op):
weights = op.inputs[constants.OP_WEIGHT_INDICES[op.type]]
return weights
@staticmethod
def get_bias_for_op(op):
"""
Get bias tensor for the given op
:param op: TF op
:return: Bias tensor for the op
"""
bias = None
if op.type in _BIAS_TYPES:
bias = op.inputs[constants.OP_WEIGHT_INDICES[op.type]]
return bias
def get_weight_ops(self, ops=None, skip_bias_op=False):
"""
Get all ops that contain weights. If a list of ops is passed, search only ops
from this list. Return the sequenced list of weight ops, always with Conv/FC
first, followed by the bias op, if present.
:param ops: List of ops to use (optional)
:param skip_bias_op: If True, skip the bias op (optional)
:return:
"""
if not ops:
ops = self._graph.get_operations()
ops_with_weights = []
for op in ops:
if self._is_op_with_weights(op):
self._log.debug('Found op w/weights: %s', op.name)
ops_with_weights.append(op)
if not skip_bias_op and self._is_op_with_weights(op):
for consumer in op.outputs[0].consumers():
# Ignore Reshape as it can be placed between MatMul and BiasAdd on Dense layer of Transformer
if consumer.type in ['Reshape'] and len(consumer.outputs[0].consumers()) == 1:
consumer = consumer.outputs[0].consumers()[0]
if consumer.type in _BIAS_TYPES:
self._log.debug('Found op w/bias: %s', consumer.name+'('+consumer.type+')')
ops_with_weights.append(consumer)
reduced_list = [x for x in ops_with_weights if not x.name.startswith(tuple(self._ops_to_ignore))]
return reduced_list
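# Usage sketch (hypothetical graph, not from the original module): for a graph
# built as conv = tf.nn.conv2d(x, w); out = tf.nn.bias_add(conv, b), calling
#   OpQuery(graph).get_weight_ops()
# returns [conv_op, bias_add_op] -- the Conv2D first, then its BiasAdd.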
@staticmethod
def get_weight_inputs(ops):
"""
Given a list of ops, returns a corresponding list of the weight indexes for their inputs
:param ops: List of TF ops
:return:
"""
indices = list()
for op in ops:
if op.type not in constants.OP_WEIGHT_INDICES:
raise ValueError('Op type: '+op.type+' does not contain weights!')
indices.append(constants.OP_WEIGHT_INDICES[op.type])
return indices
def _match_ops(self, current_op, candidate_op_list, matched_ops, visited_ops):
"""
Recursive function that helps traverse a network and find matching ops
:param current_op: Current op to traverse downstream from
:param candidate_op_list: Current list of candidate ops that may result in a match
:param matched_ops: List of already found matched_ops
:param visited_ops: List of all ops that have been visited (to cut short duplicate traversals)
:return:
"""
if any(x in current_op.name for x in _SKIPPED_PREFIXES):
return matched_ops
self._log.debug('Processing op: %s (%s) w/current list=%s', current_op.name, current_op.type, candidate_op_list)
candidate_op_list.append(current_op)
match_len, max_len = op_defs.check_match(candidate_op_list, op_map=self._op_map)
self._log.debug('Got match_len: %s and max_len: %s', str(match_len), str(max_len))
if match_len != 0 and match_len == max_len:
# Matched the maximum sequence possible
matched_ops.append(current_op)
op_type_list = [list_op.type for list_op in candidate_op_list]
self._log.info('Found op match w/new op: %s and sequence: %s', current_op.name, str(op_type_list))
candidate_op_list = []
elif match_len == 0:
# A list length > 1 means the current op_list was a match but not the newly added op. Save the previous last
# op from the list
if len(candidate_op_list) > 1:
# Check if indeed the previous op_list is a match
if op_defs.does_sequence_match(candidate_op_list[:-1], op_map=self._op_map):
matched_op = candidate_op_list[-2]
matched_ops.append(matched_op)
op_type_list = [list_op.type for list_op in candidate_op_list[:-1]]
self._log.info('Found op match: %s and sequence: %s', matched_op.name, str(op_type_list))
# Test to see if the current op is a match by itself
candidate_op_list = []
matched_ops = self._match_ops(current_op, candidate_op_list, matched_ops, visited_ops)
return matched_ops
# No match, reset the list
candidate_op_list = []
# There was some match, but not the max match possible. Continue drilling through the
# outputs to the next ops
for tensor in current_op.outputs:
for consumer in tensor.consumers():
if consumer not in visited_ops:
visited_ops.add(consumer)
self._log.info