code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
defmodule Alixir.OSS do
  @moduledoc """
  `Alixir.OSS` enables putting and deleting objects for Aliyun OSS.

  ## Examples

  ```
  Alixir.OSS.put_object(args...)
  |> Alixir.request()

  Alixir.OSS.delete_object(args...)
  |> Alixir.request()
  ```

  See `put_object/2` and `delete_object/2` for more details.
  """

  alias Alixir.OSS.FileObject
  alias Alixir.OSS.Operation
  alias Alixir.OSS.PresignedURL
  alias Alixir.OSS.PostObjectData

  @doc """
  Put object to OSS.

  Returns an `Alixir.OSS.Operation` struct which can be passed to
  `Alixir.request` to perform the actual request.

  ## Example

      iex> file_object = %Alixir.OSS.FileObject{bucket: "foo_bucket", object_key: "<KEY>", object: File.stream!("test/data/bar.jpg")}
      ...> operation = Alixir.OSS.put_object(file_object, "X-OSS-Object-Acl": "public-read")
      ...> with %Alixir.OSS.Operation{http_method: :put, bucket: "foo_bucket", object_key: "<KEY>",
      ...>   file: %File.Stream{path: "test/data/bar.jpg"}, oss_headers: oss_headers} when is_list(oss_headers) <- operation, do: true
      true
  """
  @spec put_object(%FileObject{}, list()) :: %Operation{http_method: :put}
  def put_object(
        %FileObject{bucket: bucket, object_key: object_key, object: object},
        oss_headers \\ []
      )
      when is_list(oss_headers) do
    %Operation{
      http_method: :put,
      bucket: bucket,
      object_key: object_key,
      file: object,
      oss_headers: oss_headers
    }
  end

  @doc """
  Delete object from OSS.

  Returns an `Alixir.OSS.Operation` struct which can be passed to
  `Alixir.request` to perform the actual request.

  ## Example

      iex> file_object = %Alixir.OSS.FileObject{bucket: "foo_bucket", object_key: "<KEY>"}
      ...> operation = Alixir.OSS.delete_object(file_object)
      ...> with %Alixir.OSS.Operation{http_method: :delete, bucket: "foo_bucket", object_key: "<KEY>",
      ...>   oss_headers: oss_headers} when is_list(oss_headers) <- operation, do: true
      true
  """
  @spec delete_object(%FileObject{}, list()) :: %Operation{http_method: :delete}
  def delete_object(%FileObject{bucket: bucket, object_key: object_key}, oss_headers \\ [])
      when is_list(oss_headers) do
    %Operation{
      http_method: :delete,
      bucket: bucket,
      object_key: object_key,
      oss_headers: oss_headers
    }
  end

  @doc """
  Generate a presigned URL, which could be used by other applications
  (such as a frontend) to operate OSS.
  """
  @spec presigned_url(atom(), %FileObject{}, Keyword.t()) :: String.t()
  defdelegate presigned_url(http_method, file_object, options \\ []), to: PresignedURL

  @doc """
  Generate post-object data for the given file object.

  NOTE(review): the previous doc here duplicated `presigned_url/3`'s
  description; this summary is inferred from the delegate's name — confirm
  against `Alixir.OSS.PostObjectData`.
  """
  @spec post_object_data(%FileObject{}, Keyword.t()) :: map()
  defdelegate post_object_data(file_object, policy_options \\ []), to: PostObjectData

  @doc """
  Build a HEAD request for an object.

  Returns an `Alixir.OSS.Operation` struct which can be passed to
  `Alixir.request`; performing that request tells whether the object
  exists. (The previous doc and spec claimed a `boolean()` return, but —
  like `put_object/2` and `delete_object/2` — this function only builds
  the operation.)
  """
  @spec head_object(String.t(), String.t()) :: %Operation{http_method: :head}
  def head_object(bucket, key) do
    %Operation{
      http_method: :head,
      bucket: bucket,
      object_key: key,
      oss_headers: []
    }
  end
end
|
lib/alixir_oss.ex
| 0.858941 | 0.664362 |
alixir_oss.ex
|
starcoder
|
defmodule Tw.V1_1.Entities do
  @moduledoc """
  Entities data structure and related functions.
  https://developer.twitter.com/en/docs/twitter-api/v1/data-dictionary/object-model/entities
  """

  alias Tw.V1_1.Hashtag
  alias Tw.V1_1.Media
  alias Tw.V1_1.Poll
  alias Tw.V1_1.Schema
  alias Tw.V1_1.Symbol
  alias Tw.V1_1.URL
  alias Tw.V1_1.UserMention

  # Note: the previous `@enforce_keys []` was a no-op and has been removed.
  defstruct([:hashtags, :media, :urls, :user_mentions, :symbols, :polls])

  @typedoc """
  > | field | description |
  > | - | - |
  > | `hashtags` | Represents hashtags which have been parsed out of the Tweet text. |
  > | `media` | Represents media elements uploaded with the Tweet. |
  > | `urls` | Represents URLs included in the text of a Tweet. |
  > | `user_mentions` | Represents other Twitter users mentioned in the text of the Tweet. |
  > | `symbols` | Represents symbols, i.e. $cashtags, included in the text of the Tweet. |
  > | `polls` | Represents Twitter Polls included in the Tweet. |
  >
  """
  @type t :: %__MODULE__{
          hashtags: list(Hashtag.t()) | nil,
          media: list(Media.t()) | nil,
          urls: list(URL.t()) | nil,
          user_mentions: list(UserMention.t()) | nil,
          symbols: list(Symbol.t()) | nil,
          polls: list(Poll.t()) | nil
        }

  # Field-to-decoder mapping: each entity list is decoded element-wise by
  # the corresponding module's decode!/1, preserving nil via Schema.nilable/1.
  @decoders [
    hashtags: Hashtag,
    media: Media,
    urls: URL,
    user_mentions: UserMention,
    symbols: Symbol,
    polls: Poll
  ]

  @spec decode!(map) :: t
  @doc """
  Decode JSON-decoded map into `t:t/0`
  """
  def decode!(json) do
    json =
      Enum.reduce(@decoders, json, fn {field, module}, acc ->
        Map.update(acc, field, nil, Schema.nilable(fn v -> Enum.map(v, &module.decode!/1) end))
      end)

    struct(__MODULE__, json)
  end
end
|
lib/tw/v1_1/entities.ex
| 0.748628 | 0.424263 |
entities.ex
|
starcoder
|
defmodule AWS.Translate do
  @moduledoc """
  Provides translation between one source language and another of the same set of
  languages.
  """

  @doc """
  A synchronous action that deletes a custom terminology.
  """
  def delete_terminology(client, input, options \\ []) do
    request(client, "DeleteTerminology", input, options)
  end

  @doc """
  Gets the properties associated with an asynchronous batch translation job
  including name, ID, status, source and target languages, input/output S3
  buckets, and so on.
  """
  def describe_text_translation_job(client, input, options \\ []) do
    request(client, "DescribeTextTranslationJob", input, options)
  end

  @doc """
  Retrieves a custom terminology.
  """
  def get_terminology(client, input, options \\ []) do
    request(client, "GetTerminology", input, options)
  end

  @doc """
  Creates or updates a custom terminology, depending on whether or not one already
  exists for the given terminology name.

  Importing a terminology with the same name as an existing one will merge the
  terminologies based on the chosen merge strategy. Currently, the only supported
  merge strategy is OVERWRITE, and so the imported terminology will overwrite an
  existing terminology of the same name.

  If you import a terminology that overwrites an existing one, the new terminology
  can take up to 10 minutes to fully propagate and be available for use in a
  translation due to cache policies with the DataPlane service that performs the
  translations.
  """
  def import_terminology(client, input, options \\ []) do
    request(client, "ImportTerminology", input, options)
  end

  @doc """
  Provides a list of custom terminologies associated with your account.
  """
  def list_terminologies(client, input, options \\ []) do
    request(client, "ListTerminologies", input, options)
  end

  @doc """
  Gets a list of the batch translation jobs that you have submitted.
  """
  def list_text_translation_jobs(client, input, options \\ []) do
    request(client, "ListTextTranslationJobs", input, options)
  end

  @doc """
  Starts an asynchronous batch translation job.

  Batch translation jobs can be used to translate large volumes of text across
  multiple documents at once. For more information, see `async`.

  Batch translation jobs can be described with the `DescribeTextTranslationJob`
  operation, listed with the `ListTextTranslationJobs` operation, and stopped with
  the `StopTextTranslationJob` operation.

  Amazon Translate does not support batch translation of multiple source languages
  at once.
  """
  def start_text_translation_job(client, input, options \\ []) do
    request(client, "StartTextTranslationJob", input, options)
  end

  @doc """
  Stops an asynchronous batch translation job that is in progress.

  If the job's state is `IN_PROGRESS`, the job will be marked for termination and
  put into the `STOP_REQUESTED` state. If the job completes before it can be
  stopped, it is put into the `COMPLETED` state. Otherwise, the job is put into
  the `STOPPED` state.

  Asynchronous batch translation jobs are started with the
  `StartTextTranslationJob` operation. You can use the
  `DescribeTextTranslationJob` or `ListTextTranslationJobs` operations to get a
  batch translation job's `JobId`.
  """
  def stop_text_translation_job(client, input, options \\ []) do
    request(client, "StopTextTranslationJob", input, options)
  end

  @doc """
  Translates input text from the source language to the target language.

  For a list of available languages and language codes, see `what-is-languages`.
  """
  def translate_text(client, input, options \\ []) do
    request(client, "TranslateText", input, options)
  end

  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  # Builds, signs (SigV4), and performs the JSON-RPC style POST shared by
  # every Translate action; `action` selects the target via X-Amz-Target.
  defp request(client, action, input, options) do
    client = %{client | service: "translate"}
    host = build_host("translate", client)
    url = build_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AWSShineFrontendService_20170701.#{action}"}
    ]

    payload = encode!(client, input)
    # Signing must happen after the payload is final, since the signature
    # covers the request body.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end

  # Performs the HTTP POST; a 200 with an empty body decodes to nil.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} -> error
    end
  end

  # "local" region targets a custom endpoint (or localhost) for testing.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end

  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/translate.ex
| 0.823399 | 0.482734 |
translate.ex
|
starcoder
|
defmodule Talib.Utility do
  @moduledoc ~S"""
  Module containing utility functions, used in the
  calculation of indicators and oscillators.
  """

  @doc """
  Gets the change in the list.

  The `direction` parameter is a direction filter, which defaults to 0.
  Returns `{:ok, change}`, otherwise `{:error, reason}`.

  If 0, it keeps track of both gains and losses.
  If 1, it replaces losses with a 0. Results are absolute values.
  If -1, it replaces gains with a 0. Results are absolute values.

  ## Examples

      iex> Talib.Utility.change([1, 2, -3])
      {:ok, [0, 1, -5]}

      iex> Talib.Utility.change([1, 2, -3], 1)
      {:ok, [0, 1, 0]}

      iex> Talib.Utility.change([1, 2, nil, -3], 0)
      {:ok, [0, 1, 0, 0]}

      iex> Talib.Utility.change([1, 2, -3], -1)
      {:ok, [0, 0, 5]}

      iex> Talib.Utility.change([1], -1)
      {:ok, [0]}

      iex> Talib.Utility.change([], -1)
      {:error, :no_data}

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec change([number], integer) :: {:ok, [number, ...]} | {:error, atom}
  def change(_data, direction \\ 0)
  def change([], _direction), do: {:error, :no_data}

  def change(data, direction) do
    # Accumulate by prepending (O(1)) and reverse once at the end,
    # instead of appending with `++` which is O(n) per element (O(n²) total).
    {_, deltas} =
      Enum.reduce(data, {nil, []}, fn element, {last_el, acc} ->
        delta =
          cond do
            # nil on either side of the pair yields no change
            element === nil or last_el === nil ->
              0

            # Gain (direction 1) or loss (direction -1): absolute difference
            (direction === 1 and element > last_el) or
                (direction === -1 and element < last_el) ->
              abs(element - last_el)

            # Unfiltered: signed difference
            direction === 0 ->
              element - last_el

            # Filtered out by direction
            true ->
              0
          end

        {element, [delta | acc]}
      end)

    {:ok, Enum.reverse(deltas)}
  end

  @doc """
  Gets the gain in the list.

  Returns `{:ok, gain}`, otherwise `{:error, reason}`.
  Alias for `Talib.Utility.change(data, 1)`.

  ## Examples

      iex> Talib.Utility.gain([1, 2, -3])
      {:ok, [0, 1, 0]}

      iex> Talib.Utility.gain([1])
      {:ok, [0]}

      iex> Talib.Utility.gain([])
      {:error, :no_data}

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec gain([number]) :: {:ok, [number, ...]} | {:error, atom}
  def gain(data), do: change(data, 1)

  @doc """
  Gets the highest number in the list.

  Returns `{:ok, high}`, otherwise `{:error, reason}`.

  ## Examples

      iex> Talib.Utility.high([1])
      {:ok, 1}

      iex> Talib.Utility.high([1, 3])
      {:ok, 3}

      iex> Talib.Utility.high([1, nil, 3])
      {:ok, 3}

      iex> Talib.Utility.high([nil])
      {:ok, nil}

      iex> Talib.Utility.high([])
      {:error, :no_data}

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec high([number]) :: {:ok, number} | {:error, atom}
  def high([]), do: {:error, :no_data}

  def high(data) do
    filtered_data = filter_nil(data)

    # An all-nil list filters down to [] and yields nil rather than raising.
    highest =
      case filtered_data do
        [] -> nil
        [_ | _] -> Enum.max(filtered_data)
      end

    {:ok, highest}
  end

  @doc """
  Gets the loss in the list.

  Returns `{:ok, loss}`, otherwise `{:error, reason}`.
  Alias for `Talib.Utility.change(data, -1)`.

  ## Examples

      iex> Talib.Utility.loss([1, 2, -3])
      {:ok, [0, 0, 5]}

      iex> Talib.Utility.loss([1])
      {:ok, [0]}

      iex> Talib.Utility.loss([])
      {:error, :no_data}

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec loss([number]) :: {:ok, [number, ...]} | {:error, atom}
  def loss(data), do: change(data, -1)

  @doc """
  Gets the lowest number in the list.

  Returns `{:ok, low}`, otherwise `{:error, reason}`.

  ## Examples

      iex> Talib.Utility.low([1])
      {:ok, 1}

      iex> Talib.Utility.low([1, 3])
      {:ok, 1}

      iex> Talib.Utility.low([1, nil, 3])
      {:ok, 1}

      iex> Talib.Utility.low([nil])
      {:ok, nil}

      iex> Talib.Utility.low([])
      {:error, :no_data}

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec low([number]) :: {:ok, number} | {:error, atom}
  def low([]), do: {:error, :no_data}

  def low(data) do
    filtered_data = filter_nil(data)

    # An all-nil list filters down to [] and yields nil rather than raising.
    lowest =
      case filtered_data do
        [] -> nil
        [_ | _] -> Enum.min(filtered_data)
      end

    {:ok, lowest}
  end

  @doc """
  Creates a map with the amount of times each element of a
  list is present in the list.

  Returns `{:ok, occur}`, otherwise `{:error, reason}`.

  ## Examples

      iex> Talib.Utility.occur([1, 2, 3])
      {:ok, %{1 => 1, 2 => 1, 3 => 1}}

      iex> Talib.Utility.occur([1, 2, 3, 2])
      {:ok, %{1 => 1, 2 => 2, 3 => 1}}

      iex> Talib.Utility.occur([1, 2, nil, 3, 2])
      {:ok, %{1 => 1, 2 => 2, 3 => 1, nil => 1}}

      iex> Talib.Utility.occur([])
      {:error, :no_data}

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec occur([number]) :: {:ok, map} | {:error, atom}
  def occur([]), do: {:error, :no_data}

  def occur(data) do
    result =
      Enum.reduce(data, %{}, fn tag, acc ->
        Map.update(acc, tag, 1, &(&1 + 1))
      end)

    {:ok, result}
  end

  @doc """
  Gets the change in the list.

  The `direction` parameter is a direction filter, which defaults to 0.

  If 0, it keeps track of both gains and losses.
  If 1, it replaces losses with a 0. Results are absolute values.
  If -1, it replaces gains with a 0. Results are absolute values.

  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex> Talib.Utility.change!([1, 2, -3])
      [0, 1, -5]

      iex> Talib.Utility.change!([1, 2, -3], 1)
      [0, 1, 0]

      iex> Talib.Utility.change!([1, 2, nil, -3], 0)
      [0, 1, 0, 0]

      iex> Talib.Utility.change!([1, 2, -3], -1)
      [0, 0, 5]

      iex> Talib.Utility.change!([1], -1)
      [0]

      iex> Talib.Utility.change!([], -1)
      ** (NoDataError) no data error

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec change!([number], integer) :: [number, ...] | no_return
  # Single clause, so the default can live on the clause itself (the
  # previous bodiless head was redundant).
  def change!(data, direction \\ 0), do: data |> change(direction) |> to_bang_function()

  @doc """
  Gets the gain in the list.

  Raises `NoDataError` if the given list is an empty list.
  Alias for `Talib.Utility.change!(data, 1)`.

  ## Examples

      iex> Talib.Utility.gain!([1, 2, -3])
      [0, 1, 0]

      iex> Talib.Utility.gain!([1])
      [0]

      iex> Talib.Utility.gain!([])
      ** (NoDataError) no data error

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec gain!([number]) :: [number, ...] | no_return
  def gain!(data), do: change!(data, 1)

  @doc """
  Gets the highest number in the list.

  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex> Talib.Utility.high!([1])
      1

      iex> Talib.Utility.high!([1, 3])
      3

      iex> Talib.Utility.high!([1, nil, 3])
      3

      iex> Talib.Utility.high!([nil])
      nil

      iex> Talib.Utility.high!([])
      ** (NoDataError) no data error

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec high!([number]) :: number | no_return
  def high!(data), do: data |> high |> to_bang_function

  @doc """
  Gets the loss in the list.

  Raises `NoDataError` if the given list is an empty list.
  Alias for `Talib.Utility.change!(data, -1)`.

  ## Examples

      iex> Talib.Utility.loss!([1, 2, -3])
      [0, 0, 5]

      iex> Talib.Utility.loss!([1])
      [0]

      iex> Talib.Utility.loss!([])
      ** (NoDataError) no data error

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec loss!([number]) :: [number, ...] | no_return
  def loss!(data), do: change!(data, -1)

  @doc """
  Gets the lowest number in the list.

  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex> Talib.Utility.low!([1])
      1

      iex> Talib.Utility.low!([1, 3])
      1

      iex> Talib.Utility.low!([1, nil, 3])
      1

      iex> Talib.Utility.low!([nil])
      nil

      iex> Talib.Utility.low!([])
      ** (NoDataError) no data error

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec low!([number]) :: number | no_return
  def low!(data), do: data |> low |> to_bang_function

  @doc """
  Creates a map with the amount of times each element of a
  list is present in the list.

  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex> Talib.Utility.occur!([1, 2, 3])
      %{1 => 1, 2 => 1, 3 => 1}

      iex> Talib.Utility.occur!([1, 2, 3, 2])
      %{1 => 1, 2 => 2, 3 => 1}

      iex> Talib.Utility.occur!([nil, 1, 2, 3, 2])
      %{1 => 1, 2 => 2, 3 => 1, nil => 1}

      iex> Talib.Utility.occur!([])
      ** (NoDataError) no data error

  ## History

  Version: 1.0
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """
  @spec occur!([number]) :: map | no_return
  def occur!(data), do: data |> occur |> to_bang_function

  @doc """
  Filters nil from the input list.

  ## Examples

      iex> Talib.Utility.filter_nil([1, 2, 3, nil, 5])
      [1, 2, 3, 5]
  """
  @spec filter_nil([number | nil]) :: [number]
  def filter_nil(data), do: Enum.filter(data, &(&1 !== nil))

  @doc """
  Transforms an input function to a bang function, which either returns the
  output value or raises errors.

  ## Examples

      iex> Talib.Utility.to_bang_function({:ok, [1, 2, 3, nil, 5]})
      [1, 2, 3, nil, 5]

      iex> Talib.Utility.to_bang_function({:error, :bad_period})
      ** (BadPeriodError) bad period error

      iex> Talib.Utility.to_bang_function({:error, :no_data})
      ** (NoDataError) no data error
  """
  @spec to_bang_function({atom, any | atom}) :: any | no_return
  def to_bang_function({:ok, result}), do: result

  def to_bang_function({:error, error}) do
    case error do
      :bad_period -> raise BadPeriodError
      :no_data -> raise NoDataError
    end
  end
end
|
lib/talib/utility.ex
| 0.917367 | 0.746994 |
utility.ex
|
starcoder
|
defmodule Ease do
import :math, only: [pow: 2, cos: 1, pi: 0, sin: 1, sqrt: 1]
@moduledoc """
Provides a number of popular easing functions.
Useful if you're doing animation or some sort of motion.
See [easings.net](http://easings.net) for nice graphs of each function.
"""
@type easing_function :: :linear | :ease_in_quad | :ease_out_quad |
:ease_in_out_quad | :ease_in_cubic | :ease_out_cubic |
:ease_in_out_cubic | :ease_in_quartic |
:ease_out_quartic | :ease_in_out_quartic |
:ease_in_quintic | :ease_out_quintic |
:ease_in_out_quintic | :ease_in_sine | :ease_out_sine |
:ease_in_out_sine | :ease_in_expo | :ease_out_expo |
:ease_in_out_expo | :ease_in_circular |
:ease_out_circular | :ease_in_out_circular
@doc """
No easing.
Constant velocity with no acceleration.

## Examples

    iex> Ease.map(1..10, :linear)
    [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
"""
@spec linear(number, number, number, number) :: number
def linear(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  # Interpolate by the fraction of the duration that has elapsed.
  start_value + change_in_value * current_time / duration
end
@doc """
Quadratic ease-in.
Acceleration from zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_quad)
    [1.0, 1.1111111111111112, 1.4444444444444444, 2.0,
    2.7777777777777777, 3.777777777777778, 5.0, 6.444444444444445,
    8.11111111111111, 10.0]
"""
@spec ease_in_quad(number, number, number, number) :: number
def ease_in_quad(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration
  start_value + change_in_value * pow(t, 2)
end
@doc """
Quadratic ease-out.
Deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_out_quad)
    [1.0, 0.8888888888888888, 0.5555555555555556, 0.0,
    -0.7777777777777777, -1.7777777777777781, -3.0, -4.444444444444445,
    -6.111111111111111, -8.0]
"""
@spec ease_out_quad(number, number, number, number) :: number
def ease_out_quad(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration
  start_value - change_in_value * pow(t, 2)
end
@doc """
Quadratic ease-in-out.
Acceleration from zero velocity to half-way, then deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_out_quad)
    [1.0, 1.2222222222222223, 1.8888888888888888, 3.0, 4.555555555555555,
    6.444444444444445, 7.999999999999999, 9.11111111111111,
    9.777777777777779, 10.0]
"""
@spec ease_in_out_quad(number, number, number, number) :: number
def ease_in_out_quad(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  # Normalise to [0, 2): first half eases in, second half eases out.
  t = current_time / (duration / 2)

  if t < 1 do
    start_value + change_in_value / 2 * pow(t, 2)
  else
    t = t - 1
    start_value - change_in_value / 2 * (t * (t - 2) - 1)
  end
end
@doc """
Cubic ease-in.
Acceleration from zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_cubic)
    [1.0, 1.0123456790123457, 1.0987654320987654, 1.3333333333333333,
    1.7901234567901234, 2.54320987654321, 3.666666666666666,
    5.234567901234568, 7.320987654320986, 10.0]
"""
@spec ease_in_cubic(number, number, number, number) :: number
def ease_in_cubic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration
  start_value + change_in_value * pow(t, 3)
end
@doc """
Cubic ease-out.
Deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_out_cubic)
    [1.0, 3.6790123456790136, 5.765432098765432, 7.333333333333332,
    8.45679012345679, 9.209876543209877, 9.666666666666666,
    9.901234567901234, 9.987654320987655, 10.0]
"""
@spec ease_out_cubic(number, number, number, number) :: number
def ease_out_cubic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  # Shift the normalised time to [-1, 0] so the curve ends flat.
  t = current_time / duration - 1
  start_value + change_in_value * (pow(t, 3) + 1)
end
@doc """
Cubic ease-in-out.
Acceleration from zero velocity to half-way, then deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_out_cubic)
    [1.0, 1.0493827160493827, 1.3950617283950617, 2.333333333333333,
    4.160493827160494, 6.839506172839507, 8.666666666666668,
    9.604938271604938, 9.950617283950617, 10.0]
"""
@spec ease_in_out_cubic(number, number, number, number) :: number
def ease_in_out_cubic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / (duration / 2)

  if t < 1 do
    start_value + change_in_value / 2 * pow(t, 3)
  else
    t = t - 2
    start_value + change_in_value / 2 * (pow(t, 3) + 2)
  end
end
@doc """
Quartic ease-in.
Acceleration from zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_quartic)
    [1.0, 1.0013717421124828, 1.0219478737997256, 1.1111111111111112,
    1.3511659807956105, 1.8573388203017833, 2.7777777777777777,
    4.293552812071331, 6.6186556927297655, 10.0]
"""
@spec ease_in_quartic(number, number, number, number) :: number
def ease_in_quartic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration
  start_value + change_in_value * pow(t, 4)
end
@doc """
Quartic ease-out.
Deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_out_quartic)
    [1.0, 4.3813443072702345, 6.706447187928669, 8.222222222222221,
    9.142661179698218, 9.64883401920439, 9.88888888888889,
    9.978052126200273, 9.998628257887518, 10.0]
"""
@spec ease_out_quartic(number, number, number, number) :: number
def ease_out_quartic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration - 1
  start_value - change_in_value * (pow(t, 4) - 1)
end
@doc """
Quartic ease-in-out.
Acceleration from zero velocity to half-way, then deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_out_quartic)
    [1.0, 1.010973936899863, 1.1755829903978052, 1.8888888888888888,
    3.8093278463648828, 7.190672153635117, 9.11111111111111,
    9.824417009602195, 9.989026063100138, 10.0]
"""
@spec ease_in_out_quartic(number, number, number, number) :: number
def ease_in_out_quartic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / (duration / 2)

  if t < 1 do
    start_value + change_in_value / 2 * pow(t, 4)
  else
    t = t - 2
    start_value - change_in_value / 2 * (pow(t, 4) - 2)
  end
end
@doc """
Quintic ease-in.
Acceleration from zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_quintic)
    [1.0, 1.000152415790276, 1.004877305288828, 1.037037037037037,
    1.1560737692424934, 1.476299344612102, 2.1851851851851847,
    3.5616521871665907, 5.994360615759791, 10.0]
"""
@spec ease_in_quintic(number, number, number, number) :: number
def ease_in_quintic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration
  start_value + change_in_value * pow(t, 5)
end
@doc """
Quintic ease-out.
Deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_out_quintic)
    [1.0, 5.005639384240209, 7.438347812833409, 8.814814814814813,
    9.523700655387898, 9.843926230757507, 9.962962962962964,
    9.995122694711172, 9.999847584209725, 10.0]
"""
@spec ease_out_quintic(number, number, number, number) :: number
def ease_out_quintic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration - 1
  start_value + change_in_value * (pow(t, 5) + 1)
end
@doc """
Quintic ease-in-out.
Acceleration from zero velocity to half-way, then deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_out_quintic)
    [1.0, 1.002438652644414, 1.0780368846212467, 1.5925925925925923,
    3.4971803078798955, 7.502819692120104, 9.407407407407407,
    9.921963115378754, 9.997561347355585, 10.0]
"""
@spec ease_in_out_quintic(number, number, number, number) :: number
def ease_in_out_quintic(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / (duration / 2)

  if t < 1 do
    start_value + change_in_value / 2 * pow(t, 5)
  else
    t = t - 2
    start_value + change_in_value / 2 * (pow(t, 5) + 2)
  end
end
@doc """
Sinusoidal ease-in.
Acceleration from zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_sine)
    [1.0, 1.1367302228901277, 1.5427664129268237, 2.205771365940052,
    3.105600011929197, 4.214911512821145, 5.499999999999999,
    6.9218187100689805, 8.437166400997626, 10.0]
"""
@spec ease_in_sine(number, number, number, number) :: number
def ease_in_sine(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  # Quarter cosine wave, offset so the curve starts at start_value.
  t = current_time / duration
  -change_in_value * cos(t * pi() / 2) + change_in_value + start_value
end
@doc """
Sinusoidal ease-out.
Deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_out_sine)
    [1.0, 2.562833599002373, 4.078181289931019, 5.499999999999999,
    6.785088487178854, 7.894399988070802, 8.794228634059948,
    9.457233587073175, 9.863269777109872, 10.0]
"""
@spec ease_out_sine(number, number, number, number) :: number
def ease_out_sine(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  t = current_time / duration
  start_value + change_in_value * sin(t * pi() / 2)
end
@doc """
Sinusoidal ease-in-out.
Acceleration from zero velocity to half-way, then deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_out_sine)
    [1.0, 1.271383206463412, 2.0528000059645986, 3.2499999999999996,
    4.718583200498813, 6.281416799501186, 7.749999999999999,
    8.947199994035401, 9.728616793536588, 10.0]
"""
@spec ease_in_out_sine(number, number, number, number) :: number
def ease_in_out_sine(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  # Half cosine wave over the whole duration.
  start_value - change_in_value / 2 * (cos(pi() * current_time / duration) - 1)
end
@doc """
Exponential ease-in.
Acceleration from zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_in_expo)
    [1.0087890625, 1.0189854250977164, 1.041010786547598,
    1.088588198820733, 1.1913610937745218, 1.4133628259525122,
    1.8929130917321118, 2.928798961419414, 5.166436205292806, 10.0]
"""
@spec ease_in_expo(number, number, number, number) :: number
def ease_in_expo(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  start_value + change_in_value * pow(2, 10 * (current_time / duration - 1))
end
@doc """
Exponential ease-out.
Deceleration to zero velocity.

## Examples

    iex> Ease.map(1..10, :ease_out_expo)
    [1.0, 5.833563794707193, 8.071201038580586, 9.107086908267888,
    9.586637174047487, 9.808638906225479, 9.911411801179266,
    9.958989213452401, 9.981014574902284, 9.9912109375]
"""
@spec ease_out_expo(number, number, number, number) :: number
def ease_out_expo(current_time, start_value, change_in_value, duration)
    when is_number(current_time) and is_number(start_value) and
           is_number(change_in_value) and is_number(duration) do
  start_value + change_in_value * (1 - pow(2, -10 * current_time / duration))
end
@doc """
Exponential ease-in-out.
Accelleration from zero velocity to half-way, then decelleration to zero velocity.
## Examples
iex> Ease.map(1..10, :ease_in_out_expo)
[1.00439453125, 1.0205053932737989, 1.0956805468872608,
1.4464565458660559, 3.083218102646403, 7.916781897353598,
9.553543454133944, 9.90431945311274, 9.9794946067262, 9.99560546875]
"""
@spec ease_in_out_expo(number, number, number, number) :: number
def ease_in_out_expo(current_time, start_value, change_in_value, duration)
when is_number(current_time) and is_number(start_value)
and is_number(change_in_value) and is_number(duration) do
current_time = current_time / (duration / 2)
if (current_time < 1) do
change_in_value / 2 * pow(2, 10 * (current_time - 1)) + start_value
else
current_time = current_time - 1
change_in_value / 2 * (0 - pow(2, -10 * current_time) + 2) + start_value
end
end
@doc """
Circular ease-in.
Acceleration from zero velocity.
## Examples
iex> Ease.map(1..10, :ease_in_circular)
[1.0, 1.0557280900008417, 1.2250356126078779, 1.5147186257614298,
1.9377422517014504, 2.5166852264521173, 3.291796067500631,
4.34314575050762, 5.876894374382339, 10.0]
"""
@spec ease_in_circular(number, number, number, number) :: number
def ease_in_circular(current_time, start_value, change_in_value, duration)
when is_number(current_time) and is_number(start_value)
and is_number(change_in_value) and is_number(duration) do
current_time = current_time / duration
-change_in_value * (sqrt(1 - pow(current_time, 2)) - 1) + start_value
end
@doc """
Circular ease-out.
Decelleration to zero velocity.
## Examples
iex> Ease.map(1..10, :ease_out_circular)
[1.0, 5.1231056256176615, 6.65685424949238, 7.708203932499368,
8.483314773547882, 9.06225774829855, 9.48528137423857,
9.774964387392123, 9.94427190999916, 10.0]
"""
@spec ease_out_circular(number, number, number, number) :: number
def ease_out_circular(current_time, start_value, change_in_value, duration)
when is_number(current_time) and is_number(start_value)
and is_number(change_in_value) and is_number(duration) do
current_time = (current_time / duration - 1)
change_in_value * sqrt(1 - pow(current_time, 2)) + start_value
end
@doc """
Circular ease-in-out.
Accelleration from zero velocity to half-way, then decelleration to zero velocity.
## Examples
iex> Ease.map(1..10, :ease_in_out_circular)
[1.0, 1.112517806303939, 1.4688711258507252, 2.1458980337503153,
3.4384471871911697, 7.561552812808831, 8.854101966249683,
9.531128874149275, 9.88748219369606, 10.0]
"""
@spec ease_in_out_circular(number, number, number, number) :: number
def ease_in_out_circular(current_time, start_value, change_in_value, duration)
when is_number(current_time) and is_number(start_value)
and is_number(change_in_value) and is_number(duration) do
current_time = current_time / (duration / 2)
if (current_time < 1) do
-change_in_value / 2 * (sqrt(1 - pow(current_time, 2)) - 1) + start_value
else
current_time = current_time - 2
change_in_value / 2 * (sqrt(1 - pow(current_time, 2)) + 1) + start_value
end
end
@doc """
Map an enumerable into it's eased version.
This is a pretty useless function, I've implemented it mainly for
documentation purposes.
## Examples
iex> Ease.map(1..10, :linear)
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
"""
@spec map(Enumerable.t, easing_function) :: Enumerable.t
def map(enum, fun) do
start_value = Enum.at(enum, 0)
last_value = Enum.at(enum, -1)
change_in_value = last_value - start_value
duration = change_in_value
Enum.map(enum, &apply(Ease, fun, [&1 - start_value, start_value, change_in_value, duration]))
end
end
|
lib/ease.ex
| 0.870032 | 0.692447 |
ease.ex
|
starcoder
|
defmodule Eurexa.EurexaServer do
@moduledoc """
This is a gen server process, handling one Elixir service for Eureka. Each
service requires one `EurexaServer`, which sends the regular heartbeats.
The data sent to the Eureka Server has to comply to this XML schema and can
be sent either as XML or as JSON data. We will use the JSON format.
```lang:xml
<?xml version="1.0" encoding="UTF-8"?>
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" attributeFormDefault="unqualified">
<xsd:element name="instance">
<xsd:complexType>
<xsd:all>
<!-- hostName in ec2 should be the public dns name, within ec2 public dns name will
always resolve to its private IP -->
<xsd:element name="hostName" type="xsd:string" />
<!-- app name
Instructions for adding a new app name -
<a _jive_internal="true" href="/clearspace/docs/DOC-20965"
target="_blank">http://wiki.netflix.com/clearspace/docs/DOC-20965</a> -->
<xsd:element name="app" type="xsd:string" />
<xsd:element name="ipAddr" type="xsd:string" />
<xsd:element name="vipAddress" type="xsd:string" />
<xsd:element name="secureVipAddress" type="xsd:string" />
<xsd:element name="status" type="statusType" />
<xsd:element name="port" type="xsd:positiveInteger" minOccurs="0" />
<xsd:element name="securePort" type="xsd:positiveInteger" />
<xsd:element name="homePageUrl" type="xsd:string" />
<xsd:element name="statusPageUrl" type="xsd:string" />
<xsd:element name="healthCheckUrl" type="xsd:string" />
<xsd:element ref="dataCenterInfo" minOccurs="1" maxOccurs="1" />
<!-- optional -->
<xsd:element ref="leaseInfo" minOccurs="0"/>
<!-- optional app specific metadata -->
<xsd:element name="metadata" type="appMetadataType" minOccurs="0" />
</xsd:all>
</xsd:complexType>
</xsd:element>
<xsd:element name="dataCenterInfo">
<xsd:complexType>
<xsd:all>
<xsd:element name="name" type="dcNameType" />
<!-- metadata is only required if name is Amazon -->
<xsd:element name="metadata" type="amazonMetdataType" minOccurs="0"/>
</xsd:all>
</xsd:complexType>
</xsd:element>
<xsd:element name="leaseInfo">
<xsd:complexType>
<xsd:all>
<!-- (optional) if you want to change the length of lease - default if 90 secs -->
<xsd:element name="evictionDurationInSecs" minOccurs="0"
type="xsd:positiveInteger"/>
</xsd:all>
</xsd:complexType>
</xsd:element>
<xsd:simpleType name="dcNameType">
<!-- Restricting the values to a set of value using 'enumeration' -->
<xsd:restriction base = "xsd:string">
<xsd:enumeration value = "MyOwn"/>
<xsd:enumeration value = "Amazon"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="statusType">
<!-- Restricting the values to a set of value using 'enumeration' -->
<xsd:restriction base = "xsd:string">
<xsd:enumeration value = "UP"/>
<xsd:enumeration value = "DOWN"/>
<xsd:enumeration value = "STARTING"/>
<xsd:enumeration value = "OUT_OF_SERVICE"/>
<xsd:enumeration value = "UNKNOWN"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="amazonMetdataType">
<!-- From <a class="jive-link-external-small"
href="http://docs.amazonwebservices.com/AWSEC2/latest/DeveloperGuide/index.html?AESDG-chapter-instancedata.html" target="_blank">http://docs.amazonwebservices.com/AWSEC2/latest/DeveloperGuide/index.html?AESDG-chapter-instancedata.html</a> -->
<xsd:all>
<xsd:element name="ami-launch-index" type="xsd:string" />
<xsd:element name="local-hostname" type="xsd:string" />
<xsd:element name="availability-zone" type="xsd:string" />
<xsd:element name="instance-id" type="xsd:string" />
<xsd:element name="public-ipv4" type="xsd:string" />
<xsd:element name="public-hostname" type="xsd:string" />
<xsd:element name="ami-manifest-path" type="xsd:string" />
<xsd:element name="local-ipv4" type="xsd:string" />
<xsd:element name="hostname" type="xsd:string"/>
<xsd:element name="ami-id" type="xsd:string" />
<xsd:element name="instance-type" type="xsd:string" />
</xsd:all>
</xsd:complexType>
<xsd:complexType name="appMetadataType">
<xsd:sequence>
<!-- this is optional application specific name, value metadata -->
<xsd:any minOccurs="0" maxOccurs="unbounded" processContents="skip"/>
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
```
"""
@type status_t :: :UNKNOWN | :UP | :DOWN | :STARTING | :OUT_OF_SERVICE
defstruct app: "my_app",
hostName: "localhost",
ipAddr: "127.0.0.1",
vipAddress: "",
secureVipAddress: "",
status: :UNKNOWN,
port: 4001,
securePort: 0,
homePageUrl: nil,
statusPageUrl: nil,
healthCheckUrl: nil,
dataCenterInfo: %{
name: "MyOwn",
metadata: %{}
},
leaseInfo: %{evictionDurationInSecs: 30},
metadata: %{}
use GenServer
require Logger
@doc """
Starts the Eurexa Server process for application `app_name`.
"""
def start_link(app_name) do
GenServer.start_link(__MODULE__, [app_name], name: __MODULE__)
end
@impl true
def init([app_name]) do
app = %__MODULE__{app: app_name, status: :UP}
conf = Application.get_env(:eurexa, app_name)
server = conf[:eureka_server]
port = conf[:eureka_port]
prefix = conf[:eureka_prefix]
version = conf[:eureka_version]
# Pick the API adapter matching the configured Eureka protocol version.
mod = case version do
1 -> Eurexa.EurekaV1
2 -> Eurexa.EurekaV2
end
eureka_base_url = "http://#{server}:#{port}#{prefix}"
# Register first so the heartbeat timer never fires for an instance the
# Eureka server does not know yet (and no timer leaks if registration fails).
{:ok, resp} = mod.register(eureka_base_url, app)
Logger.info "Registration succeeded with response #{inspect resp}"
timer = trigger_heartbeat(eureka_base_url, app, mod)
{:ok, {app, timer, eureka_base_url, mod}}
end
@impl true
def terminate(_reason, {app, timer, eureka_base_url, mod}) do
# `app` is a struct: interpolate its printable `app` field rather than the
# struct itself, which would raise Protocol.UndefinedError (String.Chars).
Logger.info "Terminating: deregistering #{app.app} from Eureka"
:timer.cancel(timer)
mod.deregister(eureka_base_url, app.app, app.hostName)
end
@doc """
Initializes the interval timer sending heartbeats to Eureka
after 3/4 of the eviction interval, which is usually 30 seconds.
So, we are sending heartbeats every 22.5 seconds.
"""
def trigger_heartbeat(eureka_base_url,
%__MODULE__{app: app_name, hostName: hostname,
leaseInfo: %{evictionDurationInSecs: interval}},
mod) do
# interval is in seconds; * 750 yields 3/4 of it in milliseconds.
{:ok, tref} = :timer.apply_interval(interval * 750, mod, :send_heartbeat, [eureka_base_url, app_name, hostname])
tref
end
@doc """
Encodes the given `%EurexaServer{}` struct as a JSON string via Poison.
"""
def make_instance_data(app = %__MODULE__{}) do
{:ok, json} = Poison.encode(app)
json
end
end
|
lib/eurexa/eurexa_server.ex
| 0.766075 | 0.583263 |
eurexa_server.ex
|
starcoder
|
defmodule Day4 do
@moduledoc """
Day 4 bingo puzzle: plays the called numbers from `input.txt` against a set
of boards (see the `Bingo` module) and scores the winning boards.
"""
@doc """
Reads and parses `input.txt`, then prints the answers for part 1 and part 2.
"""
def run() do
inhalt = read_input()
|> adjust_input()
|> parse_input()
solve(inhalt)
solve_2(inhalt)
end
# Part 1: call numbers until exactly one board wins, then print its score.
def solve(%{} = game) do
bingoed_boards = get_winning_boards(game.boards)
cond do
length(game.numbers) == 0 ->
IO.puts("There are *NO* winning boards")
length(bingoed_boards) == 0 ->
next_number = hd(game.numbers)
new_boards = Enum.map(game.boards, fn board-> Bingo.call_number(board, next_number) end)
solve(%{game | boards: new_boards, numbers: tl(game.numbers), last_number: next_number})
length(bingoed_boards) == 1 ->
# IO.inspect(bingoed_boards, label: "bingoed board")
# IO.inspect(Enum.at(game.boards, hd(bingoed_boards)-1), label: "Bingoed board")
IO.puts("The winning board is #{bingoed_boards}")
calculated_answer = calculate_answer(Enum.at(game.boards, hd(bingoed_boards)-1), game.last_number)
IO.puts("The calculated answer is #{calculated_answer}")
true ->
# IO.inspect(bingoed_boards, label: "bingoed boards")
IO.puts("The winning games are #{bingoed_boards}")
end
end
# Part 2: keep calling numbers until every board has won; score the last winner.
def solve_2(%{} = game) do
IO.puts("-----------")
IO.puts("Part 2")
Map.put(game, :last_winner, -1)
|> Map.put(:last_number, -1)
|> solve_2a()
end
def solve_2a(%{} = game) do
bingoed_boards_0 = get_winning_boards(game.boards)
cond do
length(game.numbers) == 0 or length(game.boards) == length(bingoed_boards_0) ->
solve_2b(game)
true ->
next_number = hd(game.numbers)
new_boards = Enum.map(game.boards, fn board-> Bingo.call_number(board, next_number) end)
bingoed_boards_1 = get_winning_boards(new_boards)
game = figure_out_last_winner(game, bingoed_boards_0, bingoed_boards_1, next_number)
solve_2a(%{game | boards: new_boards, numbers: tl(game.numbers)})
end
end
# NOTE(review): if no board ever wins, last_winner stays -1 and
# Enum.at(boards, -2) silently scores the wrong board — confirm the input
# always produces a final winner.
def solve_2b(%{boards: boards, last_winner: last_winner, last_number: last_number} = _game) do
answer = calculate_answer(Enum.at(boards, last_winner-1), last_number)
IO.puts("second problem board = #{last_winner}, number=#{last_number}, answer = #{answer}")
end
# No new winner this round: keep the game state unchanged.
def figure_out_last_winner(game, bb0, bb1, _number) when length(bb0) == length(bb1), do: game
def figure_out_last_winner(game, bb0, bb1, number) do
IO.inspect({bb0, bb1, number}, label: "figure last winner")
[last_game] = Enum.take(bb1 -- bb0, 1) #pick an undefined one if multiple boards bingoed with one called number
%{game | last_winner: last_game, last_number: number}
end
# Score = sum of the board's unmarked cells times the last called number.
def calculate_answer(board, number) do
IO.inspect({board, number}, label: "calculate_answer")
unmarked = Enum.filter(Map.keys(board.values), fn key -> elem(Map.get(board.values,key),0) == false end)
Enum.sum(unmarked) * number
end
# Returns the 1-based indices of the boards that currently have bingo.
def get_winning_boards(boards) do
Enum.filter(1..length(boards), fn index -> Bingo.is_bingo?(Enum.at(boards, index-1)) end)
end
def read_input(), do: File.read!("input.txt")
# Drops blank/short lines so only the number line and board rows remain.
def adjust_input(text_lines) do
String.trim(text_lines)
|> String.split("\n")
|> Enum.filter(fn l -> String.length(l) > 5 end)
# |> IO.inspect(label: "lines in parse_input")
end
def parse_input(text_list) do
[numbers] = Enum.slice(text_list, 0..0)
numbers = Enum.map(String.split(numbers, ","), fn n -> String.to_integer(n) end)
# make sure the number we call are unique!
{true} = {numbers == Enum.uniq(numbers)}
boards_lines = Enum.slice(text_list, 1..-1)
%{numbers: numbers, boards: [], last_number: -1}
|> parse_boards(boards_lines)
end
def parse_boards(%{} = game, boards_lines) when length(boards_lines) == 0, do: game
# Peels off five lines at a time; each group of five is one board.
def parse_boards(%{} = game, boards_lines) do
next_board_lines = Enum.slice(boards_lines, 0..4)
rest_of_boards_lines = Enum.slice(boards_lines, 5..-1)
next_board = parse_board(next_board_lines)
new_boards = game.boards ++ [next_board] #|> IO.inspect
%{game| boards: new_boards}
|> parse_boards(rest_of_boards_lines)
end
def parse_board([_h|_t] = board_lines) when length(board_lines) == 5 do
Enum.map(board_lines, fn line -> String.split(line, " ", trim: true) end)
|> Enum.map(fn list_of_numbers -> parse_board_line(list_of_numbers) end)
|> Bingo.new()
end
def parse_board_line([_h|_t] = board_line) when length(board_line) == 5 do
Enum.map(board_line, fn val -> String.to_integer(val) end)
end
# Self-contained demo running both parts against the worked example.
def example() do
input = """
7,4,9,5,11,17,23,2,0,14,21,24,10,16,13,6,15,25,12,22,18,20,8,19,3,26,1
22 13 17 11 0
8 2 23 4 24
21 9 14 16 7
6 10 3 18 5
1 12 20 15 19
3 15 0 2 22
9 18 13 17 5
19 8 7 25 23
20 11 10 24 4
14 21 16 12 6
14 21 17 24 4
10 16 15 9 19
18 8 23 26 20
22 11 13 6 5
2 0 12 3 7
"""
game = adjust_input(input)
|> parse_input()
# |> IO.inspect(label: "parsed input")
solve(game)
adjust_input(input)
|> parse_input()
|> solve_2()
end
end
|
apps/day4/lib/day4.ex
| 0.685529 | 0.512815 |
day4.ex
|
starcoder
|
defmodule Time do
@moduledoc """
Provides a `time/1` macro that prints how long an expression takes.
NOTE(review): this module name shadows Elixir's built-in `Time` — confirm
the clash is intentional.
"""
defmacro time(exp) do
# Wrap the expression in :timer.tc/1, print the elapsed microseconds,
# then evaluate to the expression's own result.
quote do
{elapsed, result} = :timer.tc(fn -> unquote(exp) end)
IO.inspect("time: #{elapsed} micro second")
IO.inspect("-------------")
result
end
end
end
defmodule Network do
@moduledoc """
`defnetwork/2` macro: translates a network description written with the
`|>` operator into a function returning a flat list of layer tuples
(filters, weights, biases, activation functions, RNN/LSTM cells, ...).
"""
defmacro defnetwork(name, do: body) do
# Extract the single argument name from the function head, then parse the
# piped body into a list of layer tuples.
{_, _, [{arg, _, _}]} = name
body1 = parse(body, arg)
quote do
def unquote(name) do
unquote(body1)
end
end
end
# filter: {:filter, weights, stride, learning_rate, momentum}
def parse({:f, _, [x, y]}, _) do
quote do
{:filter, Cmatrix.new(unquote(x), unquote(y), 0.1), 1, 0.1,
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
def parse({:f, _, [x, y, lr]}, _) do
quote do
{:filter, Cmatrix.new(unquote(x), unquote(y), 0.1), 1, unquote(lr),
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
def parse({:f, _, [x, y, lr, z]}, _) do
quote do
{:filter, Cmatrix.new(unquote(x), unquote(y), unquote(z)), 1, unquote(lr),
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
def parse({:f, _, [x, y, lr, z, st]}, _) do
quote do
{:filter, Cmatrix.new(unquote(x), unquote(y), unquote(z)), unquote(st), unquote(lr),
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
# pooling
def parse({:pool, _, [x]}, _) do
quote do
{:pooling, unquote(x)}
end
end
# padding
def parse({:pad, _, [x]}, _) do
quote do
{:padding, unquote(x)}
end
end
# constant weight for test
def parse({:cw, _, [x]}, _) do
quote do
{:weight, Matrex.new(unquote(x)), 0.1, 0}
end
end
# constant filter for test
def parse({:cf, _, [x]}, _) do
quote do
{:filter, Matrex.new(unquote(x)), 1, 0.1, 0}
end
end
# constant bias for test
def parse({:cb, _, [x]}, _) do
quote do
{:bias, Matrex.new(unquote(x)), 0.1, 0}
end
end
# weight: {:weight, matrix, learning_rate, momentum}
def parse({:w, _, [x, y]}, _) do
quote do
{:weight, Cmatrix.new(unquote(x), unquote(y), 0.1), 0.1,
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
def parse({:w, _, [x, y, lr]}, _) do
quote do
{:weight, Cmatrix.new(unquote(x), unquote(y), 0.1), unquote(lr),
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
def parse({:w, _, [x, y, lr, z]}, _) do
quote do
{:weight, Cmatrix.new(unquote(x), unquote(y), unquote(z)), unquote(lr),
Cmatrix.zeros(unquote(x), unquote(y))}
end
end
# rnn
def parse({:rnn, _, [x, y, r]}, _) do
quote do
{:rnn, Cmatrix.zeros(unquote(x), unquote(y)), unquote(gen_rnn(x, y, r))}
end
end
def parse({:rnn, _, [x, y, r, lr]}, _) do
quote do
{:rnn, Cmatrix.zeros(unquote(x), unquote(y)), unquote(gen_rnn(x, y, r)), unquote(lr)}
end
end
def parse({:rnn, _, [x, y, r, lr, z]}, _) do
quote do
{:rnn, Cmatrix.zeros(unquote(x), unquote(y)), unquote(gen_rnn(x, y, z, r)), unquote(lr)}
end
end
# LSTM {:lstm, c_init,[]}
def parse({:lstm, _, [x, y, r]}, _) do
quote do
{:lstm, Cmatrix.new(unquote(x), unquote(y)), Cmatrix.zeros(unquote(x), unquote(y)),
unquote(gen_lstm(x, y, r)), 0.1}
end
end
def parse({:lstm, _, [x, y, r, lr]}, _) do
quote do
{:lstm, Cmatrix.new(unquote(x), unquote(y)), Cmatrix.zeros(unquote(x), unquote(y)),
unquote(gen_lstm(x, y, r)), unquote(lr)}
end
end
def parse({:lstm, _, [x, y, r, lr, z]}, _) do
quote do
{:lstm, Cmatrix.new(unquote(x), unquote(y)), Cmatrix.zeros(unquote(x), unquote(y)),
unquote(gen_lstm(x, y, z, r)), unquote(lr)}
end
end
# bias
def parse({:b, _, [x]}, _) do
quote do
{:bias, Cmatrix.zeros(1, unquote(x)), 0.1, Cmatrix.zeros(1, unquote(x))}
end
end
def parse({:b, _, [x, lr]}, _) do
quote do
{:bias, Cmatrix.zeros(1, unquote(x)), unquote(lr), Cmatrix.zeros(1, unquote(x))}
end
end
# sigmoid
def parse({:sigmoid, _, nil}, _) do
quote do
{:function, fn x -> DP.sigmoid(x) end, fn x -> DP.dsigmoid(x) end, :sigmoid}
end
end
# identity
def parse({:ident, _, nil}, _) do
quote do
{:function, fn x -> DP.ident(x) end, fn x -> DP.dident(x) end, :ident}
end
end
# relu
def parse({:relu, _, nil}, _) do
quote do
{:function, fn x -> DP.relu(x) end, fn x -> DP.drelu(x) end, :relu}
end
end
# softmax
def parse({:softmax, _, nil}, _) do
quote do
{:softmax, fn x -> DP.softmax(x) end, fn x -> DP.dsoftmax(x) end}
end
end
# flatten
def parse({:flatten, _, nil}, _) do
quote do
{:flatten}
end
end
def parse({x, _, nil}, _) do
x
end
def parse({:|>, _, exp}, arg) do
parse(exp, arg)
end
def parse([{arg, _, nil}, exp], arg) do
[parse(exp, arg)]
end
def parse([exp1, exp2], arg) do
Enum.reverse([parse(exp2, arg)] ++ Enum.reverse(parse(exp1, arg)))
end
# NOTE(review): this clause only prints and returns :ok — the caller gets no
# usable AST. Consider raising a CompileError instead.
def parse(x, _) do
:io.write(x)
IO.puts("Syntax error in defnetwork")
end
def gen_rnn(_, _, 0) do
[]
end
def gen_rnn(x, y, r) do
quote do
[
{Cmatrix.new(unquote(x), unquote(y)), Cmatrix.new(unquote(x), unquote(y)),
Cmatrix.zeros(1, unquote(y))}
| unquote(gen_rnn(x, y, r - 1))
]
end
end
# [{wx1,wh1,b1},..{wxr,whr,br}]
def gen_rnn(_, _, _, 0) do
[]
end
def gen_rnn(x, y, z, r) do
quote do
[
{Cmatrix.new(unquote(x), unquote(y), unquote(z)),
Cmatrix.new(unquote(x), unquote(y), unquote(z)), Cmatrix.zeros(1, unquote(y))}
| unquote(gen_rnn(x, y, z, r - 1))
]
end
end
def gen_lstm(_, _, 0) do
[]
end
# Bug fix: the tail recursion previously called gen_rnn/3, so every layer
# after the first got y-wide RNN matrices instead of the y*4-wide gate
# matrices an LSTM needs.
def gen_lstm(x, y, r) do
quote do
[
{Cmatrix.new(unquote(x), unquote(y * 4)), Cmatrix.new(unquote(x), unquote(y * 4)),
Cmatrix.zeros(unquote(x), unquote(y * 4))}
| unquote(gen_lstm(x, y, r - 1))
]
end
end
# wx = wx(f),wx(g),wx(i),wx(o)
# wh = wh(f),wh(g),wh(i),wh(o)
# [{wx1,wh1,b1},...,{wxr,whr,br}]
def gen_lstm(_, _, _, 0) do
[]
end
# Bug fix: recurse with gen_lstm/4 (keeping the init value z) instead of
# gen_rnn/3, for the same reason as gen_lstm/3 above.
def gen_lstm(x, y, z, r) do
quote do
[
{Cmatrix.new(unquote(x), unquote(y * 4), unquote(z)),
Cmatrix.new(unquote(x), unquote(y * 4), unquote(z)),
Cmatrix.zeros(unquote(x), unquote(y * 4))}
| unquote(gen_lstm(x, y, z, r - 1))
]
end
end
end
|
lib/macro.ex
| 0.572006 | 0.671948 |
macro.ex
|
starcoder
|
defmodule Rayray.Tuple do
@moduledoc """
Map-based tuples, points, vectors and colors plus their arithmetic,
as used by the ray tracer. A point has `w == 1.0`, a vector `w == 0.0`.
"""
def point(x, y, z) do
tuple(x, y, z, 1.0)
end
def vector(x, y, z) do
tuple(x, y, z, 0.0)
end
def color(r, g, b) do
%{red: r, green: g, blue: b}
end
def tuple(x, y, z, w) do
%{x: x, y: y, z: z, w: w}
end
# Component-wise addition; the color clause must come first so color maps
# are not treated as tuples.
def add(%{red: r1, green: g1, blue: b1}, %{red: r2, green: g2, blue: b2}) do
%{red: r1 + r2, green: g1 + g2, blue: b1 + b2}
end
def add(t1, t2) do
%{x: t1[:x] + t2[:x], y: t1[:y] + t2[:y], z: t1[:z] + t2[:z], w: t1[:w] + t2[:w]}
end
def subtract(%{red: r1, green: g1, blue: b1}, %{red: r2, green: g2, blue: b2}) do
%{red: r1 - r2, green: g1 - g2, blue: b1 - b2}
end
def subtract(t1, t2) do
%{x: t1[:x] - t2[:x], y: t1[:y] - t2[:y], z: t1[:z] - t2[:z], w: t1[:w] - t2[:w]}
end
def negate(%{x: x, y: y, z: z, w: w}) do
%{x: -x, y: -y, z: -z, w: -w}
end
# Hadamard (component-wise) product for colors; scalar product otherwise.
def multiply(%{red: r1, green: g1, blue: b1}, %{red: r2, green: g2, blue: b2}) do
%{red: r1 * r2, green: g1 * g2, blue: b1 * b2}
end
def multiply(%{red: r1, green: g1, blue: b1}, s) do
%{red: r1 * s, green: g1 * s, blue: b1 * s}
end
def multiply(%{x: x, y: y, z: z, w: w}, s) do
%{x: x * s, y: y * s, z: z * s, w: w * s}
end
def divide(%{x: x, y: y, z: z, w: w}, s) do
%{x: x / s, y: y / s, z: z / s, w: w / s}
end
def magnitude(%{x: x, y: y, z: z, w: w}) do
:math.sqrt(x * x + y * y + z * z + w * w)
end
def normalize(%{x: x, y: y, z: z, w: w} = v) do
# Hoist the magnitude: it is identical for all four components, so there
# is no reason to recompute it per component.
m = magnitude(v)
tuple(x / m, y / m, z / m, w / m)
end
def dot(a, b) do
a[:x] * b[:x] + a[:y] * b[:y] + a[:z] * b[:z] + a[:w] * b[:w]
end
def cross(a, b) do
vector(
a[:y] * b[:z] - a[:z] * b[:y],
a[:z] * b[:x] - a[:x] * b[:z],
a[:x] * b[:y] - a[:y] * b[:x]
)
end
# True when every component of p1 is within epsilon of p2's.
def fuzzy_equal?(p1, p2, epsilon) do
p1
|> Map.keys()
|> Enum.reduce_while(true, fn
_val, false ->
{:halt, false}
el, true ->
# Bug fix: compare the *absolute* difference. The bare subtraction
# accepted any pair where p1's component was smaller than p2's,
# no matter how far apart they were.
if abs(Map.fetch!(p1, el) - Map.fetch!(p2, el)) < epsilon do
{:cont, true}
else
{:halt, false}
end
end)
end
def reflect(in_vec, normal_vec) do
subtract(
in_vec,
multiply(multiply(normal_vec, 2), dot(in_vec, normal_vec))
)
end
end
|
lib/rayray/tuple.ex
| 0.754915 | 0.832407 |
tuple.ex
|
starcoder
|
defmodule PersQueue do
use Application
@moduledoc """
`PersQueue` is persistent queue with `Mnesia` backend.
## Installation
1) Add `pers_queue` to your deps:
```elixir
def deps do
[
{:pers_queue, "~> 0.0.1"}
]
end
```
2) Add `pers_queue` to the list of application dependencies:
```elixir
def application do
[applications: [:pers_queue]]
end
```
## Persistent Setup
PersQueue runs out of the box, but by default all messages are stored in-memory.
To persist messages across application restarts, run the following mix task:
```bash
$ mix pers_queue.setup
```
This will create the Mnesia schema and message database for you.
## Usage
```elixir
# Add message:
PersQueue.add("consumer1", "a") # => :ok
PersQueue.add("consumer1", "b") # => :ok
# Get message:
PersQueue.get("consumer1") # => %PersQueue.Message{consumer: "consumer1", content: "a", id: 1}
PersQueue.get("consumer1") # => %PersQueue.Message{consumer: "consumer1", content: "b", id: 2}
# Ack message by message id:
PersQueue.ack("consumer1", 1) # => :ok
# Reject message by message id:
PersQueue.reject("consumer1", 2) # => :ok
```
"""
@doc """
Starts the PersQueue application.
Application behaviour callback; boots the supervision tree.
"""
def start(_type, _args) do
PersQueue.Supervisor.start_link()
end
# The public API below is delegated wholesale to PersQueue.ServerSupervisor,
# which routes each call to the per-consumer server process.
@doc """
Enqueues a message to persistent queue.
Accepts the consumer name and message content.
## Example
```
PersQueue.add("consumer1", "a") # => :ok
PersQueue.add("consumer2", "b") # => :ok
```
"""
@spec add(consumer :: String.t(), message_content :: String.t()) :: :ok
defdelegate add(consumer, message_content), to: PersQueue.ServerSupervisor
@doc """
Gets a message from persistent queue.
Accepts the consumer name.
## Example
```
PersQueue.get("consumer1") # => %PersQueue.Message{consumer: "consumer1", content: "a", id: 1}
```
"""
@spec get(consumer :: String.t()) :: PersQueue.Message.t()
defdelegate get(consumer), to: PersQueue.ServerSupervisor
@doc """
Acks a message and deletes it from running list.
Accepts the consumer name and message id.
## Example
```
PersQueue.ack("consumer1", 1) # => :ok
```
"""
@spec ack(consumer :: String.t(), message_id :: pos_integer) :: :ok
defdelegate ack(consumer, message_id), to: PersQueue.ServerSupervisor
@doc """
Rejects a message and moves it to the end of persistent queue.
Accepts the consumer name and message id.
## Example
```
PersQueue.reject("consumer1", 1) # => :ok
```
"""
@spec reject(consumer :: String.t(), message_id :: pos_integer) :: :ok
defdelegate reject(consumer, message_id), to: PersQueue.ServerSupervisor
end
|
lib/pers_queue.ex
| 0.793866 | 0.75158 |
pers_queue.ex
|
starcoder
|
defmodule Xandra.Prepared do
@moduledoc """
A data structure used to internally represent prepared queries.
These are the publicly accessible fields of this struct:
* `:tracing_id` - the tracing ID (as a UUID binary) if tracing was enabled,
or `nil` if no tracing was enabled. See the "Tracing" section in `Xandra.execute/4`.
All other fields are documented in `t:t/0` to avoid Dialyzer warnings,
but are not meant to be used by users.
"""
defstruct [
:statement,
:values,
:id,
:bound_columns,
:result_columns,
:default_consistency,
:protocol_module,
:tracing_id
]
@type t :: %__MODULE__{
statement: Xandra.statement(),
values: Xandra.values() | nil,
id: binary | nil,
bound_columns: list | nil,
result_columns: list | nil,
default_consistency: atom | nil,
protocol_module: module | nil,
tracing_id: binary | nil
}
# Orders a map of named parameters into the positional list dictated by
# :bound_columns; raises ArgumentError if any bound name is missing.
@doc false
def rewrite_named_params_to_positional(%__MODULE__{} = prepared, params)
when is_map(params) do
Enum.map(prepared.bound_columns, fn {_keyspace, _table, name, _type} ->
case Map.fetch(params, name) do
{:ok, value} ->
value
:error ->
raise ArgumentError,
"missing named parameter #{inspect(name)} for prepared query, " <>
"got: #{inspect(params)}"
end
end)
end
defimpl DBConnection.Query do
alias Xandra.Frame
def parse(prepared, _options) do
prepared
end
# Map-shaped values are first rewritten into positional order, then
# handled by the list clause below.
def encode(prepared, values, options) when is_map(values) do
encode(prepared, @for.rewrite_named_params_to_positional(prepared, values), options)
end
def encode(prepared, values, options) when is_list(values) do
Frame.new(:execute, Keyword.take(options, [:compressor, :tracing]))
|> prepared.protocol_module.encode_request(%{prepared | values: values}, options)
|> Frame.encode(prepared.protocol_module)
end
# Decoding is delegated to the protocol module captured at prepare time.
def decode(prepared, %Frame{} = frame, options) do
prepared.protocol_module.decode_response(frame, prepared, options)
end
def describe(prepared, _options) do
prepared
end
end
defimpl Inspect do
import Inspect.Algebra
# Only :statement and :tracing_id are rendered; the id and column
# metadata stay hidden as internals.
def inspect(prepared, options) do
properties = [
statement: prepared.statement,
tracing_id: prepared.tracing_id
]
concat(["#Xandra.Prepared<", to_doc(properties, options), ">"])
end
end
end
|
lib/xandra/prepared.ex
| 0.828037 | 0.46308 |
prepared.ex
|
starcoder
|
defmodule Distillery.Releases.Profile do
@moduledoc """
Represents the configuration profile for a specific environment and release.
More generally, a release has a profile, as does an environment, and
when determining the configuration for a release in a given environment, the
environment profile overrides the release profile.
## Options
* `:output_dir` - The directory to place release artifacts (default: `_build/<env>/rel/<name>`)
* `:vm_args` - When set, defines the path to a vm.args template to use
* `:cookie` - The distribution cookie to use when one is not provided via alternate means
* `:sys_config` - When set, defines the path to a custom sys.config file to use in the release
* `:no_dot_erlang` - Determines whether or not to pass `:no_dot_erlang` to `:systools`
* `:executable` - When set, builds the release into a self-extracting tar archive.
This setting can either be `true`, a keyword list of options implying `true`, or `false`
* `:transient` - One of the options possible for `:executable`. Sets the archive to remove all extracted
contents once execution finishes. NOTE: Only removes the self-extracted directory.
* `:erl_opts` - A string containing command-line arguments to pass to `erl` when running the release.
* `:run_erl_env` - A string containing environment variables to set when using `run_erl`
* `:dev_mode` - Assembles the release in a special development mode, optimized for quick feedback loops;
rather than copying files to the output directory, they are symlinked, avoiding the expensive
copies, and allowing one to run `mix compile`, restart the release, and have the changes be picked up.
Disables archival of the release, and is not intended for deployment use, only development and testing.
* `:include_erts` - Sets the strategy for locating ERTS in a release to one of the following:
* `true` - Bundles the current ERTS into the release (located by asking `erl` where it lives)
* `false` - Skips bundling an ERTS completely, but requires that one be provided on the target system
* `"path/to/erlang"` - As indicated, a path to the ERTS you wish to bundle. Useful for cross-compiling.
This path can be found with `:code.root_dir()`
* `:include_src` - Boolean indicating whether to bundle source files in the release. (default: false)
* `:config_providers` - A list of custom configuration providers to use. See `Distillery.Releases.Config.Provider` for details.
* `:included_configs` - Used to set paths for additional `sys.config` files to include at runtime, e.g. `["/etc/sys.config"]`
* `:appup_transforms` - A list of custom appup transforms to apply when building upgrades: e.g. `[MyTransform, {MyTransform, [:foo]}]`
* `:strip_debug_info` - Boolean indicating whether to strip debug information from BEAM files (default: false)
* `:plugins` - A list of custom release plugins. See `Distillery.Releases.Plugin` for details
* `:overlay_vars` - A list of variables to expose to overlays and templates. Must be a Keyword list
* `:overlays` - A list of overlays to apply. See `Distillery.Releases.Overlays` for details.
* `:overrides` - A list of overrides for Distillery-provided overlay vars
* `:commands` - A list of custom commands to add to the release, e.g. `[migrate: "rel/scripts/migrate.sh"]`
## Hooks
The following options all take a path to a directory containing the scripts which will be
executed at the given point in the release lifecycle:
* `:pre_configure_hooks` - Executed _before_ the system has generated config files
* `:post_configure_hooks` - Executed _after_ config files have been generated
* `:pre_start_hooks` - Executed _before_ the release is started
* `:post_start_hooks` - Executed _after_ the release is started
* `:pre_stop_hooks` - Executed _before_ the release is stopped
* `:post_stop_hooks` - Executed _after_ the release is stopped
* `:pre_upgrade_hooks` - Executed _before_ a release upgrade is installed
* `:post_upgrade_hooks` - Executed _after_ a release upgrade is installed
"""
# NOTE(review): `nil` defaults appear to mean "unset here", so that
# environment and release profiles can be overlaid as described in the
# moduledoc — confirm against the profile-merging code before relying on it.
defstruct output_dir: nil,
vm_args: nil,
cookie: nil,
config: nil,
sys_config: nil,
executable: [enabled: false, transient: false],
erl_opts: nil,
run_erl_env: nil,
dev_mode: nil,
no_dot_erlang: nil,
include_erts: nil,
erts_version: nil,
include_src: nil,
include_system_libs: nil,
included_configs: [],
config_providers: [],
appup_transforms: [],
strip_debug_info: nil,
plugins: [],
overlay_vars: [],
overlays: [],
overrides: nil,
commands: nil,
pre_configure_hooks: nil,
post_configure_hooks: nil,
pre_start_hooks: nil,
post_start_hooks: nil,
pre_stop_hooks: nil,
post_stop_hooks: nil,
pre_upgrade_hooks: nil,
post_upgrade_hooks: nil
@type t :: %__MODULE__{
output_dir: nil | String.t(),
vm_args: nil | String.t(),
cookie: nil | atom(),
config: nil | String.t(),
sys_config: nil | String.t(),
executable: nil | false | Keyword.t(),
erl_opts: nil | String.t(),
run_erl_env: nil | String.t(),
dev_mode: nil | boolean,
no_dot_erlang: nil | boolean,
include_erts: nil | boolean | String.t(),
erts_version: nil | String.t(),
include_src: nil | boolean,
include_system_libs: nil | boolean | String.t(),
included_configs: [String.t()],
config_providers: [module() | {module(), [term]}],
appup_transforms: [module() | {module(), [term]}],
strip_debug_info: nil | boolean,
plugins: [module()],
overlay_vars: nil | Keyword.t(),
overlays: [Distillery.Releases.Overlays.overlay()],
overrides: nil | [{atom, String.t()}],
commands: nil | [{atom, String.t()}],
pre_configure_hooks: nil | String.t(),
post_configure_hooks: nil | String.t(),
pre_start_hooks: nil | String.t(),
post_start_hooks: nil | String.t(),
pre_stop_hooks: nil | String.t(),
post_stop_hooks: nil | String.t(),
pre_upgrade_hooks: nil | String.t(),
post_upgrade_hooks: nil | String.t()
}
end
|
lib/distillery/releases/models/profile.ex
| 0.799599 | 0.57063 |
profile.ex
|
starcoder
|
defmodule Tds.BinaryUtils do
  @moduledoc false

  # Each macro expands to a bitstring segment specifier (e.g. `little - unsigned - 16`)
  # so TDS wire-format fields can be pattern matched by name:
  #
  #     <<len::ushortlen(), rest::binary>> = packet
  #
  # Multi-byte integers on the TDS wire are little-endian unless noted otherwise.

  @doc """
  A single bit value of either 0 or 1
  """
  defmacro bit(), do: quote(do: size(1))

  @doc """
  An unsigned single byte (8-bit) value. The range is 0 to 255.
  """
  defmacro byte(), do: quote(do: unsigned - 8)

  @doc """
  An unsigned single byte (8-bit) value representing the length of the associated data. The range is 0 to 255.
  """
  defmacro bytelen(), do: quote(do: unsigned - 8)

  @doc """
  An unsigned 2-byte (16-bit) value. The range is 0 to 65535.
  """
  defmacro ushort(), do: quote(do: little - unsigned - 16)

  @doc """
  An unsigned 6-byte (48-bit) value. The range is 0 to (2^48)-1
  """
  defmacro sixbyte(), do: quote(do: unsigned - 48)

  @doc """
  A signed 4-byte (32-bit) value. The range is -(2^31) to (2^31)-1.
  """
  defmacro long(), do: quote(do: little - signed - 32)

  @doc """
  A signed 8-byte (64-bit) value. The range is -(2^63) to (2^63)-1.
  """
  defmacro longlong(), do: quote(do: little - signed - 64)

  @doc """
  An unsigned 4-byte (32-bit) value. The range is 0 to (2^32)-1
  """
  defmacro ulong(), do: quote(do: little - unsigned - 32)

  @doc """
  An unsigned 8-byte (64-bit) value. The range is 0 to (2^64)-1.
  """
  defmacro ulonglong(), do: quote(do: little - unsigned - 64)

  @doc """
  An unsigned 4-byte (32-bit) value. The range when used as a numeric value is 0 to (2^32)- 1.
  """
  defmacro dword(), do: quote(do: unsigned - 32)

  @doc """
  An unsigned single byte (8-bit) value representing a character. The range is 0 to 255.
  """
  defmacro uchar(), do: quote(do: unsigned - 8)

  @doc """
  An unsigned 2-byte (16-bit) value representing the length of the associated data. The range is 0 to 65535.
  """
  defmacro ushortlen(), do: quote(do: little - unsigned - 16)

  @doc """
  An unsigned 2-byte (16-bit) value representing the length of the associated character or binary data. The range is 0 to 8000.
  """
  defmacro ushortcharbinlen(), do: quote(do: little - unsigned - 16)

  @doc """
  A signed 4-byte (32-bit) value representing the length of the associated data. The range is -(2^31) to (2^31)-1.
  """
  defmacro longlen(), do: quote(do: little - signed - 32)

  @doc """
  An unsigned 8-byte (64-bit) value representing the length of the associated data. The range is 0 to (2^64)-1.
  """
  defmacro ulonglonglen(), do: quote(do: little - unsigned - 64)

  @doc """
  An unsigned single byte (8-bit) value representing the precision of a numeric number.
  """
  defmacro precision(), do: quote(do: unsigned - 8)

  @doc """
  An unsigned single byte (8-bit) value representing the scale of a numeric number.
  """
  defmacro scale(), do: quote(do: unsigned - 8)

  @doc """
  A single byte (8-bit) value representing a NULL value. 0x00
  ## Example
      iex> import Tds.BinaryUtils
      iex> <<_::gen_null()>> = <<0x00 :: size(8)>>
  """
  defmacro gen_null(), do: quote(do: size(8))

  @doc """
  A 2-byte (16-bit) value representing a T-SQL NULL value for a character or binary data type.
  ## Example
      iex> import Tds.BinaryUtils
      iex> <<_::charbin_null16()>> = <<0xFFFF :: size(16)>>
  Please refer to TYPE_VARBYTE (see MS-TDS.pdf section 2.2.5.2.3) for additional details.
  """
  defmacro charbin_null16(), do: quote(do: size(16))

  @doc """
  A 4-byte (32-bit) value representing a T-SQL NULL value for a character or binary data type.
  ## Example
      iex> import Tds.BinaryUtils
      iex> <<_::charbin_null32()>> = <<0xFFFFFFFF :: size(32)>>
  Please refer to TYPE_VARBYTE (see MS-TDS.pdf section 2.2.5.2.3) for additional details.
  """
  defmacro charbin_null32(), do: quote(do: size(32))

  @doc """
  A FRESERVEDBIT is a BIT value used for padding that does not transmit information.
  FRESERVEDBIT fields SHOULD be set to %b0 and MUST be ignored on receipt.
  """
  defmacro freservedbit(), do: quote(do: 0x0 :: size(1))

  @doc """
  A FRESERVEDBYTE is a BYTE value used for padding that does not transmit information. FRESERVEDBYTE fields SHOULD be set to %x00 and MUST be ignored on receipt.
  """
  defmacro freservedbyte(), do: quote(do: 0x00 :: size(8))

  @doc """
  An 8-bit signed integer
  """
  defmacro int8(), do: quote(do: signed - 8)

  @doc """
  A 16-bit signed integer
  """
  defmacro int16(), do: quote(do: signed - 16)

  @doc """
  A 32-bit signed integer
  """
  defmacro int32(), do: quote(do: signed - 32)

  @doc """
  A 64-bit signed integer
  """
  defmacro int64(), do: quote(do: signed - 64)

  @doc """
  An 8-bit unsigned integer
  """
  defmacro uint8(), do: quote(do: unsigned - 8)

  @doc """
  A 16-bit unsigned integer
  """
  defmacro uint16(), do: quote(do: unsigned - 16)

  @doc """
  A 32-bit unsigned integer
  """
  defmacro uint32(), do: quote(do: unsigned - 32)

  @doc """
  A 64-bit unsigned integer
  """
  defmacro uint64(), do: quote(do: unsigned - 64)

  @doc """
  A 64-bit signed float
  """
  defmacro float64(), do: quote(do: signed - float - 64)

  @doc """
  A 32-bit signed float
  """
  defmacro float32(), do: quote(do: signed - float - 32)

  @doc """
  A binary segment of `size` bytes.
  """
  defmacro binary(size), do: quote(do: binary - size(unquote(size)))

  @doc """
  A binary segment of `size` units, each `unit` bits wide.
  """
  defmacro binary(size, unit),
    do: quote(do: binary - size(unquote(size)) - unit(unquote(unit)))

  @doc """
  A UCS-2/UTF-16LE string segment of `size` characters (16 bits each).
  """
  defmacro unicode(size),
    do: quote(do: binary - little - size(unquote(size)) - unit(16))
end
|
lib/tds/binary_utils.ex
| 0.862308 | 0.586552 |
binary_utils.ex
|
starcoder
|
defmodule Buckaroo.Router do
  @moduledoc ~S"""
  An extension to `Plug.Router` now also supporting `websocket`.
  """

  @doc false
  # Pulls in Plug.Router, exposes the `sse/2` and `websocket/2` route macros,
  # and sets up the bookkeeping attributes:
  #   - @plug_forwards accumulates modules reached via `forward`, so that
  #     __sse__/0 can ask them whether they declare SSE routes;
  #   - @has_sse_route is flipped to true by the `sse/2` macro.
  defmacro __using__(opts) do
    quote location: :keep do
      use Plug.Router, unquote(opts)
      import Buckaroo.Router, only: [sse: 2, websocket: 2]
      Module.register_attribute(__MODULE__, :plug_forwards, accumulate: true)
      @on_definition {Buckaroo.Router, :on_def}
      @before_compile Buckaroo.Router
      @has_sse_route false
    end
  end

  @doc false
  # Emits __sse__/0: true when this router declared an `sse` route directly,
  # otherwise delegates the question to any forwarded routers collected in
  # @plug_forwards. Also removes the imported DSL macros after compilation.
  defmacro __before_compile__(_env) do
    quote do
      @doc false
      @spec __sse__ :: boolean
      if @has_sse_route do
        def __sse__, do: true
      else
        def __sse__ do
          Enum.any?(@plug_forwards, fn plug ->
            {:__sse__, 0} in plug.__info__(:functions) and plug.__sse__()
          end)
        end
      end

      import Buckaroo.Router, only: []
    end
  end

  @doc ~S"""
  Dispatches to the websocket.
  See `Plug.Router.match/3` for more examples.
  ## Example
  ```
  websocket "/ws", connect: ExampleSocket
  ```
  """
  # Mirrors Plug.Router's own match macro expansion: builds a do_match/4
  # clause whose body stashes the socket module under conn.private.websocket
  # for the adapter to pick up later. The :connect option names the handler;
  # remaining options are passed through to Plug.Router.__route__/4.
  defmacro websocket(expr, opts) do
    method = :websocket
    {path, guards} = extract_path_and_guards(expr)
    body = quote do: Plug.Conn.put_private(var!(conn), :websocket, unquote(opts[:connect]))
    options = Keyword.delete(opts, :connect)

    quote bind_quoted: [
            method: method,
            path: path,
            options: options,
            guards: Macro.escape(guards, unquote: true),
            body: Macro.escape(body, unquote: true)
          ] do
      route = Plug.Router.__route__(method, path, guards, options)
      {conn, method, match, params, host, guards, private, assigns} = route

      defp do_match(unquote(conn), unquote(method), unquote(match), unquote(host))
           when unquote(guards) do
        unquote(private)
        unquote(assigns)

        # Merge path params into conn.params without clobbering fetched params.
        merge_params = fn
          %Plug.Conn.Unfetched{} -> unquote({:%{}, [], params})
          fetched -> Map.merge(fetched, unquote({:%{}, [], params}))
        end

        conn = update_in(unquote(conn).params, merge_params)
        conn = update_in(conn.path_params, merge_params)

        Plug.Router.__put_route__(conn, unquote(path), fn var!(conn) -> unquote(body) end)
      end
    end
  end

  @doc ~S"""
  Dispatches to the event source.
  Server-Sent Events (SSE) is a server push technology enabling a client to receive automatic updates from a server via HTTP connection.
  See `Plug.Router.match/3` for more examples.
  ## Example
  ```
  sse "/eventsource", source: ExampleEventSource
  ```
  """
  # Same expansion shape as websocket/2, but matches GET, tags conn.private
  # with {:sse, source}, and records @has_sse_route for __before_compile__.
  defmacro sse(expr, opts) do
    method = :get
    {path, guards} = extract_path_and_guards(expr)
    body = quote do: Plug.Conn.put_private(var!(conn), :websocket, {:sse, unquote(opts[:source])})
    options = Keyword.delete(opts, :source)

    quote bind_quoted: [
            method: method,
            path: path,
            options: options,
            guards: Macro.escape(guards, unquote: true),
            body: Macro.escape(body, unquote: true)
          ] do
      route = Plug.Router.__route__(method, path, guards, options)
      {conn, method, match, params, host, guards, private, assigns} = route

      defp do_match(unquote(conn), unquote(method), unquote(match), unquote(host))
           when unquote(guards) do
        unquote(private)
        unquote(assigns)

        merge_params = fn
          %Plug.Conn.Unfetched{} -> unquote({:%{}, [], params})
          fetched -> Map.merge(fetched, unquote({:%{}, [], params}))
        end

        conn = update_in(unquote(conn).params, merge_params)
        conn = update_in(conn.path_params, merge_params)

        Plug.Router.__put_route__(conn, unquote(path), fn var!(conn), _ -> unquote(body) end)
      end

      @has_sse_route true
    end
  end

  @doc false
  @spec on_def(term, :def | :defp, atom, term, term, term) :: term
  # @on_definition hook: every time Plug.Router's forward/2 defines a
  # do_match clause, record the forward target so __sse__/0 can query it.
  # credo:disable-for-next-line
  def on_def(env, :defp, :do_match, [{:conn, _, Plug.Router}, _method, _path, _], _guards, _body) do
    if forward = Module.get_attribute(env.module, :plug_forward_target) do
      unless forward in Module.get_attribute(env.module, :plug_forwards) do
        Module.put_attribute(env.module, :plug_forwards, forward)
      end
    end
  end

  # credo:disable-for-next-line
  def on_def(_env, _type, _name, _args, _guards, _body), do: :ignore

  # Extract the path and guards from the path.
  defp extract_path_and_guards({:when, _, [path, guards]}), do: {extract_path(path), guards}
  defp extract_path_and_guards(path), do: {extract_path(path), true}

  # A bare `_` route matches any path; rewrite it to a catch-all glob.
  defp extract_path({:_, _, var}) when is_atom(var), do: "/*_path"
  defp extract_path(path), do: path
end
|
lib/buckaroo/router.ex
| 0.786705 | 0.612657 |
router.ex
|
starcoder
|
defmodule Thumbnex do
  @moduledoc """
  Create thumbnails from images and videos.
  """

  alias Thumbnex.Animations
  alias Thumbnex.ExtractFrame
  alias Thumbnex.Gifs

  # Effectively "no limit" when :max_width / :max_height are not given.
  @no_limit 1_000_000_000_000

  @doc """
  Create a thumbnail image.
  Image format is inferred from output path file extension.
  To override, pass the `:format` option.
  Options:
  * `:width` - Width of the thumbnail. Defaults to input width.
  * `:height` - Height of the thumbnail. Defaults to input height.
  * `:max_width` - Maximum width of the thumbnail.
  * `:max_height` - Maximum height of the thumbnail.
  * `:format` - Output format for the thumbnail. Defaults to `output_path` extension, or `"png"` if indeterminate.
  * `:time_offset` - Timestamp in seconds at which to take screenshot, for videos and GIFs.
    By default picks a time near the beginning, based on video duration.
  """
  @spec create_thumbnail(binary, binary, Keyword.t) :: :ok
  def create_thumbnail(input_path, output_path, opts \\ []) do
    input = Path.expand(input_path)
    output = Path.expand(output_path)

    format =
      opts
      |> Keyword.get(:format, image_format_from_path(output))
      |> normalize_format()

    # Choose a representative timestamp unless the caller supplied one.
    offset = number_opt(opts, :time_offset, frame_time(Animations.duration(input)))

    frame = ExtractFrame.single_frame(input, offset, output_ext: ".#{format}")

    frame
    |> Mogrify.open()
    |> Mogrify.verbose()
    |> resize_if_different(number_opt(opts, :width, nil), number_opt(opts, :height, nil))
    |> Mogrify.resize_to_limit(limit_geometry(opts))
    |> Mogrify.save(path: output)

    # Remove the intermediate frame; the match asserts deletion succeeded.
    :ok = File.rm!(frame)
  end

  @doc """
  Create an animated GIF preview.
  Options:
  * `:width` - Width of the thumbnail. Defaults to input width.
  * `:height` - Height of the thumbnail. Defaults to input height.
  * `:max_width` - Maximum width of the thumbnail.
  * `:max_height` - Maximum height of the thumbnail.
  * `:frame_count` - Number of frames to output. Default 4.
  * `:fps` - Frames per second of output GIF. Default 1.
  * `:optimize` - Add mogrify options to reduce output size. Default true.
  """
  @spec animated_gif_thumbnail(binary, binary, Keyword.t) :: :ok
  def animated_gif_thumbnail(input_path, output_path, opts \\ []) do
    input = Path.expand(input_path)
    output = Path.expand(output_path)

    frame_count = number_opt(opts, :frame_count, 4)
    fps = number_opt(opts, :fps, 1)
    optimize? = Keyword.get(opts, :optimize, true)

    frames = ExtractFrame.multiple_frames(input, frame_count, fps, output_ext: ".gif")

    frames
    |> Mogrify.open()
    |> Mogrify.verbose()
    |> resize_if_different(number_opt(opts, :width, nil), number_opt(opts, :height, nil))
    |> Mogrify.resize_to_limit(limit_geometry(opts))
    |> optimize_mogrify_image(optimize?)
    |> Mogrify.save(path: output)

    :ok = File.rm!(frames)
  end

  # Build the "WxH" limit geometry string from :max_width/:max_height opts.
  defp limit_geometry(opts) do
    max_width = number_opt(opts, :max_width, @no_limit)
    max_height = number_opt(opts, :max_height, @no_limit)
    "#{max_width}x#{max_height}"
  end

  # Infer the output format from the file extension; fall back to "png".
  # Path.extname/1 always returns "" or ".ext", so the match is exhaustive.
  defp image_format_from_path(path) do
    case Path.extname(path) do
      "" -> "png"
      "." <> extension -> extension
    end
  end

  # Strip a leading dot from a user-supplied format, if present.
  defp normalize_format("." <> format), do: format
  defp normalize_format(format), do: format

  # Pick a screenshot time based on media duration (in seconds).
  defp frame_time(:no_duration), do: 0
  defp frame_time(seconds) when seconds < 4, do: 0
  defp frame_time(seconds) when seconds < 10, do: 1
  defp frame_time(seconds), do: 0.1 * seconds

  # Only issue a Mogrify resize when the caller asked for explicit dimensions
  # that differ from the source image's.
  defp resize_if_different(image, nil, nil), do: image

  defp resize_if_different(%{width: actual_w, height: actual_h} = image, wanted_w, wanted_h) do
    if actual_w == wanted_w and actual_h == wanted_h do
      image
    else
      Mogrify.resize(image, "#{wanted_w}x#{wanted_h}")
    end
  end

  # Apply GIF size optimizations only when requested.
  defp optimize_mogrify_image(image, true), do: Gifs.optimize_mogrify_image(image)
  defp optimize_mogrify_image(image, false), do: image

  # Fetch a numeric option, falling back to `default` for missing or
  # non-numeric values.
  defp number_opt(opts, key, default) do
    case Keyword.get(opts, key) do
      value when is_number(value) -> value
      _other -> default
    end
  end
end
|
lib/thumbnex.ex
| 0.894873 | 0.506225 |
thumbnex.ex
|
starcoder
|
defmodule JMES do
  @moduledoc """
  JMES implements JMESPath, a query language for JSON.
  It passes the official compliance tests.
  See [jmespath.org](http://jmespath.org).
  """

  alias JMES.{Functions, Parser}

  @type expr :: String.t() | charlist
  @type ast :: tuple | atom
  @type error :: {:error, any}

  @doc """
  Evaluates a JMESPath expression against some data.
  The expression can be a string, a charlist, or an Abstract Syntax Tree (see
  `JMES.Parser.parse/1`).
  ## Options
  - `underscore`: if `true`, underscore identifiers in the expression (default `false`)
  - `custom_functions`: if specified, JMES will attempt to call functions it doesn't know
    using `call(name, args, options)` in the specified module
    (`JMES.Functions.Handler` behaviour)
  ## Examples
      iex> JMES.search("[name, age]", %{"name" => "Alice", "age" => 28, "place" => "wonderland"})
      {:ok, ["Alice", 28]}
  """
  @spec search(ast | expr, any) :: {:ok, any} | error
  def search(expr, data) do
    search(expr, data, [])
  end

  # An already-parsed AST (tuple or atom): evaluate directly, then strip any
  # internal {:project, list} wrappers before returning to the caller.
  @spec search(ast, any, keyword) :: {:ok, any} | error
  def search(expr, data, opts) when is_tuple(expr) or is_atom(expr) do
    case eval(expr, data, opts) do
      {:ok, value} -> {:ok, unproject(value)}
      err -> err
    end
  end

  # A string/charlist expression: parse it first, then evaluate.
  @spec search(expr, any, keyword) :: {:ok, any} | error
  def search(expr, data, opts) do
    with {:ok, ast} <- Parser.parse(expr) do
      search(ast, data, opts)
    end
  end

  # NOTE: clause order below is significant — more specific data shapes
  # (structs, {:project, _} wrappers) must be handled before generic ones.
  # Values tagged {:project, list} represent an active projection: subsequent
  # expressions are applied element-wise and nil results are dropped.
  @spec eval(ast, any, keyword) :: {:ok, any} | error

  # ==============================================================================================
  # Structs
  # ==============================================================================================

  defp eval(ast, %{__struct__: _} = struct, opts) do
    eval(ast, Map.from_struct(struct), opts)
  end

  # ==============================================================================================
  # Projection
  # ==============================================================================================

  # Apply ast to each projected element, dropping nil results and
  # short-circuiting on the first error.
  defp eval(ast, {:project, data}, opts) do
    List.foldr(data, {:ok, {:project, []}}, fn
      item, {:ok, {:project, list}} = acc ->
        case eval(ast, item, opts) do
          {:ok, value} when is_nil(value) -> acc
          {:ok, value} -> {:ok, {:project, [value | list]}}
          err -> err
        end

      _expr, err ->
        err
    end)
  end

  # ==============================================================================================
  # Wildcard
  # ==============================================================================================

  defp eval(:wildcard, data, _opts) when is_map(data) do
    values = Map.values(data)

    if length(values) > 0 do
      {:ok, {:project, values}}
    else
      {:ok, nil}
    end
  end

  defp eval(:wildcard, _data, _opts) do
    {:ok, nil}
  end

  defp eval({:wildcard, expr}, data, opts) do
    case eval(expr, data, opts) do
      {:ok, {:project, _date} = value} -> eval({:list, [:wildcard]}, value, opts)
      {:ok, value} when is_list(value) and length(value) > 0 -> {:ok, {:project, value}}
      {:ok, _value} -> {:ok, nil}
      err -> err
    end
  end

  defp eval({:list, [:wildcard]}, data, _opts) when is_list(data) do
    {:ok, {:project, data}}
  end

  defp eval({:list, [:wildcard]}, _data, _opts) do
    {:ok, nil}
  end

  # ==============================================================================================
  # ID
  # ==============================================================================================

  # Identifier lookup: tries the string key first, then the atom key.
  defp eval({:id, id}, data, opts) when is_map(data) do
    underscore = Keyword.get(opts, :underscore, false)
    id = if underscore, do: Macro.underscore(id), else: id
    value = Map.get(data, id)

    if is_nil(value) do
      {:ok, Map.get(data, String.to_atom(id))}
    else
      {:ok, value}
    end
  end

  defp eval({:id, _id}, _data, _opts) do
    {:ok, nil}
  end

  # ==============================================================================================
  # Literals
  # ==============================================================================================

  defp eval({:string, value}, _data, _opts) do
    {:ok, value}
  end

  defp eval({:json, value}, _data, _opts) do
    Poison.decode(value)
  end

  defp eval(value, _data, _opts) when is_binary(value) do
    {:ok, value}
  end

  defp eval(value, _data, _opts) when is_number(value) do
    {:ok, value}
  end

  defp eval(value, _data, _opts) when is_list(value) do
    {:ok, value}
  end

  defp eval(value, _data, _opts) when is_map(value) do
    {:ok, value}
  end

  defp eval(value, _data, _opts) when is_nil(value) do
    {:ok, nil}
  end

  # ==============================================================================================
  # Node
  # ==============================================================================================

  # `@` — the current node.
  defp eval(:node, data, _opts) do
    {:ok, data}
  end

  # ==============================================================================================
  # Pipe
  # ==============================================================================================

  # `|` stops projection: the left result is unprojected before feeding right.
  defp eval({:pipe, [left, right]}, data, opts) do
    with {:ok, left} <- eval(left, data, opts) do
      eval(right, unproject(left), opts)
    end
  end

  # ==============================================================================================
  # Logical Operators
  # ==============================================================================================

  defp eval({:and, [left, right]}, data, opts) do
    binop_ok(left, right, data, &if(truthy?(&1), do: &2, else: &1), opts)
  end

  defp eval({:or, [left, right]}, data, opts) do
    binop_ok(left, right, data, &if(truthy?(&1), do: &1, else: &2), opts)
  end

  defp eval({:eq, [left, right]}, data, opts) do
    binop_ok(left, right, data, &===/2, opts)
  end

  defp eval({:neq, [left, right]}, data, opts) do
    binop_ok(left, right, data, &!==/2, opts)
  end

  defp eval({:lt, [left, right]}, data, opts) do
    compare(left, right, data, &</2, opts)
  end

  defp eval({:gt, [left, right]}, data, opts) do
    compare(left, right, data, &>/2, opts)
  end

  defp eval({:lte, [left, right]}, data, opts) do
    compare(left, right, data, &<=/2, opts)
  end

  defp eval({:gte, [left, right]}, data, opts) do
    compare(left, right, data, &>=/2, opts)
  end

  defp eval({:not, expr}, data, opts) do
    with {:ok, value} <- eval(expr, data, opts),
         value = unproject(value) do
      {:ok, !truthy?(value)}
    end
  end

  # ==============================================================================================
  # Child
  # ==============================================================================================

  # `a.b` — evaluate the parent expression, then the child against its result.
  defp eval({:child, [expr, child]}, data, opts) do
    with {:ok, parent} <- eval(expr, data, opts) do
      eval(child, parent, opts)
    end
  end

  # ==============================================================================================
  # Flatten
  # ==============================================================================================

  # `[]` — flatten one level and start a projection.
  defp eval(:flatten, data, _opts) when is_list(data) do
    {:ok, {:project, flatten(data)}}
  end

  defp eval(:flatten, _data, _opts) do
    {:ok, nil}
  end

  defp eval({:flatten, expr}, data, opts) do
    with {:ok, parent} <- eval(expr, data, opts),
         parent = unproject(parent) do
      eval(:flatten, parent, opts)
    end
  end

  # ==============================================================================================
  # Index
  # ==============================================================================================

  defp eval({:index, [nil, index]}, data, _opts) when is_integer(index) and is_list(data) do
    {:ok, Enum.at(data, index)}
  end

  defp eval({:index, [expr, index]}, data, opts) when is_integer(index) and not is_nil(expr) do
    with {:ok, parent} <- eval(expr, data, opts) do
      eval({:index, [nil, index]}, parent, opts)
    end
  end

  defp eval({:index, [_expr, _index]}, _data, _opts) do
    {:ok, nil}
  end

  # ==============================================================================================
  # Slice
  # ==============================================================================================

  defp eval({:slice, [_expr, [_start, _stop, _step]]}, nil, _opts) do
    {:ok, nil}
  end

  defp eval({:slice, [nil, [start, stop, step]]}, data, _opts) do
    project_slice(data, start, stop, step)
  end

  defp eval({:slice, [expr, [start, stop, step]]}, data, opts) do
    with {:ok, parent} <- eval(expr, data, opts) do
      project_slice(parent, start, stop, step)
    end
  end

  # ==============================================================================================
  # Filter
  # ==============================================================================================

  defp eval({:filter, [_expr, _query]}, nil, _opts) do
    {:ok, nil}
  end

  # Keep each item for which the query evaluates truthy; errors short-circuit.
  defp eval({:filter, [nil, query]}, data, opts) when is_list(data) do
    List.foldr(data, {:ok, {:project, []}}, fn
      item, {:ok, {:project, list}} = acc ->
        case eval(query, item, opts) do
          {:ok, value} ->
            if truthy?(unproject(value)) do
              {:ok, {:project, [item | list]}}
            else
              acc
            end

          err ->
            err
        end

      _expr, err ->
        err
    end)
  end

  defp eval({:filter, [nil, _query]}, _data, _opts) do
    {:ok, nil}
  end

  defp eval({:filter, [expr, query]}, data, opts) do
    with {:ok, parent} <- eval(expr, data, opts) do
      eval({:filter, [nil, query]}, parent, opts)
    end
  end

  # ==============================================================================================
  # List
  # ==============================================================================================

  # Multiselect list `[a, b, ...]`.
  defp eval({:list, _exprs}, nil, _opts) do
    {:ok, nil}
  end

  defp eval({:list, exprs}, data, opts) do
    with {:ok, value} <- eval_list(exprs, data, opts) do
      {:ok, value}
    end
  end

  # ==============================================================================================
  # Dictionary
  # ==============================================================================================

  # Multiselect hash `{k: expr, ...}`.
  defp eval({:dict, _keyvalv}, nil, _opts) do
    {:ok, nil}
  end

  defp eval({:dict, keyvalv}, data, opts) do
    List.foldl(keyvalv, {:ok, %{}}, fn
      [key, expr], {:ok, map} ->
        case eval(expr, data, opts) do
          {:ok, value} -> {:ok, Map.put(map, key, value)}
          err -> err
        end

      _expr, err ->
        err
    end)
  end

  # ==============================================================================================
  # Call
  # ==============================================================================================

  defp eval({:call, [name, argv]}, data, opts) do
    with {:ok, args} <- eval_list(argv, data, opts),
         args = unproject(args) do
      Functions.call(name, args, opts)
    end
  end

  # `&expr` — an expression reference passed unevaluated (e.g. to sort_by).
  defp eval({:quote, expr}, _data, _opts) do
    {:ok, expr}
  end

  # ==============================================================================================
  # Fallback
  # ==============================================================================================

  defp eval(ast, _data, _opts) do
    {:error, {:invalid_ast, ast}}
  end

  # ==============================================================================================
  # Helpers
  # ==============================================================================================

  # Recursively strip {:project, list} wrappers, yielding plain data.
  @spec unproject(any) :: any
  defp unproject({:project, data}) do
    unproject(data)
  end

  defp unproject(data) when is_list(data) do
    Enum.map(data, &unproject(&1))
  end

  defp unproject(%{__struct__: _} = struct) do
    unproject(Map.from_struct(struct))
  end

  defp unproject(data) when is_map(data) do
    data
    |> Enum.map(fn {key, value} -> {key, unproject(value)} end)
    |> Enum.into(%{})
  end

  defp unproject(data) do
    data
  end

  # Evaluate both operands, unproject them, then apply fun (which may error).
  @spec binop(expr, expr, any, (any, any -> {:ok, any} | error), keyword) :: {:ok, any} | error
  defp binop(left, right, data, fun, opts) do
    with {:ok, left} <- eval(left, data, opts),
         {:ok, right} <- eval(right, data, opts),
         left = unproject(left),
         right = unproject(right) do
      fun.(left, right)
    end
  end

  # Like binop/5 but for funs that cannot fail — wraps the result in {:ok, _}.
  @spec binop_ok(expr, expr, any, (any, any -> any), keyword) :: {:ok, any}
  defp binop_ok(left, right, data, fun, opts) do
    binop(
      left,
      right,
      data,
      fn left, right ->
        {:ok, fun.(left, right)}
      end,
      opts
    )
  end

  # Ordering comparisons are only defined for numbers in JMESPath.
  @spec compare(expr, expr, any, (any, any -> boolean), keyword) :: {:ok, boolean} | error
  defp compare(left, right, data, fun, opts) do
    binop(
      left,
      right,
      data,
      fn left, right ->
        if is_number(left) and is_number(right) do
          {:ok, fun.(left, right)}
        else
          {:error, :invalid_type}
        end
      end,
      opts
    )
  end

  # JMESPath truthiness: "", [] and %{} are false, plus regular Elixir rules.
  @spec truthy?(any) :: boolean
  defp truthy?("") do
    false
  end

  defp truthy?([]) do
    false
  end

  defp truthy?(%{} = map) do
    length(Map.keys(map)) > 0
  end

  defp truthy?(value) do
    !!value
  end

  # Flatten exactly one level of nesting; empty sublists disappear.
  @spec flatten(any) :: list | nil
  defp flatten(data) when is_list(data) do
    List.foldr(data, [], fn
      [], acc -> acc
      [_ | _] = list, acc -> list ++ acc
      item, acc -> [item | acc]
    end)
  end

  defp flatten(_data) do
    nil
  end

  # Python-style slicing with negative indices and negative steps.
  # Clause order normalizes nil/negative bounds before the cutting clauses.
  @spec slice(any, integer | nil, integer | nil, integer | nil) :: {:ok, list | nil} | error
  defp slice(_data, _start, _stop, step) when step == 0 do
    {:error, :invalid_step}
  end

  defp slice(data, _start, _stop, _step) when not is_list(data) do
    {:ok, nil}
  end

  defp slice(data, nil, nil, nil) do
    {:ok, data}
  end

  defp slice(data, start, stop, nil) do
    slice(data, start, stop, 1)
  end

  defp slice(data, nil, stop, step) when step > 0 do
    slice(data, 0, stop, step)
  end

  defp slice(data, nil, stop, step) do
    slice(data, length(data), stop, step)
  end

  defp slice(data, start, stop, step) when start < 0 do
    slice(data, length(data) + start, stop, step)
  end

  defp slice(data, start, nil, step) when step > 0 do
    slice(data, start, length(data), step)
  end

  defp slice(data, start, stop, step) when is_number(stop) and stop < 0 do
    if -stop > length(data) do
      slice(data, start, nil, step)
    else
      slice(data, start, length(data) + stop, step)
    end
  end

  defp slice(data, start, stop, step) when step > 0 and start < stop do
    {:ok, data |> Enum.slice(start..(stop - 1)) |> Enum.take_every(step)}
  end

  defp slice(data, start, stop, step) when step < 0 and stop == nil do
    {:ok, data |> Enum.slice(0..start) |> Enum.reverse() |> Enum.take_every(-step)}
  end

  defp slice(data, start, stop, step) when step < 0 and start > stop do
    {:ok, data |> Enum.slice((stop + 1)..start) |> Enum.reverse() |> Enum.take_every(-step)}
  end

  defp slice(_data, _start, _stop, _step) do
    {:ok, []}
  end

  # A successful list slice becomes a projection; anything else passes through.
  @spec project_slice(any, integer | nil, integer | nil, integer | nil) :: {:ok, any} | error
  defp project_slice(data, start, stop, step) do
    case slice(data, start, stop, step) do
      {:ok, values} when is_list(values) -> {:ok, {:project, values}}
      default -> default
    end
  end

  # Evaluate each expression against the same data, collecting results in
  # order; the first error short-circuits.
  @spec eval_list([ast], any, keyword) :: {:ok, list} | error
  defp eval_list(exprs, data, opts) do
    List.foldr(exprs, {:ok, []}, fn
      expr, {:ok, list} ->
        case eval(expr, data, opts) do
          {:ok, value} -> {:ok, [value | list]}
          err -> err
        end

      _expr, err ->
        err
    end)
  end
end
|
lib/jmes.ex
| 0.837071 | 0.585783 |
jmes.ex
|
starcoder
|
defmodule Expletive do
  @moduledoc """
  A profanity detection and sanitization library.
  """

  alias Expletive.Configuration
  alias Expletive.Replacement

  @type replacement :: :default | :garbled | :stars | :vowels | :nonconsonants | String.t | {:repeat, String.t} | :keep_first_letter | {:keep_first_letter, String.t}

  @doc """
  Returns a configuration to pass to other functions.
  * `:blacklist` - A list of words which are considered profane (if a string is given, it will be split on whitespace to create the world list)
  * `:whitelist` - A list of words which are allowed even if they're also present in the blacklist (if a string is given, it will be split on whitespace to create the world list)
  * `:replacement` - A replacement strategy:
    * `:garbled` - Replace by a random permutation of `$@!#%` (default)
    * `:stars` - Replace all characters by `*`
    * `:vowels` - Replace all vowels of the offending word by `*`
    * `:nonconsonants` - Replace all non-consonants of the offending word by `*`
    * `string` - Replace the occurence by the given string
    * `{:repeat, string}` - Replace all characters by the given string
    * `:keep_first_letter` - Replace all characters but the first one by `*`
    * `{:keep_first_letter, string}` - Replace all characters but the first one by the given string
  """
  @spec configure(list) :: Configuration.t
  def configure(options), do: Configuration.new(options)

  @doc """
  Updates the configuration. Accepts the same options as `configure/1`
  """
  @spec configure(Configuration.t, list) :: Configuration.t
  def configure(config, options), do: Configuration.update(config, options)

  @doc """
  Returns `true` if the given string contains a word considered profane by the given configuration
  """
  @spec profane?(String.t, Configuration.t) :: boolean
  def profane?(string, config) do
    Regex.match?(config.regex, string)
  end

  @doc """
  Returns a list of profanities found in the given string. All occurences are returned, duplicates may thus occur
  """
  @spec profanities(String.t, Configuration.t) :: [String.t]
  def profanities(string, config) do
    # Regex.scan/2 yields one single-element list per match (no capture groups
    # in the configured regex), so unwrap each.
    scanned = Regex.scan(config.regex, string)
    Enum.map(scanned, fn [word] -> word end)
  end

  @doc """
  Replace all profanities by a placeholder as defined by the `replacement` option of the current configuration
  """
  @spec sanitize(String.t, Configuration.t) :: String.t
  def sanitize(string, config) do
    replace_match = fn word -> Replacement.replace(word, config.replacement) end
    Regex.replace(config.regex, string, replace_match)
  end

  @doc """
  Replace all profanities by a placeholder as defined by the given `replacement` strategy
  """
  @spec sanitize(String.t, Configuration.t, replacement) :: String.t
  def sanitize(string, config, replacement) do
    sanitize(string, %{config | replacement: replacement})
  end
end
|
lib/expletive.ex
| 0.877752 | 0.682574 |
expletive.ex
|
starcoder
|
defmodule Meeseeks.Selector do
  @moduledoc """
  A Selector packages a way of testing whether a node matches some
  condition, plus three optional extras: a `Meeseeks.Selector.Combinator`,
  a list of filter selectors, and a validation step.

  The css selector `ul > li`, for example, pairs the selector `ul` with the
  combinator `> li`, which Meeseeks represents as:

  ```elixir
  alias Meeseeks.Selector.Combinator
  alias Meeseeks.Selector.Element
  %Element{
    selectors: [%Element.Tag{value: "ul"}],
    combinator: %Combinator.ChildElements{
      selector: %Element{selectors: [%Element.Tag{value: "li"}]}}}
  ```

  Defining a struct that adopts the `Meeseeks.Selector` behaviour is all it
  takes to extend Meeseeks's ability to query, and selectors make a simple
  compilation target for dsls.

  ## Examples

  ```elixir
  defmodule Selector.Text.Contains do
    use Meeseeks.Selector
    alias Meeseeks.Document
    defstruct value: ""
    def match(selector, %Document.Text{} = text, _document, _context) do
      String.contains?(text.content, selector.value)
    end
    def match(_selector, _node, _document, _context) do
      false
    end
  end
  ```
  """

  alias Meeseeks.{Context, Document, Error, Selector}

  @type t :: struct

  @doc """
  Invoked to test the selector against a node in the context of a document.
  May return a plain boolean or a `{boolean, context}` tuple.
  """
  @callback match(
              selector :: t,
              node :: Document.node_t(),
              document :: Document.t(),
              context :: Context.t()
            ) :: boolean | {boolean, Context.t()}

  @doc """
  Invoked to fetch the selector's combinator, or `nil` when it has none.
  """
  @callback combinator(selector :: t) :: Selector.Combinator.t() | nil

  @doc """
  Invoked to fetch the selector's filter selectors: a (possibly empty)
  list, or `nil` when it has none.

  Filters are applied to the nodes a selector matched before those nodes
  are walked with the selector's combinator (when present) or accumulated
  (when not).
  """
  @callback filters(selector :: t) :: [t] | nil

  @doc """
  Invoked to validate a selector, yielding `{:ok, selector}` when it is
  valid and `{:error, reason}` when it is not.

  Validation is useful when selectors are built dynamically (parsed from a
  string, for instance) - see the `Meeseeks.Selector.Element.PseudoClass.*`
  selectors for examples. Selection itself never calls `validate`, so a
  validator costs nothing at selection time.
  """
  @callback validate(selector :: t) :: {:ok, t} | {:error, String.t()}

  # match

  @doc """
  Tests the selector against a node in the context of a document. May
  return a plain boolean or a `{boolean, context}` tuple.
  """
  @spec match(t, Document.node_t(), Document.t(), Context.t()) :: boolean | {boolean, Context.t()}
  def match(%{__struct__: module} = selector, node, document, context) do
    # Dispatch to the behaviour implementation named by the struct module.
    module.match(selector, node, document, context)
  end

  # combinator

  @doc """
  Fetches the selector's combinator, or `nil` when it has none.
  """
  @spec combinator(t) :: Selector.Combinator.t() | nil
  def combinator(%{__struct__: module} = selector), do: module.combinator(selector)

  # filters

  @doc """
  Fetches the selector's filter selectors: a (possibly empty) list, or
  `nil` when it has none.
  """
  @spec filters(t) :: [t] | nil
  def filters(%{__struct__: module} = selector), do: module.filters(selector)

  # validate

  @doc """
  Validates the selector, yielding `{:ok, selector}` when valid and
  `{:error, %Meeseeks.Error{}}` when not.
  """
  @spec validate(t) :: {:ok, t} | {:error, Error.t()}
  def validate(%{__struct__: module} = selector), do: module.validate(selector)

  # validate!

  @doc """
  Validates the selector, returning it when valid and raising a
  `Meeseeks.Error` when not.
  """
  @spec validate!(t) :: t | no_return
  def validate!(selector) do
    case validate(selector) do
      {:error, %Error{} = error} -> raise error
      {:ok, validated} -> validated
    end
  end

  # __using__

  @doc false
  defmacro __using__(_) do
    # Default implementations: everything but match/4 is optional.
    quote do
      @behaviour Selector

      @impl Selector
      def match(_, _, _, _), do: raise("match/4 not implemented")

      @impl Selector
      def combinator(_), do: nil

      @impl Selector
      def filters(_), do: nil

      @impl Selector
      def validate(selector), do: {:ok, selector}

      defoverridable match: 4, combinator: 1, filters: 1, validate: 1
    end
  end
end
|
lib/meeseeks/selector.ex
| 0.919953 | 0.829975 |
selector.ex
|
starcoder
|
defmodule Pwned do
  @moduledoc """
  Check whether a password or an email address has been pwned.
  """

  alias Pwned.Utils.EmailFlattener
  alias Pwned.Utils.EmailReducer

  @doc """
  Uses [have i been pwned?](https://haveibeenpwned.com) to verify whether a
  password has appeared in a data breach. To protect the password being
  searched, its full value is never sent over the network - only the first
  five characters of its SHA-1 digest are transmitted.

  Returns `{:ok, count}` when the password was found, `{:ok, false}` when it
  was not, and `:error` on failure.
  """
  @spec check_password(String.t()) :: {:ok, integer} | {:ok, false} | :error
  def check_password(password) do
    {prefix, suffix} = hash(password)

    with {:ok, response} <- range_client().get(prefix),
         {:ok, range} <- parse_password_response(response) do
      do_check(range, suffix)
    else
      :error -> :error
    end
  end

  @doc """
  Uses API v3 of [have i been pwned?](https://haveibeenpwned.com) to check
  whether an email address has appeared in a data breach, returning the
  total count of appearances.

  Requires a purchased hibp-api-key; following a 12-factor methodology, the
  key is read from the system's environment variables by the configured API
  client.

  Returns `{:pwned_email, count}`, `{:safe_email, message}`,
  `{:error, message}`, or `:error`.
  """
  def check_email(email) do
    with {:pwned_email, body} <- api_client().get(email),
         {:ok, counts} <- parse_email_response(body) do
      total =
        counts
        |> EmailFlattener.flatten()
        |> EmailReducer.reduce_email_list()

      {:pwned_email, total}
    else
      {:safe_email, message} -> {:safe_email, message}
      {:error, message} -> {:error, message}
      _ -> :error
    end
  end

  # Upper-case hex SHA-1 digest, split into the 5-char prefix sent to the
  # API and the remaining suffix matched locally.
  defp hash(password) do
    :sha
    |> :crypto.hash(password)
    |> Base.encode16()
    |> String.split_at(5)
  end

  # Each response line is "SUFFIX:COUNT"; parse into [suffix, count] pairs.
  defp parse_password_response(response) do
    pairs =
      for line <- String.split(response, "\r\n") do
        String.split(line, ":")
      end

    {:ok, pairs}
  end

  # Extract every "PwnCount":N fragment from the raw JSON body.
  defp parse_email_response(response) do
    {:ok, Regex.scan(~r/"PwnCount":\d+/, response)}
  end

  defp do_check(range, suffix) do
    case find_hash(range, suffix) do
      {:ok, false} -> {:ok, false}
      {:ok, count} -> parse_count(count)
    end
  end

  defp find_hash(range, hash) do
    range
    |> Enum.find(&(List.first(&1) == hash))
    |> handle_hash()
  end

  defp handle_hash(nil), do: {:ok, false}
  defp handle_hash([_hash, count]), do: {:ok, count}

  # The count arrives as a string; surface :error when it is not numeric.
  defp parse_count(count) do
    case Integer.parse(count) do
      :error -> :error
      {value, _rest} -> {:ok, value}
    end
  end

  defp range_client, do: Application.get_env(:pwned_coretheory, :range_client, Pwned.Range.HTTPClient)
  defp api_client, do: Application.get_env(:pwned_coretheory, :api_client, Pwned.Utils.APIClient)
end
|
lib/pwned.ex
| 0.719088 | 0.580322 |
pwned.ex
|
starcoder
|
defmodule Aoc2021.Day16 do
@moduledoc """
See https://adventofcode.com/2021/day/16
"""
alias Aoc2021.Day16.Packet
@type operator() :: :literal | :sum | :prod | :min | :max | :gt | :lt | :eq
defmodule Packet do
  @moduledoc false
  # One decoded BITS packet: a literal value or an operator over
  # sub-packets.

  alias Aoc2021.Day16

  defstruct [:version, :type, :value]

  @type t() :: %__MODULE__{
          version: non_neg_integer(),
          type: Day16.operator(),
          value: t()
        }

  @spec new(non_neg_integer(), :literal, non_neg_integer()) :: t()
  @spec new(non_neg_integer(), Day16.operator(), [t()]) :: t()
  # A literal carries its integer value directly.
  def new(version, :literal, value),
    do: %__MODULE__{version: version, type: :literal, value: value}

  # Variadic operators take one or more sub-packets.
  def new(version, op, [_ | _] = subpackets) when op in [:sum, :prod, :min, :max],
    do: %__MODULE__{version: version, type: op, value: subpackets}

  # Comparison operators take exactly two sub-packets.
  def new(version, op, [_, _] = pair) when op in [:gt, :lt, :eq],
    do: %__MODULE__{version: version, type: op, value: pair}
end
defmodule Parser do
  @moduledoc false
  # Parses the BITS transmission format (Advent of Code 2021, day 16).
  #
  # Every packet starts with a 3-bit version and a 3-bit type ID. Type ID 4
  # is a literal value; all other types are operators whose payload is a
  # list of sub-packets prefixed by a 1-bit length-type ID.
  #
  # NOTE: the original source contained anonymization-garbled bitstring
  # segments (IPv6-looking junk) in the first and third clause heads; they
  # have been restored to the literal `4::3` and `1::1` the format requires.

  alias Aoc2021.Day16.Packet

  # Literal packet (type ID 4).
  def parse_packet(<<v::3, 4::3, rest::bitstring>>) do
    {value, rest} = parse_value(rest)
    {Packet.new(v, :literal, value), rest}
  end

  # Operator packet, length type 0: the next 15 bits give the total
  # bit-length of the contained sub-packets.
  def parse_packet(<<v::3, t::3, 0::1, bits::15, rest::bitstring>>) do
    <<contents::bitstring-size(bits), rest::bitstring>> = rest
    ps = parse_until_empty(contents, [])
    {Packet.new(v, parse_type(t), ps), rest}
  end

  # Operator packet, length type 1: the next 11 bits give the number of
  # immediately-contained sub-packets.
  def parse_packet(<<v::3, t::3, 1::1, count::11, rest::bitstring>>) do
    {ps, rest} = parse_until_count(rest, count, [])
    {Packet.new(v, parse_type(t), ps), rest}
  end

  # A literal value is encoded in 5-bit groups: a continuation bit followed
  # by 4 payload bits. A leading 0 marks the final group.
  defp parse_value(rest), do: parse_value(rest, <<>>)

  defp parse_value(<<0::1, v::bitstring-size(4), rest::bitstring>>, acc) do
    # Final group: concatenate all payload bits and read them as one integer.
    bin = <<acc::bitstring, v::bitstring>>
    len = bit_size(bin)
    <<value::integer-size(len)>> = bin
    {value, rest}
  end

  defp parse_value(<<1::1, v::bitstring-size(4), rest::bitstring>>, acc) do
    parse_value(rest, <<acc::bitstring, v::bitstring>>)
  end

  # Numeric type IDs map onto operator atoms (4 is the literal marker).
  defp parse_type(0), do: :sum
  defp parse_type(1), do: :prod
  defp parse_type(2), do: :min
  defp parse_type(3), do: :max
  defp parse_type(4), do: :literal
  defp parse_type(5), do: :gt
  defp parse_type(6), do: :lt
  defp parse_type(7), do: :eq

  # Consume packets until the (length-delimited) contents are exhausted.
  defp parse_until_empty(<<>>, acc), do: Enum.reverse(acc)

  defp parse_until_empty(rest, acc) do
    {p, rest} = parse_packet(rest)
    parse_until_empty(rest, [p | acc])
  end

  # Consume exactly `count` packets, returning them plus the leftover bits.
  defp parse_until_count(rest, 0, acc), do: {Enum.reverse(acc), rest}

  defp parse_until_count(rest, count, acc) do
    {p, rest} = parse_packet(rest)
    parse_until_count(rest, count - 1, [p | acc])
  end
end
defmodule Evaluator do
  @moduledoc false
  # Recursively evaluates a decoded packet tree to its integer result.

  alias Aoc2021.Day16.Packet

  @spec evaluate(Packet.t()) :: non_neg_integer()
  # A literal evaluates to its own value.
  def evaluate(%Packet{type: :literal, value: value}), do: value

  # Aggregate operators fold the evaluated sub-packets.
  def evaluate(%Packet{type: :sum, value: subs}),
    do: subs |> Enum.map(&evaluate/1) |> Enum.sum()

  def evaluate(%Packet{type: :prod, value: subs}),
    do: subs |> Enum.map(&evaluate/1) |> Enum.product()

  def evaluate(%Packet{type: :min, value: subs}),
    do: subs |> Enum.map(&evaluate/1) |> Enum.min()

  def evaluate(%Packet{type: :max, value: subs}),
    do: subs |> Enum.map(&evaluate/1) |> Enum.max()

  # Comparison operators take exactly two sub-packets and yield 1 or 0.
  def evaluate(%Packet{type: :gt, value: [a, b]}), do: to_bit(evaluate(a) > evaluate(b))
  def evaluate(%Packet{type: :lt, value: [a, b]}), do: to_bit(evaluate(a) < evaluate(b))
  def evaluate(%Packet{type: :eq, value: [a, b]}), do: to_bit(evaluate(a) == evaluate(b))

  defp to_bit(true), do: 1
  defp to_bit(false), do: 0
end
@spec solve_part1() :: non_neg_integer()
@spec solve_part1(Path.t()) :: non_neg_integer()
# Part 1: sum every version number in the packet hierarchy.
def solve_part1(path \\ "priv/day16/input.txt") do
  parse_and_sum(File.read!(path))
end
@spec solve_part2() :: non_neg_integer()
@spec solve_part2(Path.t()) :: non_neg_integer()
# Part 2: evaluate the expression encoded by the outermost packet.
def solve_part2(path \\ "priv/day16/input.txt") do
  Evaluator.evaluate(parse(File.read!(path)))
end
# Hex input -> binary -> decoded outermost packet (trailing bits dropped).
defp parse(string) do
  string
  |> Base.decode16!()
  |> Parser.parse_packet()
  |> elem(0)
end
# Decode the hex string and total the version numbers of all packets.
def parse_and_sum(string), do: string |> parse() |> sum_versions()
# A literal contributes only its own version.
def sum_versions(%Packet{version: version, type: :literal}), do: version

# An operator contributes its version plus those of all sub-packets.
def sum_versions(%Packet{version: version, value: children}) do
  Enum.reduce(children, version, fn child, acc -> acc + sum_versions(child) end)
end
end
|
lib/aoc2021/day16.ex
| 0.718298 | 0.555918 |
day16.ex
|
starcoder
|
defmodule Honeylixir do
  @moduledoc """
  Used to interact with honeycomb.io's API for tracing and other data.

  ## Installation

  Adding Honeylixir to your mix.exs as a dependency should suffice:

  ```
  def deps() do
    [
      {:honeylixir, "~> 0.3.0"}
    ]
  end
  ```

  ## Configuration

  Any of the following may be set via Config:

  |Name|Type|Description|Default|
  |---|---|---|---|
  |`:api_host`|`string`|API to send events to|https://api.honeycomb.io|
  |`:sample_rate`|`integer`|Rate at which events will be sampled represented as a percented. e.g., use 10 to send 10% of events|1|
  |`:team_writekey`|`string`|API key used to send events|`nil`|
  |`:dataset`|`string`/`atom`|Dataset to send the events to|`nil`|
  |`:service_name`|`string`/`atom`|Name of your service which will be added as a field on all events at the key `"service_name"`|`nil`|
  |`:max_queue_size`|`integer`|How large the transmission queue can grow before events are dropped|`10_000`|
  |`:batch_size`|`integer`|How many events to send per batch|`50`|
  |`:batch_timing`|`integer`|Time in milliseconds to try sending events from the queue in a batch|`100`|
  |`:max_send_processes`|`integer`|How many Processes to use for sending events in the background|`30`|
  |`:max_response_queue_size`|`integer`|How many responses for event sending can be held in the queue. Once reached, responses are dropped|`100`|

  Setting `:team_writekey` and `:dataset` is required for events to be
  accepted; without them Honeycomb returns non-200 responses and the events
  are dropped. Example configuration (distillery or compile-time Mix):

  ```
  config :honeylixir,
    dataset: :"my-company",
    team_writekey: System.get_env("HONEYLIXIR_WRITEKEY"),
    service_name: :my_application
  ```

  All of these also live on event attributes, so they can be changed per
  event:

  ```
  event = Honeylixir.Event.create()
  event = %{event | api_host: "https://some-other-valid-host.com"}
  ```

  ## Usage

  ### Sending events

  Build a `Honeylixir.Event`, add fields, and send it off asynchronously:

  ```
  Honeylixir.Event.create()
  |> Honeylixir.Event.add_field("a_field", "my_value")
  |> Honeylixir.Event.add_field("something-else", %{"nested" => "value"})
  |> Honeylixir.Event.send()
  ```

  Any value can be used but field names are **REQUIRED** to be strings;
  non-string fields raise a no matching function clause error.

  ### Checking responses

  Attach metadata to events to pull `Honeylixir.Response`s describing what
  happened to them; they are stored in the `Honeylixir.ResponseQueue`:

  ```
  Honeylixir.Event.create() |> Honeylixir.Event.send()
  # wait for event to async send off
  resp = Honeylixir.ResponseQueue.pop()
  ```
  """

  use Application

  @doc false
  def start(_type, _args) do
    Supervisor.start_link(children(), strategy: :one_for_one)
  end

  # Supervision tree: response store, transmission queue, and a bounded
  # pool of sender tasks.
  defp children do
    response_queue =
      {Honeylixir.ResponseQueue,
       %{max_response_queue_size: config(:max_response_queue_size, 100)}}

    transmission_queue =
      {Honeylixir.TransmissionQueue,
       %{
         max_queue_size: config(:max_queue_size, 10_000),
         batch_size: config(:batch_size, 50),
         batch_timing: config(:batch_timing, 100)
       }}

    task_supervisor =
      {Task.Supervisor,
       name: Honeylixir.TransmissionSupervisor,
       max_children: config(:max_send_processes, 30)}

    [response_queue, transmission_queue, task_supervisor]
  end

  defp config(key, default), do: Application.get_env(:honeylixir, key, default)

  @doc """
  Generates a random string of 16 bytes encoded in base 16.
  """
  @spec generate_long_id() :: String.t()
  def generate_long_id, do: random_hex(16)

  @doc """
  Generates a random string of 8 bytes encoded in base 16.
  """
  @spec generate_short_id() :: String.t()
  def generate_short_id, do: random_hex(8)

  defp random_hex(byte_count) do
    byte_count
    |> :crypto.strong_rand_bytes()
    |> Base.encode16(case: :lower)
  end
end
|
lib/honeylixir.ex
| 0.852537 | 0.944485 |
honeylixir.ex
|
starcoder
|
defmodule Hangman.Shard.Handler do
  @moduledoc """
  Module runs game play for the given shard of secrets
  as determined by `Shard.Flow`. Basically, runs a chunk
  of the overall original secrets vector.
  Module drives `Player.Controller`, while
  setting up the proper `Game` server and `Event` consumer states beforehand.
  Simply stated it politely nudges the player to proceed to the next
  course of action or make the next guess.
  When the game is finished it politely ends the game playing returning the
  shard_key and game snapshot tuple.
  The twist to this is that these shard handlers are run
  in parallel and concurrently thanks to the concurrent map reduce
  setup of `Flow`
  """

  alias Hangman.{Game, Player, Handler.Accumulator}
  # Brings in the repeatedly/next/done accumulator DSL used in play/1.
  import Accumulator
  require Logger

  # Back-off (ms) before re-polling the controller on a :retry result;
  # per the inline comment below this papers over a gproc "no proc" race.
  @sleep 3000

  @doc """
  Sets up the `game` server and per player `event` server.
  Used primarily by the collation logic in Flow.
  """
  @spec setup({Player.id(), list[String.t()]}) :: Player.id()
  def setup({id, secrets})
      when (is_binary(id) or is_tuple(id)) and is_list(secrets) do
    # Grab game pid first from game server controller
    game_pid = Game.Server.Controller.get_server(id, secrets)
    # Start Worker in Controller
    Player.Controller.start_worker(id, :robot, false, game_pid)
    id
  end

  @doc """
  Play handles client play loop for particular player shard_key.

  Polls `Player.Controller.proceed/1` until it signals `:exit`, collecting
  each `:action` status along the way; returns `{shard_key, statuses}`.
  """
  @spec play(Player.id()) :: {Player.id(), list(String.t())}
  def play(shard_key) do
    # Compose game status accumulator until we have received
    # an :exit value from the Player Controller.
    # NOTE(review): repeatedly/next/done are macros from
    # Hangman.Handler.Accumulator (not visible here) — presumably next/1
    # appends to the accumulated list and done/1 terminates the loop with a
    # final value; confirm against that module.
    list =
      repeatedly do
        case Player.Controller.proceed(shard_key) do
          # Transitional states: nothing to record, keep looping.
          {code, _status} when code in [:begin, :transit] ->
            :ok

          {:retry, _status} ->
            # Stop gap for gproc no proc error
            Process.sleep(@sleep)

          {:action, status} ->
            # collect action guess result
            next(status)

          {:exit, status} ->
            # Stop both the player client worker and the corresponding game server
            Player.Controller.stop_worker(shard_key)
            Game.Server.Controller.stop_server(shard_key)

            # signal end of accumulator and capture last status result
            done(status)

          _ ->
            raise "Unknown Player state"
        end
      end

    {shard_key, list}
  end
end
|
lib/hangman/shard_handler.ex
| 0.759671 | 0.59075 |
shard_handler.ex
|
starcoder
|
defmodule Workflows.Retrier do
  @moduledoc """
  Implements a state retrier.

  ## References

  * https://states-language.net/#errors
  """

  alias Workflows.Error

  @type t :: %__MODULE__{
          error_equals: String.t(),
          interval_seconds: pos_integer(),
          max_attempts: non_neg_integer(),
          backoff_rate: float()
        }

  defstruct [:error_equals, :interval_seconds, :max_attempts, :backoff_rate]

  # States-language defaults for the optional fields.
  @defaults %{interval_seconds: 1, max_attempts: 3, backoff_rate: 2.0}

  @doc """
  Create a new Retrier.

  `"ErrorEquals"` is required; the remaining attributes fall back to the
  defaults above.
  """
  @spec create(any()) :: {:ok, t()} | {:error, term()}
  def create(%{"ErrorEquals" => errors} = attrs) do
    do_create(
      errors,
      Map.get(attrs, "IntervalSeconds", @defaults.interval_seconds),
      Map.get(attrs, "MaxAttempts", @defaults.max_attempts),
      Map.get(attrs, "BackoffRate", @defaults.backoff_rate)
    )
  end

  def create(_attrs), do: {:error, :missing_error_equals}

  @doc "Does this retrier apply to the given error? `States.ALL` matches any error."
  @spec matches?(t(), Error.t()) :: boolean()
  def matches?(retrier, error) do
    Enum.any?(retrier.error_equals, &(&1 == "States.ALL" or &1 == error.name))
  end

  @doc "Seconds to wait before the given retry."
  @spec wait_seconds(t(), pos_integer()) :: float()
  def wait_seconds(retrier, retry_count) do
    retry_count * retrier.backoff_rate + retrier.interval_seconds
  end

  ## Private

  # Validations run in the same order as the original guard clauses:
  # errors, then interval, attempts, and backoff rate.
  defp do_create([], _interval_seconds, _max_attempts, _backoff_rate),
    do: {:error, :empty_errors}

  defp do_create(errors, interval_seconds, max_attempts, backoff_rate) do
    cond do
      not is_integer(interval_seconds) or interval_seconds <= 0 ->
        {:error, :invalid_interval_seconds}

      not is_integer(max_attempts) or max_attempts < 0 ->
        {:error, :invalid_max_attempts}

      backoff_rate < 1.0 ->
        {:error, :invalid_backoff_rate}

      true ->
        {:ok,
         %__MODULE__{
           error_equals: errors,
           interval_seconds: interval_seconds,
           max_attempts: max_attempts,
           backoff_rate: backoff_rate
         }}
    end
  end
end
|
lib/workflows/retrier.ex
| 0.876905 | 0.470737 |
retrier.ex
|
starcoder
|
defmodule GoCD.Crypt do
  @moduledoc false

  require Logger

  @doc ~S"""
  Decrypt a GoCD secure variable.

  Currently supports:
  - AES
  - DES

  Note: DES has been replaced with AES in version 17 and will be deprecated in 18.
  """
  @spec decrypt(String.t(), map) :: {:ok, String.t()} | {:error, atom}
  # AES values carry an "AES:" prefix; anything else is treated as DES.
  def decrypt("AES:" <> iv_and_data, ciphers), do: aes_decrypt(iv_and_data, ciphers.aes)
  def decrypt(des, ciphers), do: des_decrypt(des, ciphers.des)

  @doc ~S"""
  Encrypt GoCD encrypted variable.
  """
  @spec encrypt(String.t(), map) :: {:ok, String.t()} | {:error, atom}
  def encrypt(value, ciphers) do
    # NOTE(review): both branches return the value unmodified — no actual
    # encryption is performed here. Looks like only cipher availability is
    # checked (with a deprecation warning for DES); confirm this is intended.
    cond do
      _aes = ciphers.aes ->
        {:ok, value}

      _des = ciphers.des ->
        Logger.warn(fn ->
          "GoCD: Encrypting with DEPRECATED `des` cipher. Please upgrade to `aes`."
        end)

        {:ok, value}

      # :no_ciphers is a truthy atom, so this clause acts as the default
      # branch when neither cipher is configured.
      :no_ciphers ->
        {:error, :no_ciphers_given}
    end
  end

  # Decrypts "iv_base64:data_base64" with AES-128-CBC, then strips trailing
  # padding whose length is given by the last byte (PKCS#7-style).
  # NOTE(review): :crypto.block_decrypt/4 was removed in recent OTP releases
  # (use :crypto.crypto_one_time/5 there) — confirm the targeted OTP version.
  @spec aes_decrypt(String.t(), binary | nil) :: {:ok, String.t()} | {:error, atom}
  defp aes_decrypt(_data, nil), do: {:error, :missing_aes_cipher}

  defp aes_decrypt(data, cipher) do
    with [iv, data_base64] <- String.split(data, ":", trim: true),
         {:ok, iv} <- Base.decode64(iv),
         {:ok, data_bin} <- Base.decode64(data_base64),
         decrypted <- :crypto.block_decrypt(:aes_cbc128, cipher, iv, data_bin),
         padding <- :binary.last(decrypted) do
      {:ok, :binary.part(decrypted, 0, byte_size(decrypted) - padding)}
    else
      # Malformed split result or failed base64 decode.
      error = {:error, _} -> error
      :error -> {:error, :invalid_aes_encoding}
      _ -> {:error, :invalid_aes_secret}
    end
  end

  # Decrypts base64 DES-CBC data with an all-zero IV, stripping padding the
  # same way as aes_decrypt/2.
  @spec des_decrypt(String.t(), binary | nil) :: {:ok, String.t()} | {:error, atom}
  defp des_decrypt(_data, nil), do: {:error, :missing_des_cipher}

  defp des_decrypt(data, cipher) do
    with {:ok, bin} <- Base.decode64(data),
         decrypted <- :crypto.block_decrypt(:des_cbc, cipher, <<0, 0, 0, 0, 0, 0, 0, 0>>, bin),
         padding <- :binary.last(decrypted) do
      {:ok, :binary.part(decrypted, 0, byte_size(decrypted) - padding)}
    end
  end
end
|
lib/gocd/crypt.ex
| 0.84039 | 0.443118 |
crypt.ex
|
starcoder
|
defmodule GrowthBook.FeatureResult do
  @moduledoc """
  Struct holding the outcome of evaluating a Feature.

  Holds the result of a feature evaluation; use it to check whether a
  feature is enabled and, when configured, what data it provides.
  """

  @typedoc """
  Feature result

  Produced by evaluating a `GrowthBook.Feature`. Keys:

  - **`value`** (`term()`) - The assigned value of the feature
  - **`on`**/**`on?`** (`boolean()`) - The assigned value cast to a boolean
  - **`off`**/**`off?`** (`boolean()`) - The assigned value cast to a boolean and then negated
  - **`source`** (`t:source/0`) - The source of the feature result
  - **`experiment`** (`t:GrowthBook.Experiment.t/0` or `nil`) - When source is `:experiment`, this
    will be an `%GrowthBook.Experiment{}` struct
  - **`experiment_result`** (`t:GrowthBook.ExperimentResult.t/0` or `nil`) - When source is
    `:experiment`, this will be an `%GrowthBook.ExperimentResult{}` struct
  """
  @type t() :: %__MODULE__{
          value: term(),
          source: source(),
          on: boolean(),
          on?: boolean(),
          off: boolean(),
          off?: boolean(),
          experiment: GrowthBook.Experiment.t() | nil,
          experiment_result: GrowthBook.ExperimentResult.t() | nil
        }

  @typedoc "The source of a feature"
  @type source() :: :unknown_feature | :default_value | :force | :experiment

  @enforce_keys [:value, :source, :on, :off, :on?, :off?]
  defstruct value: nil,
            on: nil,
            on?: nil,
            off: nil,
            off?: nil,
            source: :unknown_feature,
            experiment: nil,
            experiment_result: nil

  @doc "Helper function to convert string sources to atoms"
  @spec feature_source_from_string(String.t()) :: source()
  def feature_source_from_string(source) do
    # Anything outside the known set maps to :unknown_feature.
    case source do
      "defaultValue" -> :default_value
      "force" -> :force
      "experiment" -> :experiment
      _unknown -> :unknown_feature
    end
  end
end
|
lib/growth_book/feature_result.ex
| 0.900776 | 0.731155 |
feature_result.ex
|
starcoder
|
defmodule Annex.DataAssertion do
  @moduledoc """
  A helper module for making assertions about the returns of a Data
  type's callbacks.
  """
  use ExUnit.CaseTemplate

  alias Annex.{
    Data,
    Shape
  }

  # Required so Shape.is_shape/1 can appear in guards below.
  require Shape

  # Casts `data` to `type` for `shape`, asserting along the way that the
  # flattened element count equals the shape's product and that the cast
  # result really is of `type`. Returns the casted value.
  def cast(type, data, shape) when Shape.is_shape(shape) do
    product = Shape.product(shape)

    # Flatten through the Data callbacks when `data` already is of `type`,
    # otherwise fall back to a plain Enum conversion.
    flat_data =
      if is_type?(type, data) do
        to_flat_list(type, data)
      else
        Enum.into(data, [])
      end

    n_elements = length(flat_data)

    assert product == n_elements, """
    The number of elements in a flattened data structure must be the same as the product of the
    elements of a shape.
    product: #{inspect(product)}
    n_elements: #{inspect(n_elements)}
    type: #{inspect(type)}
    data: #{inspect(data)}
    shape: #{inspect(shape)}
    """

    casted = Data.cast(type, data, shape)

    assert Data.is_type?(type, casted) == true, """
    Data.cast/3 failed to produce the expected type.
    invalid_result: #{inspect(casted)}
    type: #{inspect(type)}
    data: #{inspect(data)}
    shape: #{inspect(shape)}
    """

    casted
  end

  # Flattens `data` via Data.to_flat_list/2 and asserts the result is a
  # flat list of floats before returning it.
  def to_flat_list(type, data) do
    flattened = Data.to_flat_list(type, data)
    assert is_list(flattened) == true

    assert Enum.all?(flattened, &is_float/1), """
    Data.to_flat_list/2 failed to produce a flat list of floats.
    invalid_result: #{inspect(flattened)}
    type: #{inspect(type)}
    data: #{inspect(data)}
    """

    flattened
  end

  # Asserts Data.is_type?/2 returns a strict boolean, then returns it.
  def is_type?(type, data) do
    result = Data.is_type?(type, data)

    assert result in [true, false], """
    Annex.Data.is_type?/2 failed to return a boolean.
    invalid_result: #{inspect(result)}
    type: #{inspect(type)}
    data: #{inspect(data)}
    """

    result
  end

  # Asserts Data.shape/2 returns a valid shape, then returns it.
  # NOTE(review): this uses Shape.is_shape?/1 (with ?) while the guard above
  # uses Shape.is_shape/1 — presumably a predicate/guard pair; confirm both
  # exist in Annex.Shape.
  def shape(type, data) do
    result = Data.shape(type, data)

    assert Shape.is_shape?(result) == true, """
    Annex.Data.shape/2 failed to return a valid shape.
    invalid_result: #{inspect(result)}
    type: #{inspect(type)}
    data: #{inspect(data)}
    """

    result
  end

  # Asserts `data`'s shape is valid and fully concrete (integers only).
  def shape_is_valid(type, data) do
    shape = Data.shape(type, data)

    assert Shape.is_shape?(shape), """
    For Annex.Data.shape/2 failed to produce a valid shape.
    A shape must be a non-empty tuple of integer | :any
    invalid_shape: #{inspect(shape)}
    type: #{inspect(type)}
    data: #{inspect(data)}
    """

    shape_is_all_integers(shape)
  end

  # Asserts every element of `shape` is an integer (i.e. no :any wildcard).
  def shape_is_all_integers(shape) do
    all_ints? = Enum.all?(shape, &is_integer/1)

    assert all_ints? == true, """
    Annex.Data.shape/2 failed to produce a valid shape.
    Data shape must be concrete; a list of integers only.
    invalid_shape: #{inspect(shape)}
    """

    all_ints?
  end

  # Asserts Shape.product/1 of a concrete shape yields an integer, then
  # returns that product.
  def shape_product(shape) when Shape.is_shape(shape) do
    assert shape_is_all_integers(shape) == true
    product = Shape.product(shape)

    assert is_integer(product) == true, """
    Shape.product/1 failed to produce an integer.
    invalid_result: #{inspect(product)}
    shape: #{inspect(shape)}
    """

    product
  end
end
|
test/support/data_assertion.ex
| 0.852414 | 0.842151 |
data_assertion.ex
|
starcoder
|
defmodule Benchee.Statistics do
@moduledoc """
Statistics related functionality that is meant to take the raw benchmark data
and then compute statistics like the average and the standard deviation etc.
See `statistics/1` for a breakdown of the included statistics.
"""
alias Benchee.{CollectionData, Conversion.Duration, Scenario, Suite, Utility.Parallel}
require Integer
defstruct [
:average,
:ips,
:std_dev,
:std_dev_ratio,
:std_dev_ips,
:median,
:percentiles,
:mode,
:minimum,
:maximum,
:relative_more,
:relative_less,
:absolute_difference,
sample_size: 0
]
@typedoc """
Careful with the mode, might be multiple values, one value or nothing.
"""
@type mode :: [number] | number | nil
@typedoc """
All the statistics `statistics/1` computes from the samples.
Overview of all the statistics Benchee currently provides:
* average - average run time of the job in μs (the lower the better)
* ips - iterations per second, how often can the given function be
executed within one second (the higher the better)
* std_dev - standard deviation, a measurement how much results vary
(the higher the more the results vary)
* std_dev_ratio - standard deviation expressed as how much it is relative to
the average
* std_dev_ips - the absolute standard deviation of iterations per second
(= ips * std_dev_ratio)
* median - when all measured times are sorted, this is the middle
value (or average of the two middle values when the number of times is
even). More stable than the average and somewhat more likely to be a
typical value you see.
* percentiles - a map of percentile ranks. These are the values below
which x% of the run times lie. For example, 99% of run times are shorter
than the 99th percentile (99th %) rank.
is a value for which 99% of the run times are shorter.
* mode - the run time(s) that occur the most. Often one value, but
can be multiple values if they occur the same amount of times. If no value
occurs at least twice, this value will be nil.
* minimum - the smallest sample measured for the scenario
* maximum - the biggest sample measured for the scenario
* relative_more - relative to the reference (usually the fastest scenario) how much more
was the average of this scenario. E.g. for reference at 100, this scenario 200 then it
is 2.0.
* relative_less - relative to the reference (usually the fastest scenario) how much less
was the average of this scenario. E.g. for reference at 100, this scenario 200 then it
is 0.5.
* absolute_difference - relative to the reference (usually the fastest scenario) what is
the difference of the averages of the scenarios. e.g. for reference at 100, this
scenario 200 then it is 100.
* sample_size - the number of run time measurements taken
"""
@type t :: %__MODULE__{
average: float,
ips: float | nil,
std_dev: float,
std_dev_ratio: float,
std_dev_ips: float | nil,
median: number,
percentiles: %{number => float},
mode: mode,
minimum: number,
maximum: number,
relative_more: float | nil | :infinity,
relative_less: float | nil | :infinity,
absolute_difference: float | nil,
sample_size: integer
}
@typedoc """
The samples a `Benchee.Collect` collected to compute statistics from.
"""
@type samples :: [number]
@doc """
Takes a suite with scenarios and their data samples, adds the statistics to the
scenarios. For an overview of what the statistics mean see `t:t/0`.
Note that this will also sort the scenarios fastest to slowest to ensure a consistent order
of scenarios in all used formatters.
## Examples
iex> scenarios = [
...> %Benchee.Scenario{
...> job_name: "My Job",
...> run_time_data: %Benchee.CollectionData{
...> samples: [200, 400, 400, 400, 500, 500, 500, 700, 900]
...> },
...> memory_usage_data: %Benchee.CollectionData{
...> samples: [200, 400, 400, 400, 500, 500, 500, 700, 900]
...> },
...> input_name: "Input",
...> input: "Input"
...> }
...> ]
iex> suite = %Benchee.Suite{scenarios: scenarios}
iex> Benchee.Statistics.statistics(suite)
%Benchee.Suite{
scenarios: [
%Benchee.Scenario{
job_name: "My Job",
input_name: "Input",
input: "Input",
run_time_data: %Benchee.CollectionData{
samples: [200, 400, 400, 400, 500, 500, 500, 700, 900],
statistics: %Benchee.Statistics{
average: 500.0,
ips: 2000_000.0,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: 800_000.0,
median: 500.0,
percentiles: %{50 => 500.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
},
memory_usage_data: %Benchee.CollectionData{
samples: [200, 400, 400, 400, 500, 500, 500, 700, 900],
statistics: %Benchee.Statistics{
average: 500.0,
ips: nil,
std_dev: 200.0,
std_dev_ratio: 0.4,
std_dev_ips: nil,
median: 500.0,
percentiles: %{50 => 500.0, 99 => 900.0},
mode: [500, 400],
minimum: 200,
maximum: 900,
sample_size: 9
}
}
}
],
system: nil
}
"""
@spec statistics(Suite.t()) :: Suite.t()
def statistics(suite) do
  # The configured percentile ranks apply to every scenario.
  percentiles = suite.configuration.percentiles

  updated_scenarios =
    Parallel.map(suite.scenarios, &calculate_scenario_statistics(&1, percentiles))

  %{suite | scenarios: updated_scenarios}
end
# Computes statistics for a scenario's run-time, memory, and reductions
# samples; only run-time statistics gain iterations-per-second figures.
defp calculate_scenario_statistics(scenario, percentiles) do
  run_time_stats =
    scenario.run_time_data.samples
    |> calculate_statistics(percentiles)
    |> add_ips()

  memory_stats = calculate_statistics(scenario.memory_usage_data.samples, percentiles)
  reductions_stats = calculate_statistics(scenario.reductions_data.samples, percentiles)

  %Scenario{
    scenario
    | run_time_data: put_statistics(scenario.run_time_data, run_time_stats),
      memory_usage_data: put_statistics(scenario.memory_usage_data, memory_stats),
      reductions_data: put_statistics(scenario.reductions_data, reductions_stats)
  }
end

# Attaches computed statistics to a collection-data struct.
defp put_statistics(collection_data, stats) do
  %CollectionData{collection_data | statistics: stats}
end
# No samples means no statistics beyond a zero sample size.
defp calculate_statistics([], _percentiles), do: %__MODULE__{sample_size: 0}

# Delegate the heavy lifting to Statistex, then map its field names onto
# our struct.
defp calculate_statistics(samples, percentiles) do
  samples
  |> Statistex.statistics(percentiles: percentiles)
  |> convert_from_statistex()
end
# Translates a Statistex result into this module's struct (renaming the
# standard-deviation fields along the way).
defp convert_from_statistex(stats) do
  %__MODULE__{
    average: stats.average,
    median: stats.median,
    std_dev: stats.standard_deviation,
    std_dev_ratio: stats.standard_deviation_ratio,
    percentiles: stats.percentiles,
    mode: stats.mode,
    minimum: stats.minimum,
    maximum: stats.maximum,
    sample_size: stats.sample_size
  }
end
# Iterations per second are undefined without samples or with a zero
# average (division by zero), so those pass through untouched.
defp add_ips(%__MODULE__{sample_size: 0} = statistics), do: statistics
defp add_ips(%__MODULE__{average: 0.0} = statistics), do: statistics

defp add_ips(%__MODULE__{average: average, std_dev_ratio: ratio} = statistics) do
  # Average is in nanoseconds, so ips = 1s (in ns) / average.
  ips = Duration.convert_value({1, :second}, :nanosecond) / average
  %__MODULE__{statistics | ips: ips, std_dev_ips: ips * ratio}
end
end
|
lib/benchee/statistics.ex
| 0.933423 | 0.86916 |
statistics.ex
|
starcoder
|
defmodule Miss.Kernel do
@moduledoc """
Functions to extend the Elixir `Kernel` module.
"""
@doc """
Checks whether `term` is a charlist, returning `true` or `false`.

A charlist is a list in which every element is a non-negative integer
representing a Unicode code point. Each integer must be:

- within the range `0..0x10FFFF` (`0..1_114_111`);
- out of the range `0xD800..0xDFFF` (`55_296..57_343`), which is reserved in Unicode for UTF-16
surrogate pairs.

Elixir uses single quotes to define charlists:

'cΓ³raΓ§Γ£o dΓͺ mΓ©lΓ£o'
[99, 243, 114, 97, 231, 227, 111, 32, 100, 234, 32, 109, 233, 108, 227, 111]

Check the [Elixir Charlists documentation](https://hexdocs.pm/elixir/List.html#module-charlists)
for more details.

Note that `Miss.Kernel.charlist?/1` CANNOT be used as a guard.

## Examples

    iex> Miss.Kernel.charlist?('prodis')
    true

    iex> Miss.Kernel.charlist?([112, 114, 111, 100, 105, 115])
    true

    iex> Miss.Kernel.charlist?([112, 114, 111, 100, 105, 115, 55_296])
    false

    iex> Miss.Kernel.charlist?("prodis")
    false

    iex> Miss.Kernel.charlist?(:prodis)
    false

    iex> Miss.Kernel.charlist?(true)
    false

    iex> Miss.Kernel.charlist?(123)
    false

    iex> Miss.Kernel.charlist?(123.45)
    false
"""
@spec charlist?(term()) :: boolean()
def charlist?(term) when is_list(term) do
  # Valid code points span 0..0x10FFFF minus the UTF-16 surrogate range.
  Enum.all?(term, fn code_point ->
    code_point in 0..1_114_111 and code_point not in 55_296..57_343
  end)
end

def charlist?(_term), do: false
@doc """
Performs an integer division and computes the remainder in one call.

`Miss.Kernel.div_rem/2` uses truncated division, which means:

- the result of the division is always rounded towards zero;
- the remainder will always have the sign of the `dividend`.

Raises an `ArithmeticError` if one of the arguments is not an integer, or when the `divisor` is
`0`.

## Examples

    iex> Miss.Kernel.div_rem(5, 2)
    {2, 1}

    iex> Miss.Kernel.div_rem(6, -4)
    {-1, 2}

    iex> Miss.Kernel.div_rem(-99, 2)
    {-49, -1}

    iex> Miss.Kernel.div_rem(10, 5)
    {2, 0}

    iex> Miss.Kernel.div_rem(0, 2)
    {0, 0}

    iex> Miss.Kernel.div_rem(5, 0)
    ** (ArithmeticError) bad argument in arithmetic expression

    iex> Miss.Kernel.div_rem(10.0, 2)
    ** (ArithmeticError) bad argument in arithmetic expression

    iex> Miss.Kernel.div_rem(10, 2.0)
    ** (ArithmeticError) bad argument in arithmetic expression
"""
@spec div_rem(integer(), neg_integer() | pos_integer()) :: {integer(), integer()}
def div_rem(dividend, divisor) do
  quotient = div(dividend, divisor)
  remainder = rem(dividend, divisor)
  {quotient, remainder}
end
@doc """
Creates or updates a struct exactly like `Kernel.struct/2`, but with the
arguments flipped: first the `fields`, then the `struct`. This makes it
pipe-friendly when the fields are built with the pipe operator `|>`.

In the following example, a hypothetical function `build/2` builds a `Map` to create a
`MyStruct` struct.

Using `Kernel.struct/2` is necessary to assign the map to a variable before creating the struct:

    def build(param1, param2) do
      fields =
        %{
          key1: param1.one,
          key2: param1.two,
          key3: :a_default_value
        }
        |> Map.merge(build_more_fields(param2))

      struct(MyStruct, fields)
    end

Using `Miss.Kernel.struct_inverse/2` the map can be piped when creating the struct:

    def build(param1, param2) do
      %{
        key1: param1.one,
        key2: param1.two,
        key3: :a_default_value
      }
      |> Map.merge(build_more_fields(param2))
      |> Miss.Kernel.struct_inverse(MyStruct)
    end

## Examples

    defmodule User do
      defstruct name: "User"
    end

    # Using a map
    iex> Miss.Kernel.struct_inverse(%{name: "Akira"}, User)
    %User{name: "Akira"}

    # Using keywords
    iex> Miss.Kernel.struct_inverse([name: "Akira"], User)
    %User{name: "Akira"}

    # Updating an existing struct
    iex> user = %User{name: "Other"}
    ...> Miss.Kernel.struct_inverse(%{name: "Akira"}, user)
    %User{name: "Akira"}

    # Known keys are used and unknown keys are ignored
    iex> Miss.Kernel.struct_inverse(%{name: "Akira", last_name: "Hamasaki"}, User)
    %User{name: "Akira"}

    # Unknown keys are ignored
    iex> Miss.Kernel.struct_inverse(%{last_name: "Hamasaki"}, User)
    %User{name: "User"}

    # String keys are ignored
    iex> Miss.Kernel.struct_inverse(%{"name" => "Akira"}, User)
    %User{name: "User"}

    # Using empty fields
    iex> Miss.Kernel.struct_inverse(%{}, User)
    %User{name: "User"}
"""
@spec struct_inverse(Enum.t(), module() | struct()) :: struct()
def struct_inverse(fields, struct) do
  struct(struct, fields)
end
@doc """
Same as `Miss.Kernel.struct_inverse/2`, but checks key validity, emulating
the compile-time behaviour of structs: unknown keys raise a `KeyError`.

## Examples

    defmodule User do
      defstruct name: "User"
    end

    # Using a map
    iex> Miss.Kernel.struct_inverse!(%{name: "Akira"}, User)
    %User{name: "Akira"}

    # Using keywords
    iex> Miss.Kernel.struct_inverse!([name: "Akira"], User)
    %User{name: "Akira"}

    # Updating an existing struct
    iex> user = %User{name: "Other"}
    ...> Miss.Kernel.struct_inverse!(%{name: "Akira"}, user)
    %User{name: "Akira"}

    # Unknown keys raises KeyError
    iex> Miss.Kernel.struct_inverse!(%{name: "Akira", last_name: "Hamasaki"}, User)
    ** (KeyError) key :last_name not found in: %Miss.KernelTest.User{name: "User"}

    # String keys raises KeyError
    iex> Miss.Kernel.struct_inverse!(%{"name" => "Akira"}, User)
    ** (KeyError) key "name" not found in: %Miss.KernelTest.User{name: "User"}

    # Using empty fields
    iex> Miss.Kernel.struct_inverse!(%{}, User)
    %User{name: "User"}
"""
@spec struct_inverse!(Enum.t(), module() | struct()) :: struct()
def struct_inverse!(fields, struct) do
  struct!(struct, fields)
end
@doc """
Builds a list of structs, applying `Kernel.struct/2` to every element of
`list`.

In the same way that `Kernel.struct/2`, the `struct` argument may be an atom (which defines
`defstruct`) or a `struct` itself.

The second argument is a list of any `Enumerable` that emits two-element tuples (key-value
pairs) during enumeration.

Keys in the `Enumerable` that do not exist in the struct are automatically discarded. Note that
keys must be atoms, as only atoms are allowed when defining a struct. If keys in the
`Enumerable` are duplicated, the last entry will be taken (the same behaviour as `Map.new/1`).

This function is useful for dynamically creating a list of structs, as well as for converting a
list of maps to a list of structs.

## Examples

    defmodule User do
      defstruct name: "User"
    end

    # Using a list of maps
    iex> Miss.Kernel.struct_list(User, [
    ...>   %{name: "Akira"},
    ...>   %{name: "Fernando"}
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Using a list of keywords
    iex> Miss.Kernel.struct_list(User, [
    ...>   [name: "Akira"],
    ...>   [name: "Fernando"]
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Using an existing struct
    iex> user = %User{name: "Other"}
    ...> Miss.Kernel.struct_list(user, [
    ...>   %{name: "Akira"},
    ...>   %{name: "Fernando"}
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Known keys are used and unknown keys are ignored
    iex> Miss.Kernel.struct_list(User, [
    ...>   %{name: "Akira", last_name: "Hamasaki"},
    ...>   %{name: "Fernando", last_name: "Hamasaki"}
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Unknown keys are ignored
    iex> Miss.Kernel.struct_list(User, [
    ...>   %{first_name: "Akira"},
    ...>   %{last_name: "Hamasaki"}
    ...> ])
    [
      %User{name: "User"},
      %User{name: "User"}
    ]

    # String keys are ignored
    iex> Miss.Kernel.struct_list(User, [
    ...>   %{"name" => "Akira"},
    ...>   %{"name" => "Fernando"}
    ...> ])
    [
      %User{name: "User"},
      %User{name: "User"}
    ]
"""
@spec struct_list(module() | struct(), [Enum.t()]) :: [struct()]
def struct_list(struct, list) do
  for fields <- list, do: struct(struct, fields)
end
@doc """
Builds a list of structs like `Miss.Kernel.struct_list/2`, but checks key
validity, emulating the compile-time behaviour of structs: unknown keys
raise a `KeyError`.

## Examples

    defmodule User do
      defstruct name: "User"
    end

    # Using a list of maps
    iex> Miss.Kernel.struct_list!(User, [
    ...>   %{name: "Akira"},
    ...>   %{name: "Fernando"}
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Using a list of keywords
    iex> Miss.Kernel.struct_list!(User, [
    ...>   [name: "Akira"],
    ...>   [name: "Fernando"]
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Using an existing struct
    iex> user = %User{name: "Other"}
    ...> Miss.Kernel.struct_list!(user, [
    ...>   %{name: "Akira"},
    ...>   %{name: "Fernando"}
    ...> ])
    [
      %User{name: "Akira"},
      %User{name: "Fernando"}
    ]

    # Unknown keys raises KeyError
    iex> Miss.Kernel.struct_list!(User, [
    ...>   %{name: "Akira", last_name: "Hamasaki"},
    ...>   %{name: "Fernando", last_name: "Hamasaki"}
    ...> ])
    ** (KeyError) key :last_name not found in: %Miss.KernelTest.User{name: "User"}

    # String keys raises KeyError
    iex> Miss.Kernel.struct_list!(User, [
    ...>   %{"name" => "Akira"},
    ...>   %{"name" => "Fernando"}
    ...> ])
    ** (KeyError) key "name" not found in: %Miss.KernelTest.User{name: "User"}
"""
@spec struct_list!(module() | struct(), [Enum.t()]) :: [struct()]
def struct_list!(struct, list) do
  Enum.map(list, fn fields -> struct!(struct, fields) end)
end
end
|
lib/miss/kernel.ex
| 0.91976 | 0.451871 |
kernel.ex
|
starcoder
|
defmodule AdventOfCode.Day04 do
  @moduledoc """
  Advent of Code day 4: play bingo on 5x5 boards against a drawn number
  sequence and score the first-winning (part 1) or last-winning (part 2) board.
  """

  import AdventOfCode.Utils

  # A board is a flat, row-major list of 25 numbers.
  # (Was declared `[[integer]]`, but `parse_board_numbers/1` produces a flat
  # list which `winning_board?/2` chunks into rows on demand.)
  @type board :: [integer]

  @doc "Sum of the first winning board's unmarked numbers times the final draw."
  @spec part1([binary()]) :: integer()
  def part1(args) do
    {sequence, boards} = parse_args(args)
    {winning_board, numbers} = find_winning_board([], sequence, boards)
    (winning_board |> unmarked_numbers(numbers) |> Enum.sum()) * hd(numbers)
  end

  # Draw numbers (prepending each draw onto `numbers`) until some board wins.
  # The most recent draw is therefore always `hd(numbers)`.
  @spec find_winning_board([integer], [integer], [board]) :: {board, [integer]}
  defp find_winning_board(numbers, sequence, boards) do
    case Enum.find(boards, &winning_board?(&1, numbers)) do
      nil -> find_winning_board([hd(sequence) | numbers], tl(sequence), boards)
      board -> {board, numbers}
    end
  end

  @doc "Sum of the last winning board's unmarked numbers times the final draw."
  @spec part2([binary()]) :: integer()
  def part2(args) do
    {sequence, boards} = parse_args(args)
    {losing_board, numbers} = find_losing_board([], sequence, boards)
    (losing_board |> unmarked_numbers(numbers) |> Enum.sum()) * hd(numbers)
  end

  # Drop boards as they win; the board left standing last is the "losing" one.
  @spec find_losing_board([integer], [integer], [board]) :: {board, [integer]}
  defp find_losing_board(numbers, sequence, boards) do
    case Enum.reject(boards, &winning_board?(&1, numbers)) do
      [] -> {hd(boards), numbers}
      remaining -> find_losing_board([hd(sequence) | numbers], tl(sequence), remaining)
    end
  end

  # A board wins when every number of some row or column has been drawn.
  # (Renamed from `is_winning_board?`: Elixir predicates end in `?` without
  # the `is_` prefix, which is reserved for guards.)
  @spec winning_board?(board, [integer]) :: boolean
  defp winning_board?(board, numbers) do
    rows = Enum.chunk_every(board, 5)
    columns = rows |> Enum.zip() |> Enum.map(&Tuple.to_list/1)

    Enum.any?(rows ++ columns, fn row_or_col ->
      Enum.all?(row_or_col, &Enum.member?(numbers, &1))
    end)
  end

  # Numbers on the board that have not been drawn yet.
  @spec unmarked_numbers(board, [integer]) :: [integer]
  defp unmarked_numbers(board, numbers) do
    Enum.reject(board, &Enum.member?(numbers, &1))
  end

  # Splits the trimmed input into the comma-separated draw sequence (first
  # non-empty chunk) and the whitespace-separated boards (remaining chunks).
  @spec parse_args([binary()]) :: {[integer()], [board()]}
  defp parse_args(args) do
    [[raw_sequence] | raw_boards] =
      Enum.map(args, &String.trim/1)
      |> Enum.chunk_by(&(&1 == ""))
      |> Enum.filter(&(&1 !== [""]))

    sequence = String.split(raw_sequence, ",") |> Enum.map(&parse_int!/1)
    boards = Enum.map(raw_boards, &parse_board_numbers/1)
    {sequence, boards}
  end

  # Joins the 5 row strings of a board and parses all 25 numbers.
  @spec parse_board_numbers([String.t()]) :: board
  defp parse_board_numbers(rows),
    do: Enum.join(rows, " ") |> String.split() |> Enum.map(&parse_int!/1)
end
|
lib/advent_of_code/day_04.ex
| 0.775817 | 0.46041 |
day_04.ex
|
starcoder
|
defmodule Sanbase.Clickhouse.MetricAdapter.HistogramMetric do
import Sanbase.DateTimeUtils, only: [str_to_sec: 1]
import Sanbase.Clickhouse.MetricAdapter.HistogramSqlQuery
alias Sanbase.Metric
alias Sanbase.ClickhouseRepo
@spent_coins_cost_histograms ["price_histogram", "spent_coins_cost", "all_spent_coins_cost"]
@eth2_string_label_float_value_metrics [
"eth2_staked_amount_per_label",
"eth2_staked_address_count_per_label",
"eth2_unlabeled_staker_inflow_sources"
]
@eth2_string_address_string_label_float_value_metrics [
"eth2_top_stakers"
]
@spec histogram_data(String.t(), map(), DateTime.t(), DateTime.t(), String.t(), number()) ::
        {:ok, list(map())} | {:error, String.t()}
def histogram_data(metric, selector, from, to, interval, limit)

# "age_distribution": each row is a bucket start (unix timestamp) and a value.
# The bucket's end is `interval` later, capped at `to` so the final bucket
# never extends past the queried range.
def histogram_data("age_distribution" = metric, %{slug: slug}, from, to, interval, limit) do
  {query, args} = histogram_data_query(metric, slug, from, to, interval, limit)

  ClickhouseRepo.query_transform(query, args, fn [unix, value] ->
    range_from = unix |> DateTime.from_unix!()

    range_to =
      [range_from |> Timex.shift(seconds: str_to_sec(interval)), to]
      |> Enum.min_by(&DateTime.to_unix/1)

    %{
      range: [range_from, range_to],
      value: value
    }
  end)
end

# Price/spent-coins histograms: rows are {price, amount}. The raw rows are
# then aggregated into `limit` evenly spaced price buckets (see
# maybe_transform_into_buckets/5).
def histogram_data(metric, %{slug: slug}, from, to, interval, limit)
    when metric in @spent_coins_cost_histograms do
  {query, args} = histogram_data_query(metric, slug, from, to, interval, limit)

  ClickhouseRepo.query_transform(query, args, fn [price, amount] ->
    %{
      price: Sanbase.Math.to_float(price),
      value: Sanbase.Math.to_float(amount)
    }
  end)
  |> maybe_transform_into_buckets(slug, from, to, limit)
end

# ETH2 staking metrics keyed by label only. These metrics are defined for
# the "ethereum" slug exclusively, hence the pinned slug in the pattern.
def histogram_data(
      metric,
      %{slug: "ethereum" = slug},
      from,
      to,
      interval,
      limit
    )
    when metric in @eth2_string_label_float_value_metrics do
  {query, args} = histogram_data_query(metric, slug, from, to, interval, limit)

  ClickhouseRepo.query_transform(query, args, fn [label, amount] ->
    %{
      label: label,
      value: Sanbase.Math.to_float(amount)
    }
  end)
end

# ETH2 staking metrics keyed by address and label ("eth2_top_stakers").
# Ethereum-only, like the clause above.
def histogram_data(
      metric,
      %{slug: "ethereum" = slug},
      from,
      to,
      interval,
      limit
    )
    when metric in @eth2_string_address_string_label_float_value_metrics do
  {query, args} = histogram_data_query(metric, slug, from, to, interval, limit)

  ClickhouseRepo.query_transform(query, args, fn [address, label, amount] ->
    %{
      address: address,
      label: label,
      value: Sanbase.Math.to_float(amount)
    }
  end)
end
def first_datetime(metric, selector, opts \\ [])

# Spent-coins histograms combine "price_usd" and "age_distribution", so data
# exists only once BOTH underlying metrics have data - hence the later of
# the two first datetimes.
def first_datetime(metric, %{slug: slug}, opts)
    when metric in @spent_coins_cost_histograms do
  with {:ok, price_dt} <- Metric.first_datetime("price_usd", %{slug: slug}, opts),
       {:ok, age_dt} <- Metric.first_datetime("age_distribution", %{slug: slug}, opts) do
    {:ok, Enum.max([price_dt, age_dt], DateTime)}
  end
end
def last_datetime_computed_at(metric, selector, opts \\ [])

# Mirror of first_datetime/3: the combined metric is only as fresh as the
# LEAST recently computed of its two underlying metrics.
def last_datetime_computed_at(metric, %{slug: slug}, opts)
    when metric in @spent_coins_cost_histograms do
  with {:ok, price_dt} <- Metric.last_datetime_computed_at("price_usd", %{slug: slug}, opts),
       {:ok, age_dt} <- Metric.last_datetime_computed_at("age_distribution", %{slug: slug}, opts) do
    {:ok, Enum.min([price_dt, age_dt], DateTime)}
  end
end
# Aggregate the separate prices into `limit` number of evenly spaced buckets
defp maybe_transform_into_buckets({:ok, []}, _slug, _from, _to, _limit), do: {:ok, []}

# Buckets the raw {price, value} rows into ~`limit` evenly spaced price
# ranges, then splits the bucket containing the period's average price into
# two at that price (see break_bucket/4).
defp maybe_transform_into_buckets({:ok, data}, slug, from, to, limit) do
  {min, max} = Enum.map(data, & &1.price) |> Sanbase.Math.min_max()
  # Avoid precision issues when using `round` for prices.
  min = Float.floor(min, 2)
  max = Float.ceil(max, 2)
  # `limit - 1` because one of the buckets will be split into 2
  bucket_size = Enum.max([Float.round((max - min) / (limit - 1), 2), 0.01])
  # Generate the range for given low and high price
  low_high_range = fn low, high ->
    [Float.round(low, 2), Float.round(high, 2)]
  end

  # Generate ranges tuples in the format needed by Stream.unfold/2
  price_ranges = fn value ->
    [lower, upper] = low_high_range.(value, value + bucket_size)
    {[lower, upper], upper}
  end

  # Generate limit number of ranges to properly show empty ranges as 0
  ranges_map =
    Stream.unfold(min, price_ranges)
    |> Enum.take(limit)
    |> Enum.into(%{}, fn range -> {range, 0.0} end)

  # Map every price to the proper range
  price_to_range = fn price ->
    bucket = floor((price - min) / bucket_size)
    lower = min + bucket * bucket_size
    upper = min + (1 + bucket) * bucket_size
    low_high_range.(lower, upper)
  end

  # Get the average price for the queried. time range. It will break the [X,Y]
  # price interval containing that price into [X, price_break] and [price_break, Y]
  {:ok, %{^slug => price_break}} =
    Metric.aggregated_timeseries_data("price_usd", %{slug: slug}, from, to, aggregation: :avg)

  price_break = price_break |> Sanbase.Math.round_float()
  price_break_range = price_to_range.(price_break)

  # Put every amount moved at a given price in the proper bucket.
  # NOTE(review): `Map.update/4` inserts the default `0.0` (dropping `value`)
  # when `key` is not already in `ranges_map`. That can happen for prices at
  # the very top of the range if float rounding pushes `price_to_range` past
  # the last pre-generated bucket - confirm whether such values may be lost.
  bucketed_data =
    Enum.reduce(data, ranges_map, fn %{price: price, value: value}, acc ->
      key = price_to_range.(price)
      Map.update(acc, key, 0.0, fn curr_amount -> Float.round(curr_amount + value, 2) end)
    end)
    |> break_bucket(data, price_break_range, price_break)
    |> Enum.map(fn {range, amount} -> %{range: range, value: amount} end)
    |> Enum.sort_by(fn %{range: [range_start | _]} -> range_start end)

  {:ok, bucketed_data}
end

# Propagate upstream query errors untouched.
defp maybe_transform_into_buckets({:error, error}, _slug, _from, _to, _limit),
  do: {:error, error}
# Break a bucket with range [low, high] into 2 buckets [low, divider] and
# [divider, high], putting the proper amount of value that falls into each of
# the 2 sub-ranges. The amounts are recomputed from the original (unbucketed)
# data so the split is exact rather than proportional.
defp break_bucket(bucketed_data, original_data, [low, high], divider) do
  {lower_half_amount, upper_half_amount} =
    original_data
    |> Enum.reduce({0.0, 0.0}, fn %{price: price, value: value}, {acc_lower, acc_upper} ->
      cond do
        price >= low and price < divider -> {acc_lower + value, acc_upper}
        price >= divider and price < high -> {acc_lower, acc_upper + value}
        true -> {acc_lower, acc_upper}
      end
    end)

  # Replace the original bucket with its two halves.
  bucketed_data
  |> Map.delete([low, high])
  |> Map.put([low, divider], Float.round(lower_half_amount, 2))
  |> Map.put([divider, high], Float.round(upper_half_amount, 2))
end
end
|
lib/sanbase/clickhouse/metric/histogram_metric.ex
| 0.857559 | 0.532547 |
histogram_metric.ex
|
starcoder
|
defmodule Addict.Interactors.ValidatePassword do
@moduledoc """
Validates a password according to the defined strategies.
For now, only the `:default` strategy exists: password must be at least 6 chars long.
Returns `{:ok, []}` or `{:error, [errors]}`
"""
@password_length 8
@uppercase_condition_regex ~r/[A-Z]+/
@lowercase_condition_regex ~r/[a-z]+/
@number_condition_regex ~r/\d+/
@special_characters_regex ~r/[.,!?@#$%^&*()<>]+/
@password_regex_validations [
@uppercase_condition_regex,
@lowercase_condition_regex,
@number_condition_regex,
@special_characters_regex
]
# Entry point: applies each configured password validation strategy to the
# changeset. `nil` strategies behave like an empty list, and an empty list
# falls back to the `:default` strategy.
def call(changeset, nil) do
  call(changeset, [])
end

def call(changeset, strategies) do
  # Pattern match on the empty list instead of counting the whole list
  # (`Enum.count(strategies) == 0` walks the list unnecessarily).
  strategies =
    case strategies do
      [] -> [:default]
      _ -> strategies
    end

  strategies
  |> Enum.reduce(changeset, fn strategy, acc ->
    validate(strategy, acc)
  end)
  |> format_response
end
# No error tuples collected -> the password passed every strategy.
defp format_response([]), do: {:ok, []}

# Any collected changeset-style error tuples are returned as-is.
defp format_response(messages), do: {:error, messages}
# Individual :frame_password checks. Each returns [] on success or a
# single-element list shaped like an Ecto changeset error.
defp validate(:frame_password, :length, password) when is_bitstring(password) do
  # The original message interpolated a corrupted placeholder (`@<PASSWORD>`,
  # an artifact of source anonymization); it is reconstructed here from the
  # `@password_length` attribute that the guard itself uses.
  if String.length(password) > @password_length - 1,
    do: [],
    else: [{:password, {"must be at least #{@password_length} characters long", []}}]
end

defp validate(:frame_password, :uppercase, password) when is_bitstring(password) do
  if Regex.match?(@uppercase_condition_regex, password),
    do: [],
    else: [{:password, {"must contain at least one uppercase letter", []}}]
end

defp validate(:frame_password, :lowercase, password) when is_bitstring(password) do
  if Regex.match?(@lowercase_condition_regex, password),
    do: [],
    else: [{:password, {"must contain at least one lowercase letter", []}}]
end

defp validate(:frame_password, :numbers, password) when is_bitstring(password) do
  if Regex.match?(@number_condition_regex, password),
    do: [],
    else: [{:password, {"must contain at least one digit", []}}]
end

defp validate(:frame_password, :special_chars, password) when is_bitstring(password) do
  if Regex.match?(@special_characters_regex, password),
    do: [],
    else: [{:password, {"must contain at least one special character", []}}]
end
# Default strategy applied directly to a password string: minimum 6 chars.
# This clause must stay before the changeset clause below - both match on
# `:default`, and only the guard distinguishes a raw password string.
defp validate(:default, password) when is_bitstring(password) do
  if String.length(password) > 5, do: [], else: [{:password, {"is too short", []}}]
end

# Default strategy applied to a changeset: validates the :password change
# and returns the resulting error list.
defp validate(:default, changeset) do
  Ecto.Changeset.validate_change(changeset, :password, fn _field, value ->
    validate(:default, value)
  end).errors
end

# Frame strategy applied to a changeset: delegates to the length +
# complexity pipeline (validate_frame_password/1).
defp validate(:frame_password, changeset) do
  Ecto.Changeset.validate_change(changeset, :password, fn _field, value ->
    value |> validate_frame_password
  end).errors
end
# Length is checked first; the complexity rules only apply to passwords that
# are already long enough.
defp validate_frame_password(password) when is_bitstring(password) do
  case validate(:frame_password, :length, password) do
    [] -> validate_frame_password_complexity(password)
    [error] -> [error]
  end
end
# A password is complex enough if it matches at least 3 of the 4 character
# class regexes (uppercase, lowercase, digit, special character).
defp validate_frame_password_complexity(password) do
  # Enum.count/2 with a predicate replaces the original map-then-count pass.
  matched_rules = Enum.count(@password_regex_validations, &Regex.match?(&1, password))

  if matched_rules < 3 do
    [
      {:password,
       {"must contain at least one uppercase, lowercase letter, number and special character",
        []}}
    ]
  else
    []
  end
end
end
|
lib/addict/interactors/validate_password.ex
| 0.759136 | 0.460228 |
validate_password.ex
|
starcoder
|
defmodule UBootEnv.IO do
@moduledoc """
Functions for reading and writing raw blocks to storage
This is the module that handles the low level CRC32 and redundant block
details.
"""
alias UBootEnv.{Config, Location}
@type generation() :: byte() | :unused
@doc """
Read a U-Boot environment block

This function performs the actual read and in the case of redundant U-Boot
environments, it returns the newer block. It does not decode.
"""
@spec read(Config.t()) :: {:ok, binary()} | {:error, any()}
def read(config = %Config{}) do
  format = Config.format(config)

  # Read every configured location, keep only the ones that unpackage
  # cleanly, then pick the sole survivor or the newest of two.
  valid_blocks =
    config.locations
    |> Enum.map(&read_and_unpackage(&1, format))
    |> Enum.filter(&match?({:ok, _contents, _generation}, &1))

  pick_best_for_reading(valid_blocks)
end
# Reads one location and unpackages it; read errors pass through untouched.
defp read_and_unpackage(location, format) do
  case read_location(location) do
    {:ok, bin} -> unpackage(bin, format)
    error -> error
  end
end
# No readable block, exactly one, or the newer of two redundant blocks.
defp pick_best_for_reading([]), do: {:error, :no_valid_blocks}
defp pick_best_for_reading([{:ok, contents, _generation}]), do: {:ok, contents}

defp pick_best_for_reading([{:ok, contents_a, gen_a}, {:ok, contents_b, gen_b}]) do
  case newer?(gen_a, gen_b) do
    true -> {:ok, contents_a}
    false -> {:ok, contents_b}
  end
end
# Compares two 8-bit generation counters with wraparound.
#
# a and b are unsigned bytes (0-255) that count up, so a plain `>` fails at
# the wrap point (a=0 must be newer than b=255). Taking the difference
# masked to 8 bits and checking which half of the byte range it lands in
# handles the wrap correctly.
defp newer?(a, b) do
  :erlang.band(a - b, 0xFF) < 128
end
@doc """
Write a U-Boot environment block

This function performs the actual write. In the case of redundant
U-Boot environments, it writes the block in the right location and
marks the generation byte appropriately so that it is used on the next
read. It does not encode.
"""
@spec write(Config.t(), iodata()) :: :ok | {:error, atom()}
def write(config = %Config{}, data) do
  flattened = IO.iodata_to_binary(data)
  config |> Config.format() |> do_write(config, flattened)
end
# Nonredundant format: always write the single location, no generation byte.
defp do_write(:nonredundant, config, flattened_data) do
  case package(flattened_data, Config.size(config), :unused) do
    {:ok, packaged} -> write_location(Config.first(config), packaged)
    error -> error
  end
end

# Redundant format: overwrite the stale/corrupt slot with a bumped generation.
defp do_write(:redundant, config, flattened_data) do
  {location, generation} = find_write_location(config)

  case package(flattened_data, Config.size(config), generation) do
    {:ok, packaged} -> write_location(location, packaged)
    error -> error
  end
end
# Decides which redundant slot to overwrite and which generation to stamp,
# based on what can currently be read back from both slots.
defp find_write_location(config) do
  first_result = read_and_unpackage(Config.first(config), :redundant)
  second_result = read_and_unpackage(Config.second(config), :redundant)

  case location_and_gen_to_write(first_result, second_result) do
    {1, generation} -> {Config.first(config), generation}
    {2, generation} -> {Config.second(config), generation}
  end
end
# Prefer writing over corrupt and old locations.
# Neither slot readable: start fresh in slot 1 with generation 0.
defp location_and_gen_to_write({:error, _}, {:error, _}), do: {1, 0}

# Only slot 1 is valid: overwrite slot 2 with the next generation.
defp location_and_gen_to_write({:ok, _contents, generation}, {:error, _}),
  do: {2, incr_generation(generation)}

# Only slot 2 is valid: overwrite slot 1 with the next generation.
defp location_and_gen_to_write({:error, _}, {:ok, _contents, generation}),
  do: {1, incr_generation(generation)}

# Both valid: overwrite whichever slot holds the older generation.
defp location_and_gen_to_write({:ok, _contents1, gen1}, {:ok, _contents2, gen2}) do
  if newer?(gen1, gen2) do
    {2, incr_generation(gen1)}
  else
    {1, incr_generation(gen2)}
  end
end
# Generation counters are 8-bit and wrap from 255 back to 0.
defp incr_generation(x), do: :erlang.band(x + 1, 0xFF)
@doc """
Package up U-Boot environment contents

The result can be written to where ever the environment block is persisted.
"""
@spec package(binary(), pos_integer(), generation()) ::
        {:ok, iodata()} | {:error, :environment_too_small}
def package(bin, env_size, generation) do
  # Everything after the header is padded with 0xFF bytes up to the fixed
  # environment size. The CRC covers contents + padding, not the header.
  padding_len = env_size - byte_size(bin) - header_size(generation)

  if padding_len < 0 do
    {:error, :environment_too_small}
  else
    padding = :binary.copy(<<0xFF>>, padding_len)
    crc = :erlang.crc32([bin, padding])
    {:ok, [<<crc::little-size(32)>>, encode_generation(generation), bin, padding]}
  end
end

# The header is the 4-byte CRC plus, for redundant formats, 1 generation byte.
defp header_size(:unused), do: 4
defp header_size(_generation), do: 5

# :unused contributes no bytes to the iodata; otherwise a single byte 0..255.
defp encode_generation(:unused), do: []
defp encode_generation(generation) when generation >= 0 and generation < 256, do: generation
@doc """
Unpackage a U-Boot environment block

This is the opposite of package/3. It will only return successfully
if the input passes the U-Boot CRC check.
"""
@spec unpackage(binary(), :redundant | :nonredundant) ::
        {:ok, binary(), generation()} | {:error, :invalid_crc}
def unpackage(<<expected_crc::little-32, contents::binary>>, :nonredundant) do
  case validate_crc(contents, expected_crc) do
    :ok -> {:ok, contents, :unused}
    error -> error
  end
end

def unpackage(<<expected_crc::little-32, generation, contents::binary>>, :redundant) do
  case validate_crc(contents, expected_crc) do
    :ok -> {:ok, contents, generation}
    error -> error
  end
end

# The CRC32 of the contents must match the stored little-endian header value.
defp validate_crc(contents, expected) do
  if :erlang.crc32(contents) == expected do
    :ok
  else
    {:error, :invalid_crc}
  end
end
# Opens, positionally reads, and closes the backing file for one location.
# A zero-length/truncated file surfaces as {:error, :empty} via eof_is_error/1.
defp read_location(location = %Location{}) do
  with {:ok, fd} <- File.open(location.path, [:raw, :binary, :read]) do
    result = :file.pread(fd, location.offset, location.size) |> eof_is_error()
    _ = File.close(fd)
    result
  end
end
# Opens (read+write so the file is NOT truncated), positionally writes the
# packaged environment, and closes the backing file.
defp write_location(location = %Location{}, contents) do
  with {:ok, fd} <- File.open(location.path, [:raw, :binary, :write, :read]) do
    result = :file.pwrite(fd, location.offset, contents)
    _ = File.close(fd)
    result
  end
end
# :file.pread returns :eof for reads past the end of the file (e.g. empty
# storage); normalize that to a regular error tuple.
defp eof_is_error(:eof), do: {:error, :empty}
defp eof_is_error(other), do: other
end
|
lib/uboot_env/io.ex
| 0.765287 | 0.44553 |
io.ex
|
starcoder
|
defmodule TwoFactorInACan.Hotp do
@moduledoc """
Provides functions for working with the HMAC-based One Time Password
algorithm as defined in RFC 4226.
For details on RFC 4226, see https://tools.ietf.org/rfc/rfc4226.txt.
"""
use Bitwise, only_operators: true
@doc """
Generates a token from a shared secret and a counter which can be
synchronized.

This token can be used by one party to verify whether another party has the
same secret.

Options:
- `:secret_format` - the format that the secret is passed in as. Options
  include:
  - `:binary` (default)
  - `:base32`
  - `:base64`
- `:token_length` (Default: 6) - the length of the generated token. A longer
  token is harder to guess and thus more secure. A longer token can also be
  more difficult for users to accurately transmit. Although everything in
  `TwoFactorInACan` supports variable token length, you should be sure that
  other apps and programs used support the token length set here.

## Examples

The token depends on the randomly generated secret, so the outputs below are
illustrative only:

    secret = TwoFactorInACan.Secrets.generate_totp_secret()
    TwoFactorInACan.Hotp.generate_token(secret, 0)
    #=> "829371"
    TwoFactorInACan.Hotp.generate_token(secret, 0, token_length: 10)
    #=> "1807866564"
"""
def generate_token(secret, count, opts \\ []) do
  token_length = Keyword.get(opts, :token_length, 6)
  binary_secret = secret |> convert_to_binary(opts)
  # HMAC-SHA1 over the 8-byte big-endian counter, per RFC 4226.
  hash = compute_hash(binary_secret, count)
  # Dynamic truncation: select 4 bytes of the digest at a digest-derived offset.
  four_bytes_from_hash = dynamically_truncate(hash)

  # Mask the top bit (0x7FFFFFFF) so the value is a non-negative 31-bit int.
  four_bytes_as_integer =
    four_bytes_from_hash
    |> binary_to_integer
    |> wrap_to(0x7FFFFFFF)

  # Keep only the low `token_length` decimal digits, left-padded with zeros.
  truncation_factor = 10 |> :math.pow(token_length) |> trunc
  token_as_integer = rem(four_bytes_as_integer, truncation_factor)

  token_as_integer
  |> :erlang.integer_to_binary()
  |> String.pad_leading(token_length, "0")
end
# :crypto.hmac/3 was deprecated in OTP 23 and removed in OTP 24;
# :crypto.mac/4 is its replacement. The branch is decided once at compile
# time based on the OTP release doing the compiling.
if System.otp_release() |> String.to_integer() >= 23 do
  defp compute_hash(binary_secret, count) do
    :crypto.mac(:hmac, :sha, binary_secret, count |> as_8_byte_binary)
  end
else
  defp compute_hash(binary_secret, count) do
    :crypto.hmac(:sha, binary_secret, count |> as_8_byte_binary)
  end
end
@doc """
Verifies that the provided HOTP token was generated using the provided secret
and count.

This function uses the secret and count to generate a token. It then compares
that generated token to the passed in token in constant time (so the
comparison itself does not leak, via timing, how many leading characters of
the guess were correct). If they match, this function returns `true`;
otherwise it returns `false`.

This function allows a number of options:
- `:secret_format` - the format that the secret is passed in as. Options
  include:
  - `:binary` (default)
  - `:base32`
  - `:base64`
- `:token_length` (Default: 6) - the length of the generated token. A longer
  token is harder to guess and thus more secure. A longer token can also be
  more difficult for users to accurately transmit. Although everything in
  `TwoFactorInACan` supports variable token length, you should be sure that
  other apps and programs used support the token length set here.
"""
def same_secret?(secret, token, count, opts \\ [])

def same_secret?(secret, token, count, opts) when is_binary(token) do
  secure_compare(token, generate_token(secret, count, opts))
end

# A non-binary token can never equal a generated token (always a binary),
# matching the previous `==` behaviour for such inputs.
def same_secret?(_secret, _token, _count, _opts), do: false

# Constant-time binary comparison: OR-accumulate the XOR of every byte pair
# so the runtime does not depend on where the first mismatch occurs.
defp secure_compare(a, b) when byte_size(a) == byte_size(b) do
  :binary.bin_to_list(a)
  |> Enum.zip(:binary.bin_to_list(b))
  |> Enum.reduce(0, fn {x, y}, acc -> Bitwise.bor(acc, Bitwise.bxor(x, y)) end)
  |> Kernel.==(0)
end

defp secure_compare(_a, _b), do: false
# Normalizes the secret into a raw binary according to the :secret_format
# option (:binary passes through; :base32/:base64 are decoded). Raises an
# ArgumentError when decoding fails or the format is unknown.
defp convert_to_binary(secret, opts) do
  secret_format = Keyword.get(opts, :secret_format, :binary)

  case secret_format do
    :binary ->
      secret

    :base32 ->
      case Base.decode32(secret) do
        {:ok, binary_secret} ->
          binary_secret

        _ ->
          raise ArgumentError, """
          Error calculating token value.
          Secret format specified as :base32, but there was an error
          decoding the secret that was passed in with Base.decode32/1.
          """
      end

    :base64 ->
      case Base.decode64(secret) do
        {:ok, binary_secret} ->
          binary_secret

        _ ->
          raise ArgumentError, """
          Error calculating token value.
          Secret format specified as :base64, but there was an error
          decoding the secret that was passed in with Base.decode64/1.
          """
      end

    _ ->
      raise ArgumentError, """
      Invalid secret format supplied when decoding secret:
      secret_format: #{secret_format}
      Valid options include:
      - :binary
      - :base32
      - :base64
      """
  end
end
# RFC 4226 requires the counter encoded as an 8-byte big-endian unsigned value.
defp as_8_byte_binary(count) when is_integer(count) do
  <<count::unsigned-big-integer-size(64)>>
end
# RFC 4226 "dynamic truncation". Expects a 20-byte SHA-1 digest: the low
# nibble of the final byte (index 19) selects where to read from.
defp dynamically_truncate(binary) when is_binary(binary) do
  # Convert final byte of binary to a number between 0 and 15.
  # This will be uniformly randomly between 0 and 15.
  offset = :binary.at(binary, 19) &&& 15

  # Use that offset to randomly select 4 contiguous bytes from the original
  # binary.
  :binary.part(binary, offset, 4)
end
# Decodes exactly four bytes as a big-endian unsigned 32-bit integer.
defp binary_to_integer(<<value::unsigned-big-integer-size(32)>>), do: value
# Masks `integer` down to the bits set in `mask` (e.g. 0x7FFFFFFF clears the
# top bit as required by RFC 4226 dynamic truncation).
defp wrap_to(integer, mask) when is_integer(integer) and is_integer(mask) do
  integer &&& mask
end
end
|
lib/hotp/hotp.ex
| 0.910732 | 0.890628 |
hotp.ex
|
starcoder
|
defmodule MysterySoup.PCG32 do
@moduledoc """
Documentation for `MysterySoup`'s PCG32 implementation.
"""
@type t :: __MODULE__
defstruct [:state, :inc]
alias MysterySoup.PCG32.Nif
@doc """
Initializes a new PCG32 state, seeding with system random.

Delegates to the NIF implementation.
"""
# Spec fixed: the returned state is a %MysterySoup.PCG32{}, not MysterySoup.t().
@spec init() :: MysterySoup.PCG32.t()
def init, do: Nif.init_state()
@doc """
Generates the next random unsigned 32-bit integer from the PCG32 state.

Returns the value together with the advanced generator state; callers must
thread the returned state into subsequent calls.
"""
@spec next(MysterySoup.PCG32.t()) :: {integer(), MysterySoup.PCG32.t()}
def next(pcg), do: Nif.next(pcg)
@doc """
Generates a float between 0 and 1. Consumes multiple `next/1` calls.

# Notes

This function will pull an indeterminate amount of values from the
underlying generator, so the returned state may have advanced by more than
one step.
"""
@spec decimal(MysterySoup.PCG32.t()) :: {float(), MysterySoup.PCG32.t()}
def decimal(pcg), do: Nif.next_float(pcg)
@doc """
Generates an integer between 1 and `sides`, as though rolling a die with as many sides.
"""
@spec roll_die(MysterySoup.PCG32.t(), integer()) :: {integer(), MysterySoup.PCG32.t()}
def roll_die(pcg, sides) do
  {next, pcg} = next(pcg)
  # Using the remainder will confine the value to a range between `sides - 1` and 0. This is off
  # by one, so we simply add 1 to it.
  # NOTE(review): plain `rem` introduces a slight modulo bias when `sides`
  # does not divide 2^32 - acceptable for games, not for cryptography.
  val = rem(next, sides) + 1
  {val, pcg}
end
@doc """
Generates an integer between `low` and `high`, inclusive.
"""
@spec from_range(MysterySoup.PCG32.t(), integer(), integer()) :: {integer(), MysterySoup.PCG32.t()}
def from_range(pcg, low, high) when high > low do
  {next, pcg} = next(pcg)
  # An inclusive range holds `high - low + 1` values. The previous formula
  # (`rem(next, high - low) + low + 1`) could never produce `low`,
  # contradicting the documented inclusivity.
  val = rem(next, high - low + 1) + low
  {val, pcg}
end
@doc """
Picks `n` options from `set`.
"""
@spec pick_n(MysterySoup.PCG32.t(), integer(), [any()]) :: [any()]
def pick_n(_, n, set) when n == 0 or set == [], do: []
def pick_n(pcg, n, set) when is_list(set) do
gen_pick_n_loop(pcg, {false, n, set}, [])
end
@doc """
Picks `n` _unique_ options from `set`.
"""
@spec pick_n_uniq(MysterySoup.PCG32.t(), integer(), [any()]) :: [any()]
def pick_n_uniq(_, n, set) when n == 0 or set == [], do: []
def pick_n_uniq(pcg, n, set) when is_list(set) do
gen_pick_n_loop(pcg, {false, n, set}, [])
end
# Common logic for pick of set functions
defp gen_pick_n_loop(pcg, {remove, n, set}, out) when n != 0 do
# Next random
{next, pcg} = next(pcg)
# The remainder of the random number divided by
# the length produces a valid index.
index = rem(next, Enum.count(set))
# Add the element to the out list
out = [out | Enum.at(set, index)]
# If we're removing used values, remove it
set = pop_if(remove, set, index)
gen_pick_n_loop(pcg, {remove, n - 1, set}, out)
end
defp gen_pick_n_loop(pcg, {_, 0, _}, out), do: {pcg, out}
defp pop_if(true, set, i), do: List.delete_at(set, i)
defp pop_if(false, set, _), do: set
end
|
lib/pcg32.ex
| 0.811639 | 0.482429 |
pcg32.ex
|
starcoder
|
defmodule Reaper.DataExtract.ExtractStep do
  @moduledoc """
  This module processes extract steps as defined in an ingestion definition. After
  iterating through the steps, accumulating any destination values in the assigns block,
  it is assumed the final step will be http (at this time) which returns a data stream.
  """
  require Logger

  alias Reaper.DataSlurper
  alias Reaper.UrlBuilder

  @doc """
  Runs every extract step of `ingestion` in order, threading the assigns map
  produced by each step into the next one. Returns the assigns of the final step.
  """
  def execute_extract_steps(ingestion, steps) do
    Enum.reduce(steps, %{}, fn step, acc ->
      # Normalize the raw step map so keys can be pattern-matched as atoms below.
      step = AtomicMap.convert(step, underscore: false)
      execute_extract_step(ingestion, step, acc)
    end)
  end

  # Merges previously accumulated assigns into the step, processes it, and
  # converts any raised error into a reraise tagged with the step type/ingestion id.
  defp execute_extract_step(ingestion, step, assigns_accumulator) do
    step = Map.put(step, :assigns, Map.merge(step.assigns, assigns_accumulator))
    process_extract_step(ingestion, step)
  rescue
    error ->
      Logger.error(Exception.format(:error, error, __STACKTRACE__))
      reraise "Unable to process #{step.type} step for ingestion #{ingestion.id}.", __STACKTRACE__
  end

  # "http" step: builds the request from the step context, downloads the
  # response to a local file, and exposes it to later steps as :output_file.
  defp process_extract_step(ingestion, %{type: "http"} = step) do
    {body, headers} = evaluate_body_and_headers(step)

    output_file =
      UrlBuilder.decode_http_extract_step(step)
      |> DataSlurper.slurp(ingestion.id, headers, step.context.protocol, step.context.action, body)

    Map.put(step.assigns, :output_file, output_file)
  end

  # "s3" step: evaluates headers against current assigns, slurps the object
  # addressed by the templated URL.
  defp process_extract_step(ingestion, %{type: "s3"} = step) do
    headers =
      UrlBuilder.safe_evaluate_parameters(step.context.headers, step.assigns)
      |> Enum.into(%{})

    output_file =
      UrlBuilder.build_safe_url_path(step.context.url, step.assigns)
      |> DataSlurper.slurp(ingestion.id, headers)

    Map.put(step.assigns, :output_file, output_file)
  end

  # "sftp" step: like "s3" but with no headers.
  defp process_extract_step(ingestion, %{type: "sftp"} = step) do
    output_file =
      UrlBuilder.build_safe_url_path(step.context.url, step.assigns)
      |> DataSlurper.slurp(ingestion.id)

    Map.put(step.assigns, :output_file, output_file)
  end

  # "date" step: computes now (optionally shifted by deltaTimeUnit/deltaTimeValue),
  # formats it, and stores it in assigns under the configured destination key.
  # NOTE(review): String.to_atom/1 on ingestion-supplied values can grow the
  # atom table — confirm ingestion definitions are trusted input.
  defp process_extract_step(_ingestion, %{type: "date"} = step) do
    date =
      case step.context.deltaTimeUnit do
        nil ->
          Timex.now()

        _ ->
          unit = String.to_atom(step.context.deltaTimeUnit)
          Timex.shift(Timex.now(), [{unit, step.context.deltaTimeValue}])
      end

    formatted_date = Timex.format!(date, step.context.format)
    Map.put(step.assigns, step.context.destination |> String.to_atom(), formatted_date)
  end

  # "secret" step: fetches a credential map by key and stores the requested
  # sub-key value in assigns under the configured destination.
  defp process_extract_step(_ingestion, %{type: "secret"} = step) do
    {:ok, cred} = Reaper.SecretRetriever.retrieve_ingestion_credentials(step.context.key)
    secret = Map.get(cred, step.context.sub_key)
    Map.put(step.assigns, step.context.destination |> String.to_atom(), secret)
  end

  # "auth" step: performs an (optionally cached) auth call, decodes the JSON
  # response, extracts the value at context.path, and stores it in assigns.
  defp process_extract_step(ingestion, %{type: "auth"} = step) do
    {body, headers} = evaluate_body_and_headers(step)
    url = UrlBuilder.build_safe_url_path(step.context.url, step.assigns)

    response =
      Reaper.AuthRetriever.authorize(ingestion.id, url, body, step.context.encodeMethod, headers, step.context.cacheTtl)
      |> Jason.decode!()
      |> get_in(step.context.path)

    Map.put(step.assigns, step.context.destination |> String.to_atom(), response)
  end

  # Evaluates the step's templated body and headers against its assigns.
  defp evaluate_body_and_headers(step) do
    body = process_body(step.context.body, step.assigns)
    headers = UrlBuilder.safe_evaluate_parameters(step.context.headers, step.assigns)
    {body, headers}
  end

  # Empty/missing bodies normalize to the empty string.
  defp process_body(body, _assigns) when body in ["", nil], do: ""

  # Non-empty bodies are parameter-evaluated, converted to a map, and JSON-encoded.
  defp process_body(body, assigns) do
    body
    |> UrlBuilder.safe_evaluate_parameters(assigns)
    |> Enum.into(%{})
    |> Jason.encode!()
  end
end
|
apps/reaper/lib/reaper/data_extract/extract_step.ex
| 0.694717 | 0.485234 |
extract_step.ex
|
starcoder
|
defmodule Monet.Connection do
  @moduledoc """
  Represents a connection (a socket) to the MonetDB Server.

  Although this can be accessed directly (starting with conn/1), the intention is
  for it to be accessed via the Monet module (which manages a pool of these
  connections).
  """

  require Record
  require Logger

  alias Monet.{Error, Prepared, Reader, Transaction, Writer}

  # Bundles the TCP socket with its pool name and the timeouts used for
  # connect/send/read operations.
  Record.defrecord(:connection,
    socket: nil,
    pool_name: nil,
    read_timeout: 10_000,
    send_timeout: 10_000,
    connect_timeout: 10_000
  )

  @doc """
  A query with no arguments is executed as a simple query. A query with arguments
  is executed as a prepare + exec + deallocate.

  Query does not mutate the conn, but conn is returned nonetheless (along with the
  result or error). The returned conn can be nil, which indicates that the connection
  can no longer be used.
  """
  def query(conn, sql, args \\ nil)

  def query(conn, sql, nil) do
    # BUGFIX: the result pattern was `{:ok, }`, which does not parse; it must
    # bind the success tuple as a whole while asserting the :ok tag.
    with :ok <- Writer.query(conn, sql),
         {:ok, _} = result <- Reader.result(conn) do
      {result, conn}
    else
      err -> error_result(err, conn)
    end
  end

  def query(conn, sql, args) do
    with {:ok, prepared} <- Prepared.new(conn, sql),
         {:ok, result, c} <- Prepared.exec_and_close(prepared, args) do
      # `c` is the outcome of the deallocate step; the query itself succeeded.
      case c do
        :ok ->
          {result, conn}

        {:error, %{code: 7003}} ->
          # Deallocating failed because the id wasn't valid. This easily happens
          # (monetd automatically deallocates on a failed execution). It's no
          # reason to remove the connection from the pool.
          {result, conn}

        _ ->
          # We got an error deallocating which wasn't specifically about an invalid
          # id. The connection is probably still good, but we don't want to leak
          # prepared statements on the server. Safer to close this connection to
          # force a cleanup.
          {result, close(conn)}
      end
    else
      err -> error_result(err, conn)
    end
  end

  @doc """
  Runs `fun` in a transaction. Automatically starts and commits/rolls back the
  transaction.

  When called through the pool (that is via `Monet.transaction/1`) the connection
  is automatically closed (and thus the transaction rolled back) on an exception.
  If calling this directly, it is up to the caller to deal with exceptions.
  """
  def transaction(conn, fun) do
    tx = Transaction.new(conn)

    with :ok <- Writer.query(conn, "start transaction"),
         {:ok, _} <- Reader.result(conn) do
      try do
        fun_result = fun.(tx)

        # commit/rollback result
        crb_result =
          case fun_result do
            {:rollback, _} -> Transaction.rollback(tx)
            _ -> Transaction.commit(tx)
          end

        case crb_result do
          # If the commit/rollback failed, this is the result we'll return
          # plus we might need to close the connection
          {:error, err} ->
            conn = if Error.closed?(err), do: nil, else: conn
            {crb_result, conn}

          :ok ->
            # If the commit/rollback succeeded, we need to clean up the value
            # returned by the supplied fun and we know we don't need to close
            # the connection
            value =
              case fun_result do
                {:ok, _} = ok -> ok
                {:error, _} = err -> err
                {:rollback, _} = rlb -> rlb
                {:commit, value} -> {:ok, value}
                value -> {:ok, value}
              end

            {value, conn}
        end
      after
        Transaction.close(tx) # deallocate any prepared statements
      end
    else
      err -> error_result(err, conn)
    end
  end

  @doc """
  Connects to the MonetDB server. See Monet.start_link/1 for available options
  (although some of the options listed there such as `pool_size` are
  specific to the Monet pool and not this individual connection).
  """
  def connect(opts) do
    connect_timeout = Keyword.get(opts, :connect_timeout, 10_000)

    case :gen_tcp.connect(host(opts), port(opts), [packet: :raw, mode: :binary, active: false], connect_timeout) do
      {:ok, socket} ->
        with {:ok, conn} <- authenticate(socket, opts),
             {:ok, conn} <- configure(conn, opts) do
          {:ok, conn}
        else
          {:error, err} = error ->
            # Don't leak the socket if auth/configuration failed.
            :gen_tcp.close(socket)
            Logger.error("Failed to initialize connection - #{inspect(err)}")
            error
        end

      {:error, err} = error ->
        Logger.error("Failed to connect to MonetDB on #{host(opts)}:#{port(opts)} - #{inspect(err)}")
        error
    end
  end

  @doc """
  Closes the underlying socket. Returns nil, the sentinel for "connection no
  longer usable".
  """
  def close(conn) do
    :gen_tcp.close(connection(conn, :socket))
    nil
  end

  # I don't think this logic is quite right. The idea is that we don't want
  # to return a dead connection back into the pool.
  defp error_result({:error, err} = result, conn) do
    case Error.closed?(err) do
      true -> {result, close(conn)}
      false -> {result, conn}
    end
  end

  defp error_result(result, conn) do
    {result, conn}
  end

  @doc """
  In Elixir, every socket is assigned a "controlling" process. This is to control
  the destination of incoming data when the socket is in active mode. We don't
  use active mode (but we might leverage it in the future).

  Still, it appears that the socket is also tied to the lifetime of the
  controlling process, so we do have to set this once in Monet.init_worker.
  """
  def controlling_process(conn, pid) do
    socket = connection(conn, :socket)

    case :gen_tcp.controlling_process(socket, pid) do
      :ok -> :ok
      err -> :gen_tcp.close(socket); err
    end
  end

  # Builds the connection record from options and runs the MonetDB login
  # handshake. A :redirect reply closes this socket and reconnects to the
  # server the redirect points at.
  defp authenticate(socket, opts) do
    pool_name = Keyword.get(opts, :name, Monet)
    send_timeout = Keyword.get(opts, :send_timeout, 10_000)
    read_timeout = Keyword.get(opts, :read_timeout, 10_000)
    connect_timeout = Keyword.get(opts, :connect_timeout, 10_000)

    username = Keyword.get(opts, :username, "monetdb")
    # NOTE(review): this default looks like a redacted placeholder — confirm
    # the intended default password.
    password = Keyword.get(opts, :password, "<PASSWORD>")
    database = Keyword.get(opts, :database, "monetdb")

    conn = connection(
      socket: socket,
      pool_name: pool_name,
      send_timeout: send_timeout,
      read_timeout: read_timeout,
      connect_timeout: connect_timeout
    )

    :inet.setopts(socket, send_timeout: Keyword.get(opts, :send_timeout, 10_000))

    case Monet.Auth.login(conn, username: username, password: password, database: database) do
      {:ok, _} = ok -> ok
      {:error, _} = err -> err
      {:redirect, redirect} ->
        :gen_tcp.close(socket)
        connect(Keyword.merge(opts, redirect))
    end
  end

  # there are some commands we want to send on startup
  defp configure(conn, opts) do
    with {:ok, conn} <- set_time_zone(conn, opts),
         {:ok, conn} <- set_reply_size(conn),
         {:ok, conn} <- set_schema(conn, opts),
         {:ok, conn} <- set_role(conn, opts) do
      {:ok, conn}
    end
  end

  # Sends "set time zone" with the configured offset (in minutes, default 0).
  defp set_time_zone(conn, opts) do
    offset =
      case Keyword.get(opts, :time_zone_offset) do
        nil -> "0"
        n when is_integer(n) -> Integer.to_string(n)
        err -> {:error, ":time_zone_offset offset must be nil or an integer, got: #{inspect(err)}"}
      end

    Writer.query(conn, "set time zone interval '#{offset}' minute;")

    # "&3 " is the server's acknowledgement prefix for this statement.
    case Reader.message(conn) do
      {:ok, <<"&3 ", _::binary>>} -> {:ok, conn}
      {:ok, invalid} -> {:error, "Unexpected reply from set time zone: #{invalid}"}
      err -> err
    end
  end

  # I don't know what the default is, but every other driver sets this so that
  # queries don't return a limited result.
  defp set_reply_size(conn) do
    Writer.command(conn, "reply_size -1")

    case Reader.message(conn) do
      {:ok, ""} -> {:ok, conn}
      {:ok, invalid} -> {:error, "Unexpected reply from reply_size command: #{invalid}"}
      err -> err
    end
  end

  # Optionally issues "set schema" when a :schema option is provided.
  defp set_schema(conn, opts) do
    case Keyword.get(opts, :schema) do
      nil -> {:ok, conn}
      schema ->
        Writer.query(conn, "set schema #{schema}")

        case Reader.message(conn) do
          {:ok, <<"&3 ", _::binary>>} -> {:ok, conn}
          {:ok, invalid} -> {:error, "Unexpected reply from set schema: #{invalid}"}
          err -> err
        end
    end
  end

  # Optionally issues "set role" when a :role option is provided.
  defp set_role(conn, opts) do
    case Keyword.get(opts, :role) do
      nil -> {:ok, conn}
      role ->
        Writer.query(conn, "set role #{role}")

        case Reader.message(conn) do
          {:ok, <<"&3 ", _::binary>>} -> {:ok, conn}
          {:ok, invalid} -> {:error, "Unexpected reply from set role: #{invalid}"}
          err -> err
        end
    end
  end

  # only extracted so that we can reuse the logic when logging a connection error
  defp port(opts), do: Keyword.get(opts, :port, 50_000)

  # :gen_tcp expects a charlist host.
  defp host(opts) do
    case Keyword.get(opts, :host) do
      nil -> '127.0.0.1'
      host -> String.to_charlist(host)
    end
  end

  @doc """
  Get the name of the pool this connection belongs to.
  """
  def pool_name(conn), do: connection(conn, :pool_name)
end
|
lib/connection.ex
| 0.532668 | 0.409575 |
connection.ex
|
starcoder
|
defmodule Knock do
  @moduledoc """
  Official SDK for interacting with Knock.

  ## Example usage

  ### As a module

  The recommended way to configure Knock is as a module in your application. Doing so will
  allow you to customize the options via configuration in your app.

  ```elixir
  # lib/my_app/knock.ex
  defmodule MyApp.Knock do
    use Knock, otp_app: :my_app
  end

  # config/runtime.exs
  config :my_app, MyApp.Knock,
    api_key: System.get_env("KNOCK_API_KEY")
  ```

  In your application you can now execute commands on your configured Knock instance.

  ```elixir
  client = MyApp.Knock.client()
  {:ok, user} = Knock.Users.get_user(client, "user_1")
  ```

  ### Invoking directly

  Optionally you can forgo implementing your own Knock module and create client instances
  manually:

  ```elixir
  client = Knock.Client.new(api_key: "sk_test_12345")
  ```

  ### Customizing options

  Out of the box the client will specify Tesla and Jason as the HTTP adapter and JSON client,
  respectively. However, you can customize this at will:

  ```elixir
  config :my_app, Knock,
    adapter: Tesla.Adapter.Finch,
    json_client: JSX
  ```

  You can read more about the available adapters in the [Tesla documentation](https://hexdocs.pm/tesla/readme.html#adapters)
  """

  defmacro __using__(opts) do
    quote do
      # OTP app whose environment holds the configuration for this module.
      @app_name Keyword.fetch!(unquote(opts), :otp_app)
      # Environment variable consulted when no :api_key is configured.
      @api_key_env_var "KNOCK_API_KEY"

      alias Knock.Client

      @doc """
      Creates a new client, reading the configuration set for this
      application and module in the process
      """
      def client(overrides \\ []) do
        overrides
        |> fetch_options()
        |> Client.new()
      end

      # Reads `config :app, __MODULE__` options, resolves the API key, and lets
      # explicit `overrides` win over configured values.
      defp fetch_options(overrides) do
        Application.get_env(@app_name, __MODULE__, [])
        |> maybe_resolve_api_key()
        |> Keyword.merge(overrides)
      end

      # Accepts a literal binary api key, a `{:system, var}` indirection, or
      # falls back to the KNOCK_API_KEY environment variable.
      defp maybe_resolve_api_key(opts) do
        case Keyword.get(opts, :api_key) do
          api_key when is_binary(api_key) -> opts
          {:system, var_name} -> Keyword.put(opts, :api_key, System.get_env(var_name))
          _ -> Keyword.put(opts, :api_key, System.get_env(@api_key_env_var))
        end
      end
    end
  end

  @doc """
  Issues a notify call, triggering a workflow with the given key.
  """
  defdelegate notify(client, key, properties), to: Knock.Workflows, as: :trigger
end
|
lib/knock.ex
| 0.836454 | 0.625638 |
knock.ex
|
starcoder
|
defmodule ShEx.OneOf do
  @moduledoc false

  # A ShEx `OneOf` triple expression: each group match is produced by whichever
  # single alternative in `expressions` matches first, and the group as a whole
  # is repeated subject to the `min`/`max` cardinality bounds.
  defstruct [
    # tripleExprLabel?
    :id,
    # [tripleExpr{2,}]
    :expressions,
    # INTEGER?
    :min,
    # INTEGER?
    :max,
    # [SemAct+]?
    :sem_acts,
    # [Annotation+]?
    :annotations
  ]

  import ShEx.TripleExpression.Shared

  # Greedily matches `triples` against the alternatives, then checks the number
  # of matches against the minimum cardinality. Returns
  # `{:ok, matched, remainder}` or `{:error, violation}`.
  def matches(one_of, triples, graph, schema, association, state) do
    with {matched, remainder, match_count, violations} <-
           find_matches(triples, one_of, graph, schema, association, state),
         :ok <-
           check_cardinality(
             match_count,
             ShEx.TripleExpression.min_cardinality(one_of),
             one_of,
             violations
           ) do
      {:ok, matched, remainder}
    else
      violation ->
        {:error, violation}
    end
  end

  # Seeds the matching loop with no matches, all triples remaining, and the
  # group's maximum cardinality as the stop bound.
  defp find_matches(triples, one_of, graph, schema, association, state) do
    do_find_matches(
      {:ok, [], triples, 0, []},
      one_of.expressions,
      ShEx.TripleExpression.max_cardinality(one_of),
      graph,
      schema,
      association,
      state
    )
  end

  # Max cardinality reached (match_count equals the repeated `max` binding):
  # stop and report what was matched so far.
  defp do_find_matches({:ok, matched, remainder, max, violations}, _, max, _, _, _, _),
    do: {matched, remainder, max, violations}

  # Try each alternative in order. The first one that matches halts the inner
  # reduce with an :ok-tagged accumulator, which recurses for another round.
  defp do_find_matches(
         {:ok, matched, remainder, match_count, violations},
         expressions,
         max,
         graph,
         schema,
         association,
         state
       ) do
    expressions
    |> Enum.reduce_while({matched, remainder, match_count, violations}, fn
      expression, {matched, remainder, match_count, violations} ->
        ShEx.TripleExpression.matches(expression, remainder, graph, schema, association, state)
        |> case do
          {:ok, new_matched, new_remainder} ->
            # Alternative matched: count it and stop scanning the alternatives.
            {:halt, {:ok, new_matched, new_remainder, match_count + 1, violations}}

          {:error, violation} ->
            # Collect the violation and try the next alternative.
            {:cont, {matched, remainder, match_count, violations ++ List.wrap(violation)}}
        end
    end)
    |> do_find_matches(expressions, max, graph, schema, association, state)
  end

  # Accumulator without the :ok tag: no alternative matched this round, so the
  # loop terminates with the accumulated state.
  defp do_find_matches(acc, _, _, _, _, _, _), do: acc

  defimpl ShEx.TripleExpression do
    def matches(one_of, triples, graph, schema, association, state) do
      ShEx.OneOf.matches(one_of, triples, graph, schema, association, state)
    end

    def min_cardinality(one_of), do: ShEx.TripleExpression.Shared.min_cardinality(one_of)
    def max_cardinality(one_of), do: ShEx.TripleExpression.Shared.max_cardinality(one_of)

    def predicates(one_of, state),
      do: ShEx.TripleExpression.Shared.predicates_of_group(one_of, state)

    def triple_constraints(one_of, state),
      do: ShEx.TripleExpression.Shared.triple_constraints_of_group(one_of, state)

    def required_arcs(one_of, state),
      do: ShEx.TripleExpression.Shared.required_arcs_of_group(one_of, state)
  end

  defimpl ShEx.Operator do
    # RDF terms among the expressions are labels referring to triple
    # expressions defined elsewhere; wrap them so the traversal can tell.
    def children(one_of) do
      Enum.map(one_of.expressions, fn expression ->
        if RDF.term?(expression) do
          {:triple_expression_label, expression}
        else
          expression
        end
      end)
    end

    def triple_expression_label_and_operands(one_of),
      do: {one_of.id, one_of.expressions}
  end
end
|
lib/shex/shape_expressions/one_of.ex
| 0.584983 | 0.449816 |
one_of.ex
|
starcoder
|
defmodule Exhort.SAT.LinearExpression do
  @moduledoc """
  An expression in terms of variables and operators, constraining the overall
  model.

  Expressions should be defined through `Exhort.SAT.Constraint` or
  `Exhort.SAT.Builder`. Alternatively, a new expression may be created using the
  `Exhort.SAT.LinearExpression.new/1` macro.

  The approach here is to transform values into `LinearExpression`s and then
  apply the operator (e.g., `:sum`) to the expressions. This allows for fewer
  NIF functions to handle the combination of the number of arguments.
  """

  @type t :: %__MODULE__{}

  # `res` holds the native (NIF) handle once resolved; `expr` the symbolic form.
  defstruct res: nil, expr: [], expressions: []

  alias __MODULE__
  alias Exhort.NIF.Nif
  alias Exhort.SAT.BoolVar
  alias Exhort.SAT.DSL
  alias Exhort.SAT.IntVar
  alias Exhort.SAT.Vars

  # An expression term: a variable reference (atom/string) or a sub-expression.
  @type eterm :: atom() | String.t() | LinearExpression.t()

  @doc """
  Associate the parts of a linear expression with the native representation,
  recursively associating the constituent components as necessary.
  """
  @spec resolve(LinearExpression.t() | IntVar.t() | integer(), map()) :: LinearExpression.t()
  # Sum over a list of terms.
  def resolve(
        %LinearExpression{
          res: nil,
          expr: {:sum, sum_list}
        } = expr,
        vars
      )
      when is_list(sum_list) do
    resolve_sum(expr, sum_list, vars)
  end

  # Binary sum of two sub-expressions: treat as a two-element sum list.
  def resolve(
        %LinearExpression{
          res: nil,
          expr: {:sum, %LinearExpression{} = expr1, %LinearExpression{} = expr2}
        } = expr,
        vars
      ) do
    sum_list = [expr1, expr2]
    resolve_sum(expr, sum_list, vars)
  end

  # Product of a boolean variable and an integer constant.
  def resolve(%LinearExpression{res: nil, expr: {:prod, %BoolVar{} = var1, int2}} = expr, _vars)
      when is_integer(int2) do
    Nif.prod_bool_var1_constant2_nif(var1.res, int2)
    |> then(&%LinearExpression{expr | res: &1})
  end

  # Product of an integer variable and an integer constant.
  def resolve(
        %LinearExpression{res: nil, expr: {:prod, %IntVar{} = var1, int2}} = expr,
        _vars
      )
      when is_integer(int2) do
    Nif.prod_int_var1_constant2_nif(var1.res, int2)
    |> then(&%LinearExpression{expr | res: &1})
  end

  # Product of a sub-expression and an integer constant: resolve the
  # sub-expression first, then multiply natively.
  def resolve(
        %LinearExpression{res: nil, expr: {:prod, %LinearExpression{} = expr1, int2}} = expr,
        vars
      )
      when is_integer(int2) do
    %LinearExpression{} = expr1 = resolve(expr1, vars)

    Nif.prod_expr1_constant2_nif(expr1.res, int2)
    |> then(&%LinearExpression{expr | res: &1, expr: {:prod, expr1, int2}})
  end

  # Same as above with the constant on the left (product is commutative).
  def resolve(
        %LinearExpression{res: nil, expr: {:prod, int1, %LinearExpression{} = expr2}} = expr,
        vars
      )
      when is_integer(int1) do
    %LinearExpression{} = expr2 = resolve(expr2, vars)

    Nif.prod_expr1_constant2_nif(expr2.res, int1)
    |> then(&%LinearExpression{expr | res: &1, expr: {:prod, int1, expr2}})
  end

  # Product of a variable reference (atom/string) and a constant: look the
  # variable up and retry.
  def resolve(%LinearExpression{res: nil, expr: {:prod, sym1, int2}} = expr, vars)
      when not is_integer(sym1) and is_integer(int2) do
    var1 = Vars.get(vars, sym1)
    resolve(%LinearExpression{expr | expr: {:prod, var1, int2}}, vars)
  end

  def resolve(%LinearExpression{res: nil, expr: {:prod, int1, sym2}} = expr, vars)
      when is_integer(int1) and not is_integer(sym2) do
    var2 = Vars.get(vars, sym2)
    resolve(%LinearExpression{expr | expr: {:prod, var2, int1}}, vars)
  end

  # Variable-times-variable products are not supported by the solver interface.
  def resolve(%LinearExpression{res: nil, expr: {:prod, _, _}}, _vars) do
    raise "Products are only supported when one of the arguments is a constant"
  end

  # Difference of two resolved sub-expressions (order matters).
  def resolve(
        %LinearExpression{
          res: nil,
          expr: {:minus, %LinearExpression{} = expr1, %LinearExpression{} = expr2}
        } = expr,
        vars
      ) do
    expr1 = resolve(expr1, vars)
    expr2 = resolve(expr2, vars)

    Nif.minus_nif(expr1.res, expr2.res)
    |> then(&%LinearExpression{expr | res: &1, expr: {:minus, expr1, expr2}})
  end

  # Generic operator clauses: normalize IntVar/integer operands into
  # LinearExpressions and retry until a specific clause above applies.
  def resolve(
        %LinearExpression{res: nil, expr: {opr, %IntVar{} = var1, %LinearExpression{} = expr2}} =
          expr,
        vars
      ) do
    %LinearExpression{} = expr1 = resolve(var1, vars)
    %LinearExpression{} = expr2 = resolve(expr2, vars)
    resolve(%LinearExpression{expr | expr: {opr, expr1, expr2}}, vars)
  end

  def resolve(
        %LinearExpression{res: nil, expr: {opr, %LinearExpression{} = expr1, %IntVar{} = var2}} =
          expr,
        vars
      ) do
    resolve(%LinearExpression{expr | expr: {opr, expr1, var2}}, vars)
  end

  def resolve(
        %LinearExpression{res: nil, expr: {opr, %IntVar{} = var1, %IntVar{} = var2}} = expr,
        vars
      ) do
    %LinearExpression{} = expr1 = resolve(var1, vars)
    %LinearExpression{} = expr2 = resolve(var2, vars)
    resolve(%LinearExpression{expr | expr: {opr, expr1, expr2}}, vars)
  end

  def resolve(
        %LinearExpression{res: nil, expr: {opr, %LinearExpression{} = var1, int2}} = expr,
        vars
      )
      when is_integer(int2) do
    %LinearExpression{} = expr1 = resolve(var1, vars)
    %LinearExpression{} = expr2 = resolve(int2, vars)
    resolve(%LinearExpression{expr | expr: {opr, expr1, expr2}}, vars)
  end

  # NOTE(review): this clause swaps the operand order, which is only safe for
  # commutative operators (:sum, :prod). A :minus built with the integer on the
  # left would be reordered — confirm callers never produce that shape.
  def resolve(
        %LinearExpression{res: nil, expr: {opr, int1, %LinearExpression{} = var2}} = expr,
        vars
      ) do
    resolve(%LinearExpression{expr | expr: {opr, var2, int1}}, vars)
  end

  # Fallback for two symbolic operands: resolve both and retry.
  def resolve(%LinearExpression{res: nil, expr: {opr, sym1, sym2}} = expr, vars) do
    expr1 = resolve(sym1, vars)
    expr2 = resolve(sym2, vars)
    resolve(%LinearExpression{expr | expr: {opr, expr1, expr2}}, vars)
  end

  # Already resolved (res is non-nil): nothing to do.
  def resolve(%LinearExpression{} = expr, _vars), do: expr

  # Wrap a boolean variable as a native expression.
  def resolve(%BoolVar{} = var, vars) do
    var
    |> then(&Vars.get(vars, &1))
    |> then(&Nif.expr_from_bool_var_nif(&1.res))
    |> then(&%LinearExpression{res: &1, expr: var})
  end

  # Wrap an integer variable as a native expression.
  def resolve(%IntVar{} = var, vars) do
    var
    |> then(&Vars.get(vars, &1))
    |> then(&Nif.expr_from_int_var_nif(&1.res))
    |> then(&%LinearExpression{res: &1, expr: var})
  end

  # Wrap an integer constant as a native expression.
  def resolve(val, _vars) when is_integer(val) do
    val
    |> then(&Nif.expr_from_constant_nif(&1))
    |> then(&%LinearExpression{res: &1, expr: val})
  end

  # A variable reference by name: look it up and resolve the variable.
  def resolve(val, vars) when is_atom(val) or is_binary(val) do
    vars
    |> Vars.get(val)
    |> resolve(vars)
  end

  # Resolves every term of a sum (looking up symbolic references first) and
  # combines the native handles with a single sum NIF call.
  @spec resolve_sum(LinearExpression.t(), list(), map()) :: LinearExpression.t()
  defp resolve_sum(expr, sum_list, vars) do
    sum_list
    |> Enum.map(fn item ->
      case item do
        %LinearExpression{} = item -> item
        _ -> Vars.get(vars, item)
      end
    end)
    |> Enum.map(fn expr ->
      expr = resolve(expr, vars)
      expr.res
    end)
    |> List.to_tuple()
    |> Nif.sum_nif()
    |> then(&%LinearExpression{expr | res: &1, expr: {:sum, sum_list}})
  end

  @doc """
  Create a linear expression as the sum of the list of provided variables.
  """
  @spec sum([eterm()]) :: LinearExpression.t()
  def sum(vars) when is_list(vars) do
    %LinearExpression{expr: {:sum, vars}}
  end

  @doc """
  Create a linear expression as the sum of `var1` and `var2`.
  """
  @spec sum(eterm(), eterm()) :: LinearExpression.t()
  def sum(var1, var2) do
    %LinearExpression{expr: {:sum, var1, var2}}
  end

  @doc """
  Create a linear expression as the difference of `var1` and `var2`.
  """
  @spec minus(eterm(), eterm()) :: LinearExpression.t()
  def minus(var1, var2) do
    %LinearExpression{expr: {:minus, var1, var2}}
  end

  @doc """
  Create a linear expression as the product of `val1` and `val2`.
  """
  @spec prod(IntVar.t() | integer() | list(), IntVar.t() | integer() | list()) ::
          LinearExpression.t()
  # Two lists: pairwise products summed (a dot product).
  def prod(val1, val2) when is_list(val1) and is_list(val2) do
    Enum.zip(val1, val2)
    |> Enum.map(&prod(elem(&1, 0), elem(&1, 1)))
    |> then(&%LinearExpression{expr: {:sum, &1}})
  end

  def prod(val1, val2) do
    %LinearExpression{expr: {:prod, val1, val2}}
  end

  @doc """
  Create a linear expression from the given integer constant.
  """
  @spec constant(integer()) :: LinearExpression.t()
  def constant(int) do
    %LinearExpression{expr: {:constant, int}}
  end

  @doc """
  Create an expression from the given `items`. Each expression is created using
  `term_fn` and joined using `join_fn`.
  """
  def terms(items, term_fn, join_fn) do
    items
    |> Enum.reduce(nil, fn
      item, nil ->
        term_fn.(item)

      item, expr ->
        term = term_fn.(item)
        join_fn.(expr, term)
    end)
  end

  @doc """
  Create an expression from the given list of terms. Each term is a two-value
  tuple. The first element of the tuple is a constant coefficient. The second
  element is an integer variable.

  ```
  [{3, :x}, {4, :y}, {5, :z}] => 3*x + 4*y + 5*z
  ```
  """
  @spec terms([{integer(), atom() | String.t()}]) :: LinearExpression.t()
  def terms(items) when is_list(items) do
    Enum.unzip(items)
    |> then(fn {coeff, vars} ->
      prod(vars, coeff)
    end)
  end

  # Transforms an `expr`-style AST into LinearExpression calls at compile time.
  defmacro term(expression_ast) do
    DSL.transform_expression(expression_ast)
  end
end
|
lib/exhort/sat/linear_expression.ex
| 0.863478 | 0.550426 |
linear_expression.ex
|
starcoder
|
defmodule Flex.Variable do
  alias Flex.{Set, Variable}

  @moduledoc """
  An interface to create Fuzzy Variables.
  """

  defstruct tag: nil,
            fuzzy_sets: nil,
            mf_values: %{},
            range: nil,
            rule_output: nil,
            type: nil

  @typedoc """
  Fuzzy Variable struct.
  - `:tag` - (string) the linguistic name of the fuzzy variable (e.g., "error").
  - `:fuzzy_sets` - (list) all the fuzzy sets related to the variable.
  - `:mf_values` - (map) current membership-function value of each fuzzy set, keyed by set tag.
  - `:range` - (range) the range in which the variable exists.
  - `:type` - (atom) `:antecedent` if the variable is an input, `:consequent` for outputs.
  """
  @type t :: %__MODULE__{
          tag: String.t(),
          fuzzy_sets: [Flex.Set.t(), ...],
          mf_values: %{},
          range: any(),
          type: :antecedent | :consequent
        }

  @doc """
  Creates a Fuzzy Variable.

  The following options are required:
  * `:tag` - (string) the linguistic name of the fuzzy variable (e.g., "error"),
  * `:fuzzy_sets` - (list) the fuzzy sets belonging to the variable,
  * `:type` - (atom) the kind of variable (`:antecedent` or `:consequent`),
  * `:range` - (range) the range in which the variable exists.
  """
  @spec new(keyword) :: Flex.Variable.t()
  def new(params) do
    %Variable{
      tag: Keyword.fetch!(params, :tag),
      range: Keyword.fetch!(params, :range),
      fuzzy_sets: Keyword.fetch!(params, :fuzzy_sets),
      type: Keyword.fetch!(params, :type)
    }
  end

  @doc """
  Updates an antecedent Fuzzy Variable (ANFIS): applies one gradient per fuzzy
  set with the given learning rate and clears cached evaluation state.
  """
  @spec update(Flex.Variable.t(), list(), number()) :: Flex.Variable.t()
  def update(fuzzy_variable, gradients, learning_rate) do
    updated_sets =
      for {fuzzy_set, gradient} <- Enum.zip(fuzzy_variable.fuzzy_sets, gradients) do
        Set.update(fuzzy_set, gradient, learning_rate)
      end

    %{fuzzy_variable | fuzzy_sets: updated_sets, rule_output: nil, mf_values: %{}}
  end

  @doc """
  Updates a consequent Fuzzy Variable (ANFIS): consumes `x_vector` three
  parameters at a time, one triple per fuzzy set. Raises if the vector does not
  contain exactly three values per set.
  """
  @spec update(Flex.Variable.t(), list()) :: Flex.Variable.t()
  def update(fuzzy_variable, x_vector) do
    {updated_sets, []} =
      Enum.reduce(fuzzy_variable.fuzzy_sets, {[], x_vector}, fn fuzzy_set, {acc, [a, b, c | rest]} ->
        {acc ++ [Set.update(fuzzy_set, [a, b, c])], rest}
      end)

    %{fuzzy_variable | fuzzy_sets: updated_sets, rule_output: nil, mf_values: %{}}
  end

  @doc """
  Turns an antecedent fuzzy variable (input) from a crisp value to a fuzzy
  value by evaluating every set's membership function against `input`.
  Returns `:error` for non-antecedent variables.
  """
  @spec fuzzification(Flex.Variable.t(), any()) :: :error | Flex.Variable.t()
  def fuzzification(%Variable{type: :antecedent} = fuzzy_var, input) do
    memberships = Map.new(fuzzy_var.fuzzy_sets, fn fs -> {fs.tag, fs.mf.(input)} end)
    %{fuzzy_var | mf_values: memberships}
  end

  def fuzzification(_fuzzy_var, _input), do: :error
end
|
lib/variable.ex
| 0.856077 | 0.751876 |
variable.ex
|
starcoder
|
defmodule EpicenterWeb.Test.Pages.Search do
  @moduledoc """
  LiveView test-page helpers for the app-wide search: assertions about the
  rendered search-results panel plus user actions (searching, paging, closing).
  Every helper returns the `view` so calls can be chained in a pipeline.
  """

  import Euclid.Test.Extra.Assertions
  import ExUnit.Assertions
  import Phoenix.LiveViewTest

  alias Epicenter.Test
  alias Epicenter.Test.HtmlAssertions
  alias Phoenix.LiveViewTest.View

  # Asserts the prev/next pagination link is rendered with a disabled attribute.
  def assert_disabled(view, link) when link in ~w[prev next]a do
    assert view |> render() |> Test.Html.parse() |> Test.Html.attr("[data-role=search-#{link}]", "disabled") == ["disabled"]
    view
  end

  # Asserts the empty-state message is shown and mentions the search term.
  def assert_no_results(view, search_term) do
    view
    |> render()
    |> Test.Html.parse()
    |> HtmlAssertions.assert_contains_text("no-search-results", "No results found for")
    |> HtmlAssertions.assert_contains_text("no-search-results", search_term)

    view
  end

  # Asserts the visible result rows match `search_result_rows`, where each row
  # is the [name, details, labs] text of one result.
  def assert_results(view, search_result_rows) do
    view
    |> render()
    |> Test.Html.parse()
    |> assert_results_visible(true)
    |> Test.Html.all("[data-role=search-result]", fn search_result ->
      [
        Test.Html.text(search_result, "[data-role=search-result-name"),
        Test.Html.text(search_result, "[data-role=search-result-details"),
        Test.Html.text(search_result, "[data-role=search-result-labs")
      ]
    end)
    |> assert_eq(search_result_rows, returning: view)
  end

  # Asserts the test ids of the visible result rows match `expected_tids`.
  def assert_results_tids(view, expected_tids) do
    view
    |> render()
    |> Test.Html.parse()
    |> assert_results_visible(true)
    |> Test.Html.all("[data-role=search-result]", as: :tids)
    |> assert_eq(expected_tids, returning: view)
  end

  # View variant: renders and delegates to the parsed-html clause; returns view.
  def assert_results_visible(%View{} = view, expected_visible?) do
    view
    |> render()
    |> Test.Html.parse()
    |> assert_results_visible(expected_visible?)

    view
  end

  # Parsed-html variant: asserts presence/absence of the results container;
  # returns the parsed html so it can be used mid-pipeline.
  def assert_results_visible(parsed_html, expected_visible?) do
    results_element = parsed_html |> Test.Html.find("[data-role=search-results]")

    if expected_visible? do
      assert length(results_element) > 0
    else
      assert results_element == []
    end

    parsed_html
  end

  # Asserts the search input currently holds `search_term`.
  def assert_search_term_in_search_box(view, search_term) do
    view
    |> render()
    |> Test.Html.parse()
    |> Test.Html.attr("[data-role=search-term-input]", "value")
    |> assert_eq([search_term], returning: view)
  end

  # Clicks the "next page" pagination control.
  def click_next(view) do
    view |> element("[data-role=search-next]") |> render_click()
    view
  end

  # Clicks a specific page-number pagination control.
  def click_page_number(view, page_number) do
    view |> element("[data-page-number=#{page_number}]") |> render_click()
    view
  end

  # Clicks the "previous page" pagination control.
  def click_prev(view) do
    view |> element("[data-role=search-prev]") |> render_click()
    view
  end

  # Dismisses the search-results panel.
  def close_search_results(view) do
    view
    |> element("[data-role=close-search-results]")
    |> render_click()

    view
  end

  # Types `term` into the search form and triggers its change event.
  def search(view, term) do
    view
    |> form("[data-role=app-search] form", %{search: %{"term" => term}})
    |> render_change()

    view
  end
end
|
test/support/pages/search.ex
| 0.575946 | 0.489259 |
search.ex
|
starcoder
|
defmodule Battleship.Game.Board do
@moduledoc """
Game board
"""
alias Battleship.{Ship}
require Logger
@ships_sizes [5, 4, 3, 2, 2, 1, 1]
@size 10
@orientations [:horizontal, :vertical]
@grid_value_water "Β·"
@grid_value_ship "/"
@grid_value_water_hit "O"
@grid_value_ship_hit "*"
defstruct [
player_id: nil,
ships: [],
grid: %{},
ready: false,
hit_points: 0
]
@doc """
Creates a new board for a Player.

Starts an `Agent` registered under the player's global ref, holding a fresh
grid and the fixed fleet of unplaced ships.
"""
def create(player_id) do
  Logger.debug "Starting board for player #{player_id}"
  # Parenthesize the zero-arity local call; bare `build_grid` is deprecated
  # and emits a compiler warning on modern Elixir.
  grid = build_grid()
  ships = Enum.map(@ships_sizes, &%Ship{size: &1})
  Agent.start(fn -> %__MODULE__{player_id: player_id, grid: grid, ships: ships} end, name: ref(player_id))
end
@doc """
Destroys an existing Player board, if one is registered.
"""
def destroy(player_id) do
  case GenServer.whereis(ref(player_id)) do
    nil ->
      # Nothing registered under this id; fixed the "unesxisting" typo in the log.
      Logger.debug "Attempt to destroy non-existing Board for player #{player_id}"

    pid ->
      Logger.debug "Stopping board for player #{player_id}"
      Agent.stop(pid, :normal, :infinity)
  end
end
@doc """
Adds a new ship to the board.

Guard clauses reject invalid sizes, positions, and orientations before the
board state is consulted.
"""
# Use `x not in y`; the `not x in y` form is deprecated and warns on modern Elixir.
def add_ship(_player_id, %Ship{size: size}) when size not in @ships_sizes, do: {:error, "Invalid size"}
def add_ship(_player_id, %Ship{x: x}) when x > (@size - 1) or x < 0, do: {:error, "Invalid position"}
def add_ship(_player_id, %Ship{y: y}) when y > (@size - 1) or y < 0, do: {:error, "Invalid position"}
def add_ship(_player_id, %Ship{orientation: orientation}) when orientation not in @orientations, do: {:error, "Invalid orientation"}
def add_ship(player_id, ship) do
  Logger.debug "Adding ship for player #{player_id}"
  board = Agent.get(ref(player_id), &(&1))

  cond do
    board.ready ->
      {:error, "All ships are placed"}

    ship_already_placed?(board, ship) ->
      {:error, "Ship already added"}

    ship_with_invalid_bounds?(ship) || ship_with_invalid_coordinates?(board, ship) ->
      {:error, "Ship has invalid coordinates"}

    true ->
      # Place the ship, recompute hit points, and flag the board ready once
      # the full fleet is down; then persist the new state in the Agent.
      new_board = board
      |> add_ship_to_grid(ship)
      |> set_hit_points
      |> set_is_ready

      Agent.update(ref(player_id), fn(_) -> new_board end)
      {:ok, new_board}
  end
end
@doc """
Returns the board
"""
def get_data(player_id) do
Logger.debug "Getting board state for player #{player_id}"
Agent.get(ref(player_id), &(&1))
end
@doc """
Returns the board for an opponent player, replacing ship positions with
water values.
"""
def get_opponents_data(player_id) do
Logger.debug "Getting opponents board state for player #{player_id}"
board = Agent.get(ref(player_id), &(&1))
new_grid = board
|> Map.get(:grid)
|> Enum.reduce(%{}, fn({coords, value}, acc) -> Map.put(acc, coords, opponent_grid_value(value)) end)
%{board | ships: nil, grid: new_grid}
end
@doc """
Takes a hit, checks the result and returns the board updated
"""
def take_shot(player_id, x: x, y: y) do
coords = Enum.join([y, x], "")
Logger.debug "Player #{player_id} taking show at #{coords}"
result = player_id
|> get_data
|> Map.get(:grid)
|> Map.get(coords)
|> shot_result
Logger.debug "Shot result: #{result}"
result
|> add_result_to_board(player_id, coords)
|> update_hit_points
{:ok, result}
end
# Generates global reference name for the board process
defp ref(player_id), do: {:global, {:board, player_id}}
# Checks if a similar ship has been already placed
defp ship_already_placed?(%__MODULE__{ships: ships}, %Ship{size: size}) do
permited_amount = Enum.count(@ships_sizes, &(&1 == size))
Enum.count(ships, &(&1.size == size and ship_placed?(&1))) == permited_amount
end
# Checks if the ship is inside the boards boundaries
defp ship_with_invalid_bounds?(%Ship{orientation: :horizontal} = ship) do
ship.x + ship.size > @size
end
defp ship_with_invalid_bounds?(%Ship{orientation: :vertical} = ship) do
ship.y + ship.size > @size
end
# Checks is the ship is overlaps an exisiting one
defp ship_with_invalid_coordinates?(board, ship) do
ship
|> Ship.coordinates
|> Enum.map(&(board.grid[&1] == @grid_value_ship))
|> Enum.any?(&(&1 == true))
end
# Adds a ship to the grid
defp add_ship_to_grid(%__MODULE__{ships: ships} = board, ship) do
coordinates = ship
|> Ship.coordinates
|> Enum.reduce(%{}, fn(coord, acc) -> Map.put(acc, coord, @grid_value_ship) end)
ship_index = Enum.find_index(ships, &(&1.size == ship.size and !ship_placed?(&1)))
ships = List.update_at(ships, ship_index, &(%{&1 | x: ship.x, y: ship.y, coordinates: coordinates}))
grid = Map.merge board.grid, coordinates
%{board | grid: grid, ships: ships}
end
# Builds a default grid map
defp build_grid do
0..@size - 1
|> Enum.reduce([], &build_rows/2)
|> List.flatten
|> Enum.reduce(%{}, fn item, acc -> Map.put(acc, item, @grid_value_water) end)
end
# Builds cells for a given row
defp build_rows(y, rows) do
row = 0..@size - 1
|> Enum.reduce(rows, fn x, col -> [Enum.join([y, x], "") | col] end)
[row | rows]
end
# Returns shot result depending on the cell's current value
defp shot_result(@grid_value_ship), do: @grid_value_ship_hit
defp shot_result(@grid_value_ship_hit), do: @grid_value_ship_hit
defp shot_result(_current_value), do: @grid_value_water_hit
defp add_result_to_board(result, player_id, coords) do
Agent.update(ref(player_id), &(put_in(&1.grid[coords], result)))
get_data(player_id)
end
defp update_hit_points(board) do
hits = board.grid
|> Map.values
|> Enum.count(&(&1 == @grid_value_ship_hit))
hit_points = Enum.reduce(board.ships, 0, &(&1.size + &2)) - hits
Agent.update(ref(board.player_id), fn(_) -> %{board | hit_points: hit_points} end)
{:ok, get_data(board.player_id)}
end
defp set_is_ready(board), do: %{board | ready: Enum.all?(board.ships, &ship_placed?(&1))}
defp set_hit_points(board), do: %{board | hit_points: Enum.reduce(board.ships, 0, &(&1.size + &2))}
defp opponent_grid_value(@grid_value_ship), do: @grid_value_water
defp opponent_grid_value(value), do: value
defp ship_placed?(ship), do: length(Map.keys(ship.coordinates)) != 0
end
|
lib/battleship/game/board.ex
| 0.836521 | 0.400749 |
board.ex
|
starcoder
|
defprotocol Xema.Castable do
  @moduledoc """
  Converts data using the specified schema.

  Implementations exist for the built-in scalar and collection types; each
  one decides how (or whether) a value of that type can be cast to the
  schema's target type.
  """

  @doc """
  Converts the given data using the specified schema.

  Returns `{:ok, cast_value}` on success or `{:error, info}` when the value
  cannot be converted.
  """
  def cast(value, schema)
end
defimpl Xema.Castable, for: Atom do
  use Xema.Castable.Helper

  # Casting from an atom. The two `nil` clauses must stay first so that
  # `nil` is special-cased before the generic atom clauses below.
  def cast(nil, nil, _module, _schema), do: {:ok, nil}
  def cast(nil, :string, _module, _schema), do: {:error, %{to: :string, value: nil}}

  # One clause per target type instead of a single `case` body.
  def cast(atom, :atom, _module, _schema), do: {:ok, atom}
  def cast(atom, :boolean, _module, _schema) when atom in [true, false], do: {:ok, atom}
  def cast(atom, nil, _module, _schema), do: {:error, %{to: nil, value: atom}}
  def cast(atom, :string, _module, _schema), do: {:ok, to_string(atom)}
  def cast(atom, :struct, module, _schema), do: {:error, %{to: module(module), value: atom}}

  # Catch-all keeps the original `is_atom(type)` guard so non-atom types
  # fail to match, exactly as before.
  def cast(atom, type, _module, _schema) when is_atom(type),
    do: {:error, %{to: type, value: atom}}
end
defimpl Xema.Castable, for: BitString do
  use Xema.Castable.Helper

  # Cast an ISO 8601 string to one of the calendar structs. The 3-tuple
  # clause covers DateTime.from_iso8601/1 (which also returns an offset);
  # the 2-tuple clause covers the other calendar modules.
  def cast(str, :struct, module, _schema)
      when module in [Date, DateTime, NaiveDateTime, Time] do
    case apply(module, :from_iso8601, [str]) do
      {:ok, value, _offset} -> {:ok, value}
      {:ok, value} -> {:ok, value}
      {:error, _} -> {:error, %{to: module, value: str}}
    end
  end

  # Cast a string to a Decimal. Decimal.new/1 raises on invalid input,
  # hence the body-level rescue.
  def cast(str, :struct, Decimal, _schema) do
    # silence the xref warning
    decimal = Decimal
    {:ok, decimal.new(str)}
  rescue
    _ -> {:error, %{to: Decimal, value: str}}
  end

  # Only the literal strings "true"/"false" cast to booleans.
  def cast("true", :boolean, _module, _schema), do: {:ok, true}
  def cast("false", :boolean, _module, _schema), do: {:ok, false}

  # Generic string casts, keyed on the target type.
  def cast(str, type, module, _schema) do
    case type do
      :atom ->
        # to_existing_atom/1 (from the Helper) avoids creating new atoms
        # from untrusted input.
        case to_existing_atom(str) do
          nil -> {:error, %{to: :atom, value: str}}
          atom -> {:ok, atom}
        end

      :float ->
        to_float(str, :float)

      :integer ->
        to_integer(str, :integer)

      :number ->
        # A decimal point selects float parsing, otherwise integer.
        case String.contains?(str, ".") do
          true -> to_float(str, :number)
          false -> to_integer(str, :number)
        end

      :string ->
        {:ok, str}

      :struct ->
        {:error, %{to: module(module), value: str}}

      _ ->
        {:error, %{to: type, value: str}}
    end
  end
end
defimpl Xema.Castable, for: Date do
  use Xema.Castable.Helper

  # A Date can only be "cast" to Date itself; any other target is an error.
  def cast(date, :struct, Date, _schema) do
    {:ok, date}
  end

  def cast(date, :struct, target, _schema) do
    {:error, %{to: module(target), value: date}}
  end

  def cast(date, type, _module, _schema) do
    {:error, %{to: type, value: date}}
  end
end
defimpl Xema.Castable, for: DateTime do
  use Xema.Castable.Helper

  # A DateTime can only be "cast" to DateTime itself; anything else errors.
  def cast(date_time, :struct, DateTime, _schema) do
    {:ok, date_time}
  end

  def cast(date_time, :struct, target, _schema) do
    {:error, %{to: module(target), value: date_time}}
  end

  def cast(date_time, type, _module, _schema) do
    {:error, %{to: type, value: date_time}}
  end
end
defimpl Xema.Castable, for: Decimal do
  use Xema.Castable.Helper

  # A Decimal can only be "cast" to Decimal itself; anything else errors.
  def cast(decimal, :struct, Decimal, _schema) do
    {:ok, decimal}
  end

  def cast(decimal, :struct, target, _schema) do
    {:error, %{to: module(target), value: decimal}}
  end

  def cast(decimal, type, _module, _schema) do
    {:error, %{to: type, value: decimal}}
  end
end
defimpl Xema.Castable, for: Float do
  use Xema.Castable.Helper

  # Floats convert losslessly to Decimal via from_float/1.
  def cast(float, :struct, Decimal, _schema) do
    # silence the xref warning
    decimal = Decimal
    {:ok, decimal.from_float(float)}
  end

  # One clause per target type instead of a single `case` body.
  def cast(float, :float, _module, _schema), do: {:ok, float}
  def cast(float, :number, _module, _schema), do: {:ok, float}
  def cast(float, :string, _module, _schema), do: {:ok, to_string(float)}
  def cast(float, :struct, module, _schema), do: {:error, %{to: module(module), value: float}}
  def cast(float, type, _module, _schema), do: {:error, %{to: type, value: float}}
end
defimpl Xema.Castable, for: Integer do
  use Xema.Castable.Helper

  # Integers convert to Decimal via new/1.
  def cast(int, :struct, Decimal, _schema) do
    # silence the xref warning
    decimal = Decimal
    {:ok, decimal.new(int)}
  end

  # One clause per target type instead of a single `case` body.
  def cast(int, :integer, _module, _schema), do: {:ok, int}
  def cast(int, :number, _module, _schema), do: {:ok, int}
  def cast(int, :string, _module, _schema), do: {:ok, to_string(int)}
  # Multiplying by 1.0 promotes the integer to a float.
  def cast(int, :float, _module, _schema), do: {:ok, int * 1.0}
  def cast(int, :struct, module, _schema), do: {:error, %{to: module(module), value: int}}
  def cast(int, type, _module, _schema), do: {:error, %{to: type, value: int}}
end
defimpl Xema.Castable, for: List do
  use Xema.Castable.Helper

  # The empty list casts to the empty value of every collection target.
  def cast([], type, module, _schema) do
    case type do
      :keyword ->
        {:ok, []}

      :map ->
        {:ok, %{}}

      :list ->
        {:ok, []}

      :tuple ->
        {:ok, {}}

      :struct ->
        {:error, %{to: module(module), value: []}}

      _ ->
        {:error, %{to: type, value: []}}
    end
  end

  # Keyword list -> map with string keys (schema opted into `keys: :strings`).
  # check_keyword/2 (from the Helper) rejects non-keyword lists.
  def cast(list, :map, _module, %Schema{keys: :strings}) do
    with :ok <- check_keyword(list, :map) do
      {:ok, Enum.into(list, %{}, fn {key, value} -> {to_string(key), value} end)}
    end
  end

  # Keyword list -> map, keys kept as-is.
  def cast(list, :map, _module, _schema) do
    with :ok <- check_keyword(list, :map) do
      {:ok, Enum.into(list, %{}, & &1)}
    end
  end

  # Remaining non-empty list casts, keyed on the target type.
  def cast(list, type, module, _schema) do
    case type do
      :keyword ->
        {:ok, list}

      :struct ->
        case module do
          nil ->
            {:error, %{to: :struct, value: list}}

          module ->
            # Only keyword lists carry named fields for a struct.
            case Keyword.keyword?(list) do
              true -> to_struct(module, list)
              false -> {:error, %{to: module, value: list}}
            end
        end

      :tuple ->
        # Keyword lists are deliberately not convertible to tuples.
        case Keyword.keyword?(list) do
          true -> {:error, %{to: :tuple, value: list}}
          false -> {:ok, List.to_tuple(list)}
        end

      :list ->
        {:ok, list}

      _ ->
        {:error, %{to: type, value: list}}
    end
  end
end
defimpl Xema.Castable, for: Map do
  use Xema.Castable.Helper

  # With no target module, a :struct cast leaves the map untouched.
  def cast(map, :struct, nil, _schema), do: {:ok, map}

  # Build a struct of `module` from the map's fields (fields/1 and
  # to_struct/2 come from the Helper).
  def cast(map, :struct, module, _schema) do
    with {:ok, fields} <- fields(map), do: to_struct(module, fields)
  end

  # Map -> keyword list; keys are cast to existing atoms. Two keys that
  # collapse to the same atom are reported as {:ambiguous, key}.
  def cast(map, :keyword, _module, _schema) do
    map
    |> Enum.reduce_while({:ok, []}, fn {key, value}, {:ok, acc} ->
      case cast_key(key, :atoms) do
        {:ok, key} ->
          if Keyword.has_key?(acc, key) do
            {:halt, {:error, %{to: :keyword, key: {:ambiguous, key}}}}
          else
            {:cont, {:ok, [{key, value} | acc]}}
          end

        :error ->
          {:halt, {:error, %{to: :keyword, key: key}}}
      end
    end)
    |> case do
      # Building by prepending reverses the order; restore it here.
      {:ok, list} -> {:ok, Enum.reverse(list)}
      error -> error
    end
  end

  # Map -> map, casting keys per the schema's `keys` option.
  # NOTE(review): the ambiguous-key error here is tagged `to: :keyword`,
  # copied from the keyword clause above — looks like it should be
  # `to: :map`; verify against the library's tests.
  def cast(map, :map, _module, %Schema{keys: keys}) do
    Enum.reduce_while(map, {:ok, %{}}, fn {key, value}, {:ok, acc} ->
      case cast_key(key, keys) do
        {:ok, key} ->
          if Map.has_key?(acc, key) do
            {:halt, {:error, %{to: :keyword, key: {:ambiguous, key}}}}
          else
            {:cont, {:ok, Map.put(acc, key, value)}}
          end

        :error ->
          {:halt, {:error, %{to: :map, key: key}}}
      end
    end)
  end

  # Any other target type is an error.
  def cast(map, type, _module, _schema),
    do: {:error, %{to: type, value: map}}
end
defimpl Xema.Castable, for: NaiveDateTime do
  use Xema.Castable.Helper

  # A NaiveDateTime can only be "cast" to NaiveDateTime; anything else errors.
  def cast(date_time, :struct, NaiveDateTime, _schema) do
    {:ok, date_time}
  end

  def cast(date_time, :struct, target, _schema) do
    {:error, %{to: module(target), value: date_time}}
  end

  def cast(date_time, type, _module, _schema) do
    {:error, %{to: type, value: date_time}}
  end
end
defimpl Xema.Castable, for: Time do
  use Xema.Castable.Helper

  # A Time can only be "cast" to Time itself; anything else errors.
  def cast(time, :struct, Time, _schema) do
    {:ok, time}
  end

  def cast(time, :struct, target, _schema) do
    {:error, %{to: module(target), value: time}}
  end

  def cast(time, type, _module, _schema) do
    {:error, %{to: type, value: time}}
  end
end
defimpl Xema.Castable, for: Tuple do
  use Xema.Castable.Helper

  # One clause per target type instead of a single `case` body.
  def cast(tuple, :tuple, _module, _schema), do: {:ok, tuple}
  def cast(tuple, :list, _module, _schema), do: {:ok, Tuple.to_list(tuple)}
  def cast(tuple, :struct, module, _schema), do: {:error, %{to: module(module), value: tuple}}
  def cast(tuple, type, _module, _schema), do: {:error, %{to: type, value: tuple}}
end
|
lib/xema/castable.ex
| 0.884177 | 0.521288 |
castable.ex
|
starcoder
|
defmodule Calamity.Stack do
  @moduledoc """
  The state object of `Calamity`.

  Holds every store (aggregates, events, process managers) together with the
  version metadata needed for command dispatch.
  """

  defstruct aggregate_store: %{},
            aggregate_versions: %{},
            event_store: %Calamity.EventStore.ListEventStore{},
            process_manager_mods: [],
            process_manager_store: %{},
            process_manager_versions: %{}

  @doc """
  Installs `aggregate_store` in the stack.

  Aggregates are caught up with the event store the first time a command is
  addressed to them. If your aggregates already carry state, pass their
  versions as the third argument so they are not re-sent events they have
  already seen.
  """
  def put_aggregate_store(%__MODULE__{} = stack, aggregate_store, aggregate_versions \\ %{}) do
    %{stack | aggregate_store: aggregate_store, aggregate_versions: aggregate_versions}
  end

  @doc """
  Installs `event_store` in the stack.
  """
  def put_event_store(%__MODULE__{} = stack, event_store) do
    %{stack | event_store: event_store}
  end

  @doc """
  Installs `process_manager_store` in the stack.

  Process managers are caught up the first time they are interested in an
  event. If your process managers already carry state, pass their versions
  as the third argument so they are not re-sent events they have already
  seen.
  """
  def put_process_manager_store(
        %__MODULE__{} = stack,
        process_manager_store,
        process_manager_versions \\ %{}
      ) do
    %{
      stack
      | process_manager_store: process_manager_store,
        process_manager_versions: process_manager_versions
    }
  end

  @doc """
  Adds a process manager module to the stack.
  """
  def add_process_manager_module(%__MODULE__{} = stack, process_manager_module) do
    %{stack | process_manager_mods: [process_manager_module | stack.process_manager_mods]}
  end
end
|
lib/calamity/stack.ex
| 0.836388 | 0.408867 |
stack.ex
|
starcoder
|
defmodule Rayray.Renderings.Cube do
  @moduledoc """
  Renders a demo scene of three cubes on a plane and writes the result to a
  PPM file.
  """

  alias Rayray.Camera
  alias Rayray.Canvas
  alias Rayray.Cube
  alias Rayray.Lights
  alias Rayray.Material
  alias Rayray.Matrix
  alias Rayray.Plane
  alias Rayray.Transformations
  alias Rayray.Tuple
  alias Rayray.World

  def do_it() do
    # Matte, slightly warm floor plane.
    floor =
      %{Plane.new() | material: %{Material.new() | color: Tuple.color(1, 0.9, 0.9), specular: 0}}

    # Unit cube translated to the middle of the scene.
    middle =
      %{Cube.new()
        | transform: Matrix.translation(-0.5, 1, 0.5),
          material: %{
            Material.new()
            | color: Tuple.color(0.1, 1, 0.5),
              diffuse: 0.7,
              specular: 0.3
          }}

    # Half-size cube on the right.
    right =
      %{Cube.new()
        | transform:
            Matrix.multiply(Matrix.translation(1.5, 0.5, -0.5), Matrix.scaling(0.5, 0.5, 0.5)),
          material: %{
            Material.new()
            | color: Tuple.color(0.5, 1, 0.1),
              diffuse: 0.7,
              specular: 0.3
          }}

    # Third-size cube on the left.
    left =
      %{Cube.new()
        | transform:
            Matrix.multiply(
              Matrix.translation(-1.5, 0.33, -0.75),
              Matrix.scaling(0.33, 0.33, 0.33)
            ),
          material: %{
            Material.new()
            | color: Tuple.color(1, 0.8, 0.1),
              diffuse: 0.7,
              specular: 0.3
          }}

    # Single white point light above and to the left of the camera.
    world =
      %{World.new()
        | light: Lights.point_light(Tuple.point(-10, 10, -10), Tuple.color(1, 1, 1)),
          objects: [floor, middle, right, left]}

    camera =
      %{Camera.new(1200, 1200, :math.pi() / 3)
        | transform:
            Transformations.view_transform(
              Tuple.point(0, 1.5, -5),
              Tuple.point(0, 1, 0),
              Tuple.vector(0, 1, 0)
            )}

    IO.puts("started rendering")
    canvas = Camera.render(camera, world)
    IO.puts("done rendering")
    ppm = Canvas.canvas_to_ppm(canvas)
    IO.puts("Done ppm")
    File.write!("cube_1200x1200.ppm", ppm)
  end
end
|
lib/rayray/renderings/cube.ex
| 0.815783 | 0.433622 |
cube.ex
|
starcoder
|
defmodule JSONRPC2.Clients.HTTP do
  @moduledoc """
  A client for JSON-RPC 2.0 using an HTTP transport with JSON in the body.
  """

  @default_headers [{"content-type", "application/json"}]

  @type batch_result :: {:ok, JSONRPC2.Response.id_and_response()} | {:error, any}

  @doc """
  Make a call to `url` for JSON-RPC 2.0 `method` with `params`.

  You can also pass `headers`, `http_method`, `hackney_opts` to customize the
  options for hackney, and `request_id` for a custom JSON-RPC 2.0 request ID.

  See [hackney](https://github.com/benoitc/hackney) for more information on
  the available options.
  """
  @spec call(String.t(), JSONRPC2.method(), JSONRPC2.params(), any, atom, list, JSONRPC2.id()) ::
          {:ok, any} | {:error, any}
  def call(
        url,
        method,
        params,
        headers \\ @default_headers,
        http_method \\ :post,
        hackney_opts \\ [],
        request_id \\ "0"
      ) do
    serializer = Application.get_env(:jsonrpc2, :serializer)
    {:ok, payload} = JSONRPC2.Request.serialized_request({method, params, request_id}, serializer)
    response = :hackney.request(http_method, url, headers, payload, hackney_opts)

    with(
      {:ok, 200, _headers, body_ref} <- response,
      {:ok, body} <- :hackney.body(body_ref),
      {:ok, {_id, result}} <- JSONRPC2.Response.deserialize_response(body, serializer)
    ) do
      # `result` is already an {:ok, _} | {:error, _} tuple from the response.
      result
    else
      {:ok, status_code, headers, body_ref} ->
        {:error, {:http_request_failed, status_code, headers, :hackney.body(body_ref)}}

      {:ok, status_code, headers} ->
        {:error, {:http_request_failed, status_code, headers}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Notify via `url` for JSON-RPC 2.0 `method` with `params`.

  You can also pass `headers`, `http_method`, and `hackney_opts` to customize
  the options for hackney.

  See [hackney](https://github.com/benoitc/hackney) for more information on
  the available options.
  """
  @spec notify(String.t(), JSONRPC2.method(), JSONRPC2.params(), any, atom, list) ::
          :ok | {:error, any}
  def notify(
        url,
        method,
        params,
        headers \\ @default_headers,
        http_method \\ :post,
        hackney_opts \\ []
      ) do
    serializer = Application.get_env(:jsonrpc2, :serializer)
    {:ok, payload} = JSONRPC2.Request.serialized_request({method, params}, serializer)

    case :hackney.request(http_method, url, headers, payload, hackney_opts) do
      {:ok, 200, _headers, _body_ref} ->
        :ok

      {:ok, 200, _headers} ->
        :ok

      # Previously a non-200 response raised CaseClauseError here; report it
      # as an error tuple, consistent with call/7 and batch/5.
      {:ok, status_code, headers, body_ref} ->
        {:error, {:http_request_failed, status_code, headers, :hackney.body(body_ref)}}

      {:ok, status_code, headers} ->
        {:error, {:http_request_failed, status_code, headers}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Make a batch request via `url` for JSON-RPC 2.0 `requests`.

  You can also pass `headers`, `http_method`, and `hackney_opts` to customize
  the options for hackney.

  See [hackney](https://github.com/benoitc/hackney) for more information on
  the available options.
  """
  @spec batch(String.t(), [JSONRPC2.Request.request()], any, atom, list) ::
          [batch_result] | :ok | {:error, any}
  def batch(url, requests, headers \\ @default_headers, http_method \\ :post, hackney_opts \\ []) do
    serializer = Application.get_env(:jsonrpc2, :serializer)

    {:ok, payload} =
      Enum.map(requests, &JSONRPC2.Request.request/1)
      |> serializer.encode()

    response = :hackney.request(http_method, url, headers, payload, hackney_opts)

    with(
      {:ok, 200, _headers, body_ref} <- response,
      {:ok, body} <- :hackney.body(body_ref),
      {:ok, deserialized_body} <- serializer.decode(body)
    ) do
      process_batch(deserialized_body)
    else
      {:ok, status_code, headers, body_ref} ->
        {:error, {:http_request_failed, status_code, headers, :hackney.body(body_ref)}}

      # A bodiless 200 means every request was a notification.
      {:ok, 200, _headers} ->
        :ok

      {:ok, status_code, headers} ->
        {:error, {:http_request_failed, status_code, headers}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # A batch response is a list of responses; map each to {id, response}.
  defp process_batch(responses) when is_list(responses) do
    Enum.map(responses, &JSONRPC2.Response.id_and_response/1)
  end

  # Servers may answer a degenerate batch with a single response object.
  defp process_batch(response) do
    JSONRPC2.Response.id_and_response(response)
  end
end
|
lib/jsonrpc2/clients/http.ex
| 0.838084 | 0.413596 |
http.ex
|
starcoder
|
defmodule Monet.Transaction do
  @moduledoc """
  Created via `Monet.transaction/1` or `Monet.transaction/2`.
  """
  require Record

  alias Monet.{Connection, Error, Prepared, Reader, Writer}

  # conn: the checked-out connection; ref: a unique key used to namespace this
  # transaction's prepared statements in the pool's ETS table; pool_name: the
  # pool (and ETS table name) the connection came from.
  Record.defrecord(:transaction, conn: nil, ref: nil, pool_name: nil)

  # Builds a new transaction record around `conn`.
  def new(conn) do
    transaction(
      conn: conn,
      ref: make_ref(),
      pool_name: Connection.pool_name(conn)
    )
  end

  @doc """
  Executes the query using the specific transaction.

  When `name_or_sql` is an atom, runs the prepared statement previously
  registered under that name via `prepare/3`; otherwise executes the SQL
  directly on the transaction's connection.
  """
  def query(tx, name_or_sql, args \\ nil)

  def query(tx, name, args) when is_atom(name) do
    ref = transaction(tx, :ref)
    pool_name = transaction(tx, :pool_name)

    # Prepared statements are keyed by {transaction ref, name}.
    case :ets.lookup(pool_name, {ref, name}) do
      [{_, prepared}] -> Prepared.exec(prepared, args)
      _ -> {:error, Error.new(:driver, "unknown prepared statement", name)}
    end
  end

  def query(tx, sql, args) do
    # Connection.query returns {result, conn} for the pool
    # we only care about the result
    tx
    |> transaction(:conn)
    |> Connection.query(sql, args)
    |> elem(0)
  end

  @doc """
  Commits the transaction.

  Returns `:ok`, or `{:error, reason}` if the server's response is not the
  expected acknowledgement.
  """
  def commit(tx) do
    conn = transaction(tx, :conn)

    with :ok <- Writer.query(conn, "commit"),
         {:ok, "&4 t\n"} <- Reader.message(conn) # make sure auto-commit is turned back on
    do
      :ok
    else
      {:ok, data} -> {:error, Error.new(:driver, "invalid commit response", data)}
      err -> err
    end
  end

  @doc """
  Rolls back the transaction.

  Returns `:ok`, or `{:error, reason}` if the server's response is not the
  expected acknowledgement.
  """
  def rollback(tx) do
    conn = transaction(tx, :conn)

    with :ok <- Writer.query(conn, "rollback"),
         {:ok, "&4 t\n"} <- Reader.message(conn)
    do
      :ok
    else
      {:ok, data} -> {:error, Error.new(:driver, "invalid rollback response", data)}
      err -> err
    end
  end

  @doc """
  Prepares the statement and stores it in the transaction cache. See
  `Monet.prepare/3`.
  """
  def prepare(tx, name, sql) do
    with {:ok, prepared} <- Prepared.new(transaction(tx, :conn), sql) do
      ref = transaction(tx, :ref)
      pool_name = transaction(tx, :pool_name)
      # Register under {ref, name} so close/1 can find and deallocate it.
      :ets.insert(pool_name, {{ref, name}, prepared})
      :ok
    end
  end

  @doc """
  Deallocates any prepared statements that were allocated as part of this
  transaction and removes them from the pool's ETS table.
  """
  def close(tx) do
    ref = transaction(tx, :ref)
    pool_name = transaction(tx, :pool_name)

    Enum.each(:ets.match(pool_name, {{ref, :_}, :'$1'}), fn [prepared] ->
      Prepared.close(prepared)
    end)

    :ets.match_delete(pool_name, {{ref, :_}, :'$1'})
  end
end
|
lib/transaction.ex
| 0.687945 | 0.403302 |
transaction.ex
|
starcoder
|
defmodule AWS.SSOAdmin do
@moduledoc """
Amazon Web Services Single Sign On (SSO) is a cloud SSO service that makes it
easy to centrally manage SSO access to multiple Amazon Web Services accounts and
business applications.
This guide provides information on SSO operations which could be used for access
management of Amazon Web Services accounts. For information about Amazon Web
Services SSO features, see the [Amazon Web Services Single Sign-On User Guide](https://docs.aws.amazon.com/singlesignon/latest/userguide/what-is.html).
Many operations in the SSO APIs rely on identifiers for users and groups, known
as principals. For more information about how to work with principals and
principal IDs in Amazon Web Services SSO, see the [Amazon Web Services SSO Identity Store API
Reference](https://docs.aws.amazon.com/singlesignon/latest/IdentityStoreAPIReference/welcome.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "SSO Admin",
api_version: "2020-07-20",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "sso",
global?: false,
protocol: "json",
service_id: "SSO Admin",
signature_version: "v4",
signing_name: "sso",
target_prefix: "SWBExternalService"
}
end
@doc """
Attaches an IAM managed policy ARN to a permission set.
If the permission set is already referenced by one or more account assignments,
you will need to call ` `ProvisionPermissionSet` ` after this operation. Calling
`ProvisionPermissionSet` applies the corresponding IAM policy updates to all
assigned accounts.
"""
def attach_managed_policy_to_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AttachManagedPolicyToPermissionSet", input, options)
end
@doc """
Assigns access to a principal for a specified Amazon Web Services account using
a specified permission set.
The term *principal* here refers to a user or group that is defined in Amazon
Web Services SSO.
As part of a successful `CreateAccountAssignment` call, the specified permission
set will automatically be provisioned to the account in the form of an IAM
policy. That policy is attached to the SSO-created IAM role. If the permission
set is subsequently updated, the corresponding IAM policies attached to roles in
your accounts will not be updated automatically. In this case, you must call `
`ProvisionPermissionSet` ` to make these updates.
"""
def create_account_assignment(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateAccountAssignment", input, options)
end
@doc """
Enables the attributes-based access control (ABAC) feature for the specified
Amazon Web Services SSO instance.
You can also specify new attributes to add to your ABAC configuration during the
enabling process. For more information about ABAC, see [Attribute-Based Access Control](/singlesignon/latest/userguide/abac.html) in the *Amazon Web Services
SSO User Guide*.
"""
def create_instance_access_control_attribute_configuration(
%Client{} = client,
input,
options \\ []
) do
Request.request_post(
client,
metadata(),
"CreateInstanceAccessControlAttributeConfiguration",
input,
options
)
end
@doc """
Creates a permission set within a specified SSO instance.
To grant users and groups access to Amazon Web Services account resources, use `
`CreateAccountAssignment` `.
"""
def create_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePermissionSet", input, options)
end
@doc """
Deletes a principal's access from a specified Amazon Web Services account using
a specified permission set.
"""
def delete_account_assignment(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAccountAssignment", input, options)
end
@doc """
Deletes the inline policy from a specified permission set.
"""
def delete_inline_policy_from_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DeleteInlinePolicyFromPermissionSet",
input,
options
)
end
@doc """
Disables the attributes-based access control (ABAC) feature for the specified
Amazon Web Services SSO instance and deletes all of the attribute mappings that
have been configured.
Once deleted, any attributes that are received from an identity source and any
custom attributes you have previously configured will not be passed. For more
information about ABAC, see [Attribute-Based Access Control](/singlesignon/latest/userguide/abac.html) in the *Amazon Web Services
SSO User Guide*.
"""
def delete_instance_access_control_attribute_configuration(
%Client{} = client,
input,
options \\ []
) do
Request.request_post(
client,
metadata(),
"DeleteInstanceAccessControlAttributeConfiguration",
input,
options
)
end
@doc """
Deletes the specified permission set.
"""
def delete_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePermissionSet", input, options)
end
@doc """
Describes the status of the assignment creation request.
"""
def describe_account_assignment_creation_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeAccountAssignmentCreationStatus",
input,
options
)
end
@doc """
Describes the status of the assignment deletion request.
"""
def describe_account_assignment_deletion_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeAccountAssignmentDeletionStatus",
input,
options
)
end
@doc """
Returns the list of Amazon Web Services SSO identity store attributes that have
been configured to work with attributes-based access control (ABAC) for the
specified Amazon Web Services SSO instance.
This will not return attributes configured and sent by an external identity
provider. For more information about ABAC, see [Attribute-Based Access Control](/singlesignon/latest/userguide/abac.html) in the *Amazon Web Services
SSO User Guide*.
"""
def describe_instance_access_control_attribute_configuration(
%Client{} = client,
input,
options \\ []
) do
Request.request_post(
client,
metadata(),
"DescribeInstanceAccessControlAttributeConfiguration",
input,
options
)
end
@doc """
Gets the details of the permission set.
"""
def describe_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePermissionSet", input, options)
end
@doc """
Describes the status for the given permission set provisioning request.
"""
def describe_permission_set_provisioning_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribePermissionSetProvisioningStatus",
input,
options
)
end
@doc """
Detaches the attached IAM managed policy ARN from the specified permission set.
"""
def detach_managed_policy_from_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DetachManagedPolicyFromPermissionSet",
input,
options
)
end
@doc """
Obtains the inline policy assigned to the permission set.
"""
def get_inline_policy_for_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetInlinePolicyForPermissionSet", input, options)
end
@doc """
Lists the status of the Amazon Web Services account assignment creation requests
for a specified SSO instance.
"""
def list_account_assignment_creation_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ListAccountAssignmentCreationStatus",
input,
options
)
end
@doc """
Lists the status of the Amazon Web Services account assignment deletion requests
for a specified SSO instance.
"""
def list_account_assignment_deletion_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ListAccountAssignmentDeletionStatus",
input,
options
)
end
@doc """
Lists the assignee of the specified Amazon Web Services account with the
specified permission set.
"""
def list_account_assignments(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAccountAssignments", input, options)
end
@doc """
Lists all the Amazon Web Services accounts where the specified permission set is
provisioned.
"""
def list_accounts_for_provisioned_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ListAccountsForProvisionedPermissionSet",
input,
options
)
end
@doc """
Lists the SSO instances that the caller has access to.
"""
def list_instances(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListInstances", input, options)
end
@doc """
Lists the IAM managed policy that is attached to a specified permission set.
"""
def list_managed_policies_in_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListManagedPoliciesInPermissionSet", input, options)
end
@doc """
Lists the status of the permission set provisioning requests for a specified SSO
instance.
"""
def list_permission_set_provisioning_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ListPermissionSetProvisioningStatus",
input,
options
)
end
@doc """
Lists the `PermissionSet`s in an SSO instance.
"""
def list_permission_sets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPermissionSets", input, options)
end
@doc """
Lists all the permission sets that are provisioned to a specified Amazon Web
Services account.
"""
def list_permission_sets_provisioned_to_account(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ListPermissionSetsProvisionedToAccount",
input,
options
)
end
@doc """
Lists the tags that are attached to a specified resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
The process by which a specified permission set is provisioned to the specified
target.
"""
def provision_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ProvisionPermissionSet", input, options)
end
@doc """
Attaches an IAM inline policy to a permission set.
If the permission set is already referenced by one or more account assignments,
you will need to call ` `ProvisionPermissionSet` ` after this action to apply
the corresponding IAM policy updates to all assigned accounts.
"""
def put_inline_policy_to_permission_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutInlinePolicyToPermissionSet", input, options)
end
@doc """
Associates a set of tags with a specified resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "TagResource", input, options)
end
@doc """
Disassociates a set of tags from a specified resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "UntagResource", input, options)
end
@doc """
Updates the Amazon Web Services SSO identity store attributes that you can use
with the Amazon Web Services SSO instance for attributes-based access control
(ABAC).
When using an external identity provider as an identity source, you can pass
attributes through the SAML assertion as an alternative to configuring
attributes from the Amazon Web Services SSO identity store. If a SAML assertion
passes any of these attributes, Amazon Web Services SSO replaces the attribute
value with the value from the Amazon Web Services SSO identity store. For more
information about ABAC, see [Attribute-Based Access Control](/singlesignon/latest/userguide/abac.html) in the *Amazon Web Services
SSO User Guide*.
"""
def update_instance_access_control_attribute_configuration(
      %Client{} = client,
      input,
      options \\ []
    ) do
  client
  |> Request.request_post(
    metadata(),
    "UpdateInstanceAccessControlAttributeConfiguration",
    input,
    options
  )
end
@doc """
Updates an existing permission set.
"""
def update_permission_set(%Client{} = client, input, options \\ []) do
  client
  |> Request.request_post(metadata(), "UpdatePermissionSet", input, options)
end
end
|
lib/aws/generated/sso_admin.ex
| 0.895074 | 0.469338 |
sso_admin.ex
|
starcoder
|
defmodule Elasticlunr.Storage.S3 do
  @moduledoc """
  This provider writes indexes to an S3 bucket. To use, you need
  to include the necessary S3 dependencies, see [repository](https://github.com/ex-aws/ex_aws_s3).
  ```elixir
  config :elasticlunr,
    storage: Elasticlunr.Storage.S3
  config :elasticlunr, Elasticlunr.Storage.S3,
    bucket: "elasticlunr",
    access_key_id: "minioadmin",
    secret_access_key: "minioadmin",
    scheme: "http://", # optional
    host: "127.0.0.1", # optional
    port: 9000 # optional
  ```
  """
  use Elasticlunr.Storage

  alias Elasticlunr.S3.ClientImpl
  alias Elasticlunr.{Index, Deserializer, Serializer}
  alias Elasticlunr.Storage.Disk

  # Lazily reads every `*.index` object in the configured bucket back into
  # an index via `read/1`.
  @impl true
  def load_all do
    config(:bucket)
    |> client_module().list_objects(config_all())
    |> Stream.map(fn %{key: file} ->
      file
      |> Path.basename(".index")
      |> read()
    end)
  end

  # Serializes the index into a temporary file, uploads it, then deletes the
  # temporary file. On success returns the result of `File.rm/1` (`:ok`);
  # a failed write/upload step falls out of the `with` unchanged.
  @impl true
  def write(%Index{name: name} = index) do
    bucket = config(:bucket)
    object = "#{name}.index"
    data = Serializer.serialize(index)
    # `tmp_file/1` cannot fail, so plain `=` binds are used instead of `<-`.
    path = tmp_file(object)

    with :ok <- Disk.write_serialized_index_to_file(path, data),
         {:ok, _} <- upload_object(bucket, object, path) do
      File.rm(path)
    end
  end

  # Downloads the object into a temporary file, deserializes it, removes the
  # temporary file, and returns the `%Index{}`. Any failing step short-circuits.
  @impl true
  def read(name) do
    bucket = config(:bucket)
    object = "#{name}.index"
    path = tmp_file(object)

    # Fixed typo: previously named `desirialize`.
    deserialize = fn file ->
      file
      |> File.stream!(~w[compressed]a)
      |> Deserializer.deserialize()
    end

    with {:ok, _} <- download_object(bucket, object, path),
         %Index{} = index <- deserialize.(path),
         :ok <- File.rm(path) do
      index
    end
  end

  # Deletes the stored object; normalizes the client's `{:ok, _}` to `:ok`.
  @impl true
  def delete(name) do
    config(:bucket)
    |> client_module().delete_object("#{name}.index", config_all())
    |> case do
      {:ok, _} -> :ok
      err -> err
    end
  end

  defp download_object(bucket, object, file) do
    client_module().download_object(bucket, object, file, config_all())
  end

  defp upload_object(bucket, object, file) do
    client_module().upload_object(bucket, object, file, config_all())
  end

  # The S3 client is configurable so tests can inject a fake implementation.
  defp client_module, do: config(:client_module, ClientImpl)

  defp tmp_file(file), do: Path.join(System.tmp_dir!(), file)
end
|
lib/storage/s3.ex
| 0.653459 | 0.645637 |
s3.ex
|
starcoder
|
defmodule BSV.KeyPair do
  @moduledoc """
  Module for generating and using Bitcoin key pairs.
  Bitcoin keys are ECDSA keys. Virtually any 256-bit number is a valid private
  key, and the corresponding point on the `secp256k1` curve is the public key.
  """
  alias BSV.Crypto.ECDSA
  alias BSV.Crypto.ECDSA.{PublicKey, PrivateKey}

  defstruct network: :main, public_key: nil, private_key: nil

  @typedoc "BSV Key Pair"
  @type t :: %__MODULE__{
          public_key: binary,
          private_key: binary
        }

  @doc """
  Generates a new BSV key pair.
  ## Options
  The accepted options are:
  * `:compressed` - Specify whether to compress the generated public key. Defaults to `true`.
  ## Examples
      iex> keypair = BSV.KeyPair.generate
      ...> keypair.__struct__ == BSV.KeyPair
      true
  """
  @spec generate(keyword) :: __MODULE__.t
  def generate(options \\ []) do
    ECDSA.generate_key()
    |> from_ecdsa_key(options)
  end

  @doc """
  Converts ECDSA keys to a BSV key pair.
  ## Options
  The accepted options are:
  * `:compressed` - Specify whether to compress the given public key. Defaults to `true`.
  ## Examples
      iex> keypair = BSV.KeyPair.from_ecdsa_key(BSV.Test.bsv_keys)
      ...> keypair.__struct__ == BSV.KeyPair
      true
  """
  @spec from_ecdsa_key(PrivateKey.t | {binary, binary}, keyword) :: __MODULE__.t
  def from_ecdsa_key(key, options \\ [])

  def from_ecdsa_key({pubkey, privkey}, options) do
    # Public keys are stored compressed unless the caller opts out.
    pubkey =
      if Keyword.get(options, :compressed, true),
        do: PublicKey.compress(pubkey),
        else: pubkey

    %__MODULE__{
      network: Keyword.get(options, :network, :main),
      public_key: pubkey,
      private_key: privkey
    }
  end

  def from_ecdsa_key(key, options) do
    from_ecdsa_key({key.public_key, key.private_key}, options)
  end

  @doc """
  Decodes the given Wallet Import Format (WIF) binary into a BSV key pair.
  ## Examples
      iex> BSV.KeyPair.wif_decode("<KEY>")
      ...> |> BSV.Address.from_public_key
      ...> |> BSV.Address.to_string
      "18cqNbEBxkAttxcZLuH9LWhZJPd1BNu1A5"
      iex> BSV.KeyPair.wif_decode("<KEY>")
      ...> |> BSV.Address.from_public_key
      ...> |> BSV.Address.to_string
      "<KEY>"
  """
  @spec wif_decode(binary) :: __MODULE__.t
  def wif_decode(wif) do
    # A trailing 0x01 byte in the payload marks a compressed-key WIF.
    {privkey, compressed?} =
      case B58.decode58_check!(wif) do
        {<<key::binary-32, 1>>, <<0x80>>} -> {key, true}
        {<<key::binary-32>>, <<0x80>>} -> {key, false}
      end

    [private_key: privkey]
    |> ECDSA.generate_key_pair()
    |> from_ecdsa_key(compressed: compressed?)
  end

  @doc """
  Encodes the given BSV key pair into a Wallet Import Format (WIF) binary.
  ## Examples
      iex> BSV.Crypto.ECDSA.PrivateKey.from_sequence(BSV.Test.ecdsa_key)
      ...> |> BSV.KeyPair.from_ecdsa_key
      ...> |> BSV.KeyPair.wif_encode
      "<KEY>"
      iex> BSV.Crypto.ECDSA.PrivateKey.from_sequence(BSV.Test.ecdsa_key)
      ...> |> BSV.KeyPair.from_ecdsa_key(compressed: false)
      ...> |> BSV.KeyPair.wif_encode
      "<KEY>"
  """
  def wif_encode(keypair = %__MODULE__{}) do
    # A 33-byte public key means the pair is compressed; flag it in the WIF.
    suffix = if byte_size(keypair.public_key) == 33, do: <<0x01>>, else: ""

    B58.encode58_check!(keypair.private_key <> suffix, <<0x80>>)
  end
end
|
lib/bsv/key_pair.ex
| 0.860721 | 0.453201 |
key_pair.ex
|
starcoder
|
defmodule Tradehub.Wallet do
  @moduledoc """
  This module aims at signing, generating, and interacting with a Tradehub account.
  """
  require Logger
  import Tradehub.Raising

  alias Tradehub.ExtendedKey

  # NOTE(review): compile-time config read — the default network is frozen when
  # this module is compiled. Consider a runtime read if it must be switchable.
  @network Application.get_env(:tradehub, :network, :testnet)

  @typedoc "The wallet address"
  @type address :: String.t()
  @typedoc "The public key of a wallet"
  @type public_key :: String.t() | bitstring()
  @typedoc "The private key of a wallet"
  @type private_key :: String.t() | bitstring()
  @typedoc "The mnemonic used to generate a wallet"
  @type mnemonic :: String.t()
  @typedoc "The network of the wallet"
  @type network :: :mainnet | :testnet
  @typedoc "Signing message"
  @type signing_message :: map()
  @typedoc "Tradehub Wallet"
  @type t :: %__MODULE__{
          mnemonic: mnemonic(),
          private_key: private_key(),
          public_key: public_key(),
          address: address(),
          network: atom()
        }

  defstruct mnemonic: "",
            private_key: <<>>,
            public_key: <<>>,
            address: "",
            network: @network

  @doc """
  Look for the private key based on the given mnemonic phrase.
  ## Examples
      iex> Tradehub.Wallet.private_key_from_mnemonic("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito")
      {:ok, <<21, 31, 133, 212, 19, 88, 245, 109, 20, 190, 196, 132, 108, 83, 112, 163, 174, 79, 52, 222, 203, 167, 29, 72, 254, 172, 117, 236, 191, 108, 140, 161>>}
      iex> Tradehub.Wallet.private_key_from_mnemonic("clumb twenty either puppy thank liquid vital rigid tide tragic flash elevator")
      {:error, "Invalid mnemonic"}
      iex> Tradehub.Wallet.private_key_from_mnemonic!("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito")
      <<21, 31, 133, 212, 19, 88, 245, 109, 20, 190, 196, 132, 108, 83, 112, 163, 174, 79, 52, 222, 203, 167, 29, 72, 254, 172, 117, 236, 191, 108, 140, 161>>
  """
  @spec private_key_from_mnemonic(mnemonic()) :: {:ok, private_key()} | {:error, String.t()}
  @spec private_key_from_mnemonic!(mnemonic()) :: private_key()
  def private_key_from_mnemonic(mnemonic) do
    case Tradehub.Mnemonic.validate_mnemonic(mnemonic) do
      true ->
        # BIP-32/44 derivation along the Cosmos path m/44'/118'/0'/0/0.
        private_key =
          mnemonic
          |> Tradehub.Mnemonic.mnemonic_to_seed()
          |> String.upcase()
          |> Base.decode16!()
          |> ExtendedKey.master()
          |> ExtendedKey.derive_path("m/44'/118'/0'/0/0")
          |> Map.get(:key)

        {:ok, private_key}

      false ->
        {:error, "Invalid mnemonic"}
    end
  end

  raising(:private_key_from_mnemonic, mnemonic)

  @doc """
  Look for the public key based on the given mnemonic phrase.
  ## Examples
      iex> Tradehub.Wallet.public_key_from_mnemonic("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito")
      {:ok, <<2, 230, 25, 59, 87, 182, 114, 223, 41, 153, 127, 228, 149, 215, 139, 79, 211, 234, 174, 157, 170, 224, 165, 226, 128, 49, 41, 226, 194, 27, 80, 78, 35>>}
      iex> Tradehub.Wallet.public_key_from_mnemonic!("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito")
      <<2, 230, 25, 59, 87, 182, 114, 223, 41, 153, 127, 228, 149, 215, 139, 79, 211, 234, 174, 157, 170, 224, 165, 226, 128, 49, 41, 226, 194, 27, 80, 78, 35>>
  """
  @spec public_key_from_mnemonic(mnemonic()) :: {:ok, public_key()} | {:error, String.t()}
  @spec public_key_from_mnemonic!(mnemonic()) :: public_key()
  def public_key_from_mnemonic(mnemonic) do
    result = private_key_from_mnemonic(mnemonic)

    case result do
      {:ok, private_key} ->
        :libsecp256k1.ec_pubkey_create(private_key, :compressed)

      _ ->
        result
    end
  end

  raising(:public_key_from_mnemonic, mnemonic)

  @doc """
  Look for the public key based on the given private key phrase.
  ## Examples
      iex> Tradehub.Wallet.public_key_from_private_key("151f85d41358f56d14bec4846c5370a3ae4f34decba71d48feac75ecbf6c8ca1")
      {:ok, <<2, 230, 25, 59, 87, 182, 114, 223, 41, 153, 127, 228, 149, 215, 139, 79, 211, 234, 174, 157, 170, 224, 165, 226, 128, 49, 41, 226, 194, 27, 80, 78, 35>>}
      iex> Tradehub.Wallet.public_key_from_private_key!("151f85d41358f56d14bec4846c5370a3ae4f34decba71d48feac75ecbf6c8ca1")
      <<2, 230, 25, 59, 87, 182, 114, 223, 41, 153, 127, 228, 149, 215, 139, 79, 211, 234, 174, 157, 170, 224, 165, 226, 128, 49, 41, 226, 194, 27, 80, 78, 35>>
  """
  @spec public_key_from_private_key(private_key()) ::
          {:ok, public_key()} | {:error, String.t()}
  @spec public_key_from_private_key!(private_key()) :: public_key()
  def public_key_from_private_key(private_key) do
    {:ok, private_key} = normalize_hex_string(private_key)

    case :libsecp256k1.ec_pubkey_create(private_key, :compressed) do
      {:error, _} -> {:error, "Private key invalid"}
      other -> other
    end
  end

  raising(:public_key_from_private_key, private_key)

  @doc """
  Look for the wallet address based on the given mnemonic phrase within a network.
  ## Examples
      iex> Tradehub.Wallet.address_from_mnemonic("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito", :testnet)
      {:ok, "tswth174cz08dmgluavwcz2suztvydlptp4a8f8t5h4t"}
      iex> Tradehub.Wallet.address_from_mnemonic("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito", :mainnet)
      {:ok, "swth174cz08dmgluavwcz2suztvydlptp4a8fru98vw"}
      iex> Tradehub.Wallet.address_from_mnemonic!("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito", :mainnet)
      "swth174cz08dmgluavwcz2suztvydlptp4a8fru98vw"
      iex> Tradehub.Wallet.address_from_mnemonic("wrost coyote fuel wet evil tag shoot yellow morning history visit mosquito")
      {:error, "Invalid mnemonic"}
  """
  @spec address_from_mnemonic(mnemonic(), network()) :: {:ok, address()} | {:error, String.t()}
  @spec address_from_mnemonic!(mnemonic(), network()) :: address()
  def address_from_mnemonic(mnemonic, network \\ @network) do
    result = public_key_from_mnemonic(mnemonic)

    case result do
      {:ok, public_key} ->
        # Deliberately does not reuse `address_from_public_key/2`: the key here
        # is a raw binary and must not go through hex normalization.
        sha = :crypto.hash(:sha256, public_key)
        rip = :crypto.hash(:ripemd160, sha)

        prefix =
          case network do
            :mainnet -> "swth"
            :testnet -> "tswth"
            other -> Atom.to_string(other)
          end

        {:ok, Bech32.encode_from_5bit(prefix, Bech32.convertbits(rip, 8, 5, false))}

      _ ->
        result
    end
  end

  raising(:address_from_mnemonic, mnemonic)
  raising(:address_from_mnemonic, mnemonic, network)

  @doc """
  Look for the wallet address based on the given public key within a network.
  ## Examples
      iex> Tradehub.Wallet.address_from_private_key("151f85d41358f56d14bec4846c5370a3ae4f34decba71d48feac75ecbf6c8ca1", :testnet)
      {:ok, "tswth174cz08dmgluavwcz2suztvydlptp4a8f8t5h4t"}
      iex> Tradehub.Wallet.address_from_private_key!("151f85d41358f56d14bec4846c5370a3ae4f34decba71d48feac75ecbf6c8ca1", :mainnet)
      "swth174cz08dmgluavwcz2suztvydlptp4a8fru98vw"
  """
  @spec address_from_private_key(private_key()) :: {:ok, address()} | {:error, String.t()}
  @spec address_from_private_key!(private_key()) :: address()
  def address_from_private_key(private_key, network \\ @network) do
    {:ok, private_key} = normalize_hex_string(private_key)
    {:ok, public_key} = public_key_from_private_key(private_key)

    address_from_public_key(public_key, network)
  end

  raising(:address_from_private_key, private_key)
  raising(:address_from_private_key, private_key, network)

  @doc """
  Look for the wallet address of based on the given public key within a network.
  ## Examples
      iex> Tradehub.Wallet.address_from_public_key("<KEY>", :testnet)
      {:ok, "tswth174cz08dmgluavwcz2suztvydlptp4a8f8t5h4t"}
      iex> Tradehub.Wallet.address_from_public_key("<KEY>", :mainnet)
      {:ok, "swth174cz08dmgluavwcz2suztvydlptp4a8fru98vw"}
  """
  @spec address_from_public_key(public_key(), network()) :: {:ok, address()} | {:error, String.t()}
  def address_from_public_key(public_key, network \\ @network) do
    case normalize_hex_string(public_key) do
      {:ok, public_key} ->
        # Bech32 address = bech32(prefix, 5-bit(ripemd160(sha256(pubkey)))).
        sha = :crypto.hash(:sha256, public_key)
        rip = :crypto.hash(:ripemd160, sha)

        prefix =
          case network do
            :mainnet -> "swth"
            :testnet -> "tswth"
            other -> Atom.to_string(other)
          end

        {:ok, Bech32.encode_from_5bit(prefix, Bech32.convertbits(rip, 8, 5, false))}

      :error ->
        {:error, "Public key invalid"}
    end
  end

  @doc """
  Generate a new Tradehub wallet
  ## Examples
      iex> Tradehub.Wallet.create_wallet
      iex> Tradehub.Wallet.create_wallet(:testnet)
  """
  @spec create_wallet(network()) :: Tradehub.Wallet.t()
  def create_wallet(network \\ @network) do
    mnemonic = Tradehub.Mnemonic.generate(128)

    private_key =
      private_key_from_mnemonic!(mnemonic)
      |> Base.encode16()
      |> String.downcase()

    public_key = public_key_from_private_key!(private_key)
    address = address_from_private_key!(private_key, network)

    %Tradehub.Wallet{
      mnemonic: mnemonic,
      private_key: private_key,
      public_key: public_key,
      address: address,
      # Fix: record the network the address was derived for. Previously this
      # field was omitted, so the struct always fell back to the compile-time
      # default even when the caller asked for a different network.
      network: network
    }
  end

  @doc """
  Open a wallet based on its private key.
  ## Examples
      iex> {:ok, wallet} = Tradehub.Wallet.from_private_key("151f85d41358f56d14bec4846c5370a3ae4f34decba71d48feac75ecbf6c8ca1")
      iex> wallet.address
      "swth174cz08dmgluavwcz2suztvydlptp4a8fru98vw"
  """
  @spec from_private_key(private_key(), network()) :: {:ok, Tradehub.Wallet.t()} | {:error, String.t()}
  @spec from_private_key!(private_key(), network()) :: Tradehub.Wallet.t()
  def from_private_key(private_key, network \\ @network) do
    case public_key_from_private_key(private_key) do
      {:ok, public_key} ->
        {:ok, address} = address_from_private_key(private_key, network)

        wallet = %__MODULE__{
          private_key: private_key,
          public_key: public_key,
          address: address,
          network: network
        }

        {:ok, wallet}

      other ->
        other
    end
  end

  raising(:from_private_key, private_key)
  raising(:from_private_key, private_key, network)

  @doc """
  Open a wallet based on its mnemonic.
  ## Examples
      iex> {:ok, wallet} = Tradehub.Wallet.from_mnemonic("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito", :mainnet)
      iex> wallet.address
      "swth174cz08dmgluavwcz2suztvydlptp4a8fru98vw"
      iex> {:ok, wallet} = Tradehub.Wallet.from_mnemonic("wrist coyote fuel wet evil tag shoot yellow morning history visit mosquito", :testnet)
      iex> wallet.address
      "tswth174cz08dmgluavwcz2suztvydlptp4a8f8t5h4t"
  """
  @spec from_mnemonic(mnemonic(), network()) :: {:ok, Tradehub.Wallet.t()} | {:error, String.t()}
  @spec from_mnemonic!(mnemonic(), network()) :: Tradehub.Wallet.t()
  def from_mnemonic(mnemonic, network \\ @network) do
    case private_key_from_mnemonic(mnemonic) do
      {:ok, private_key} ->
        from_private_key(private_key, network)

      other ->
        other
    end
  end

  raising(:from_mnemonic, mnemonic)
  raising(:from_mnemonic, mnemonic, network)

  @doc """
  Sign the given message by using a wallet private key, and verify the signed messaged by using the wallet public key.
  Due to the nature of blockchain, the message will sign by the curve digital signature algorithm (ECDSA), with curve
  is `secp256k1` and the hash algorithm is `sha256`.
  ## Examples
      iex> wallet = Tradehub.Wallet.create_wallet()
      iex> Tradehub.Wallet.sign(%{message: "hello world"}, wallet)
  """
  @spec sign(signing_message(), t()) :: {:ok, String.t()} | {:error, String.t()}
  def sign(message, wallet) do
    # Keys are serialized in alphabetical order so the signed bytes are
    # deterministic regardless of map ordering.
    message_with_correct_keys_order = encode_object_in_alphanumeric_key_order(message)
    hash = :crypto.hash(:sha256, message_with_correct_keys_order)

    # NOTE(review): `ecdsa_sign_compact` appears to expect a raw 32-byte key,
    # while wallets built by `create_wallet/1` store a hex string — confirm
    # callers normalize the key before signing.
    result =
      :libsecp256k1.ecdsa_sign_compact(
        hash,
        wallet.private_key,
        :nonce_function_rfc6979,
        <<>>
      )

    case result do
      {:ok, sig, _} ->
        {:ok, sig |> Base.encode64()}

      other ->
        other
    end
  end

  @doc ~S"""
  Encode a map to JSON with all of the keys in alphabetical order (nested included).
  ## Examples
      iex> Tradehub.Wallet.encode_object_in_alphanumeric_key_order(%{b: 1, c: 2, a: 3, d: 4})
      "{\"a\":3,\"b\":1,\"c\":2,\"d\":4}"
      iex> Tradehub.Wallet.encode_object_in_alphanumeric_key_order(%{b: %{e: 1, f: 2}, c: 2, a: 3, d: 4})
      "{\"a\":3,\"b\":{\"e\":1,\"f\":2},\"c\":2,\"d\":4}"
      iex> Tradehub.Wallet.encode_object_in_alphanumeric_key_order("")
      "\"\""
  """
  @spec encode_object_in_alphanumeric_key_order(any) :: binary
  def encode_object_in_alphanumeric_key_order(obj) when is_map(obj) do
    az_keys = obj |> Map.keys() |> Enum.sort()

    # Builds the JSON object by hand (as iodata) so key order is under our
    # control; values recurse to sort nested maps too.
    iodata = [
      "{",
      Enum.map(az_keys, fn k ->
        v = obj[k]
        [Jason.encode!(k), ":", encode_object_in_alphanumeric_key_order(v)]
      end)
      |> Enum.intersperse(","),
      "}"
    ]

    IO.iodata_to_binary(iodata)
  end

  def encode_object_in_alphanumeric_key_order(obj), do: Jason.encode!(obj)

  ## Private functions

  # Accepts either a hex-encoded key (decoded to raw bytes) or an already-raw
  # binary key (returned unchanged when it is not a printable string).
  defp normalize_hex_string(string) do
    case String.valid?(string) do
      true ->
        string
        |> String.upcase()
        |> Base.decode16()

      false ->
        {:ok, string}
    end
  end
end
|
lib/tradehub/wallet.ex
| 0.873532 | 0.443781 |
wallet.ex
|
starcoder
|
defmodule Aja.IO do
  @moduledoc ~S"""
  Some extra helper functions for working with IO data,
  that are not in the core `IO` module.
  """

  # TODO: Link about cowboy/mint, benchmarks with Jason
  # TODO bench then inline

  @doc ~S"""
  Checks if IO data is empty in "constant" time.
  Should only need to loop until it finds one character or binary to stop,
  unlike `IO.iodata_length(iodata) == 0` which needs to perform the complete loop
  to compute the length first.
  ## Examples
      iex> Aja.IO.iodata_empty?(["", []])
      true
      iex> Aja.IO.iodata_empty?('a')
      false
      iex> Aja.IO.iodata_empty?(["a"])
      false
      iex> Aja.IO.iodata_empty?(["", [], ["" | "c"]])
      false
  ## Rationale
  Even if `IO.iodata_length/1` is a very efficient BIF implemented in C, it has a linear
  algorithmic complexity and can become slow if invoked on an IO list with many elements.
  This is not a far-fetched scenario, and a production use case can easily include
  "big" IO-lists with:
  - JSON encoding to IO-data of long lists / nested objects
  - loops within HTML templates
  """
  @spec iodata_empty?(iodata) :: boolean
  def iodata_empty?(iodata)

  def iodata_empty?(binary) when is_binary(binary), do: binary === ""
  def iodata_empty?([]), do: true
  # Any integer in an IO list is a byte, so the data cannot be empty.
  def iodata_empty?([first | _]) when is_integer(first), do: false

  def iodata_empty?([first | rest]) do
    # `and` short-circuits: the tail is only walked when the head is empty.
    iodata_empty?(first) and iodata_empty?(rest)
  end

  @doc """
  Converts the argument to IO data according to the `String.Chars` protocol.
  Leaves lists untouched without any validation, calls `to_string/1` on everything else.
  This is the function invoked in string interpolations within the [i sigil](`Aja.sigil_i/2`).
  Works with both [IO data](https://hexdocs.pm/elixir/IO.html#module-io-data) and
  [Chardata](https://hexdocs.pm/elixir/IO.html?#module-chardata),
  depending on the type of the `data` parameter.
  ## Examples
      iex> Aja.IO.to_iodata(:foo)
      "foo"
      iex> Aja.IO.to_iodata(99)
      "99"
      iex> Aja.IO.to_iodata(["abc", 'def' | "ghi"])
      ["abc", 'def' | "ghi"]
  """
  @compile {:inline, to_iodata: 1}
  @spec to_iodata(String.Chars.t() | iodata | IO.chardata()) :: iodata | IO.chardata()
  def to_iodata(data) when is_binary(data) or is_list(data), do: data
  def to_iodata(data), do: String.Chars.to_string(data)
end
|
lib/io.ex
| 0.523664 | 0.63114 |
io.ex
|
starcoder
|
defmodule Bs.Death do
# Death resolution for one game turn. Each rule (starvation, wall collision,
# snake collision) partitions snakes into {live, dead}; `reap/1` combines the
# results so a snake survives only when *every* rule kept it alive.
alias Bs.GameState
alias Bs.World
alias Bs.Death.Collision
alias Bs.Point
alias Bs.Snake
alias __MODULE__
use Point
defstruct [:turn, :causes]
# Cause structs recorded on a dead snake to explain why it died.
defmodule(Kill, do: defstruct([:turn, :with, :cause]))
defmodule(BodyCollisionCause, do: defstruct([:with]))
defmodule(Cause, do: defstruct([]))
defmodule(HeadCollisionCause, do: defstruct([:with]))
defmodule(SelfCollisionCause, do: defstruct([]))
defmodule(StarvationCause, do: defstruct([]))
defmodule(WallCollisionCause, do: defstruct([]))
# Intersection of the per-rule live lists: survive only if every rule agrees.
def combine_live(l) do
l
|> Stream.map(&MapSet.new/1)
|> Enum.reduce(&MapSet.intersection/2)
|> MapSet.to_list()
end
# Flattens the per-rule dead lists and merges snakes killed by several rules,
# so each snake carries all of its causes for the turn.
def combine_dead(l, turn) do
l
|> Enum.flat_map(& &1)
|> do_combine_dead(turn, %{})
end
defp do_combine_dead([], turn, acc) do
# Base case: wrap each snake's accumulated cause list into a %Death{}.
to_death = fn snake ->
causes = snake.death
death = %Death{
turn: turn,
causes: causes
}
put_in(snake.death, death)
end
acc
|> Map.values()
|> Enum.map(to_death)
end
defp do_combine_dead([snake | rest], turn, acc) do
acc = update_dead_snake(snake, acc)
do_combine_dead(rest, turn, acc)
end
# Accumulates causes per snake id. At this point `snake.death` is still a
# plain cause list (set by the individual rules below).
defp update_dead_snake(%Snake{} = snake, acc) do
cause = snake.death
merge_cause = &(cause ++ &1)
update_snake = &update_in(&1.death, merge_cause)
Map.update(acc, snake.id, snake, update_snake)
end
def reap(%GameState{} = state) do
world = state.world
world = reap(world)
put_in(state.world, world)
end
# Runs all three rules against the world, keeping the intersection of the
# survivors and prepending newly dead snakes onto `world.dead_snakes`.
def reap(%World{} = world) do
turn = world.turn
dim = {world.width, world.height}
snakes = world.snakes
{l1, d1} = starvation(snakes)
{l2, d2} = wall_collision(snakes, dim)
{l3, d3} = collision(snakes)
live = combine_live([l1, l2, l3])
dead = combine_dead([d1, d2, d3], turn)
world = put_in(world.snakes, live)
update_in(world.dead_snakes, &(dead ++ &1))
end
@doc "Kill all snakes that starved this turn"
def starvation(snakes) do
{live, dead} = do_starvation(snakes)
{live, dead}
end
def do_starvation(snakes, acc \\ {[], []})
def do_starvation([], acc) do
acc
end
# Health at or below zero means the snake starved this turn.
def do_starvation([%{health_points: hp} = snake | rest], {live, dead})
when hp <= 0 do
reason = [%StarvationCause{}]
snake = put_in(snake.death, reason)
do_starvation(rest, {live, [snake | dead]})
end
def do_starvation([snake | rest], {live, dead}) do
do_starvation(rest, {[snake | live], dead})
end
@doc "Kills all snakes that hit a wall"
def wall_collision(snakes, dim) do
do_wall_collision(snakes, dim)
end
def do_wall_collision(snakes, dim, acc \\ {[], []})
def do_wall_collision([], _dim, acc) do
acc
end
# `p(x, y)` is the point-match macro brought in by `use Point`; the first
# coordinate is the snake's head. Outside the board bounds means death.
def do_wall_collision([%{coords: [p(x, y) | _]} = snake | rest], {w, h}, {
live,
dead
})
when x not in 0..(w - 1) or y not in 0..(h - 1) do
reason = [%WallCollisionCause{}]
snake = put_in(snake.death, reason)
do_wall_collision(rest, {w, h}, {live, [snake | dead]})
end
def do_wall_collision([snake | rest], {w, h}, {live, dead}) do
do_wall_collision(rest, {w, h}, {[snake | live], dead})
end
@doc "Kill all snakes that crashed into another snake"
def collision(snakes) do
# Each snake's collision check runs concurrently against the full snake list.
tasks = Task.async_stream(snakes, Collision, :run, [snakes])
results =
tasks
|> Stream.zip(snakes)
|> Stream.map(&unzip_result/1)
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
dead = Map.get(results, :dead, [])
live = Map.get(results, :live, [])
{live, dead}
end
# An empty cause list from Collision.run/2 means the snake survived.
defp unzip_result({{:ok, []}, snake}) do
{:live, snake}
end
defp unzip_result({{:ok, reason}, snake}) do
snake = put_in(snake.death, reason)
{:dead, snake}
end
defmodule Collision do
# Returns the list of collision causes for `snake` against every snake in
# `snakes` (including itself, for self-collision).
def run(snake, snakes) do
head = hd(snake.coords)
Stream.map(snakes, fn other ->
cond do
# Head-to-head hit (same cell, or a head-swap where the two heads pass
# through each other): the snake dies if it is not longer than `other`.
# NOTE(review): on equal length both snakes match this clause and die —
# confirm that ties are intended to be mutual kills.
other.id != snake.id and
(head == hd(other.coords)
or (length(snake.coords) > 1 and length(other.coords) > 1 and hd(tl(snake.coords)) == hd(other.coords) and head == hd(tl(other.coords)))) and
length(snake.coords) <= length(other.coords) ->
%HeadCollisionCause{with: other.id}
other.id == snake.id and head in tl(other.coords) ->
%SelfCollisionCause{}
head in tl(other.coords) ->
%BodyCollisionCause{with: other.id}
true ->
false
end
end)
|> Stream.filter(& &1)
|> Enum.to_list()
end
end
end
|
lib/bs/death.ex
| 0.511961 | 0.462412 |
death.ex
|
starcoder
|
defmodule X.Html do
  @moduledoc """
  Contains a set of functions to build a valid and safe HTML from X templates.
  """

  # Characters replaced by the escape engine at the bottom of this module,
  # paired with their HTML entities. (Reconstructed: the entity strings had
  # been corrupted into literal characters, making escaping a no-op and the
  # `?"` entry invalid syntax.)
  @escape_chars [
    {?<, "&lt;"},
    {?>, "&gt;"},
    {?&, "&amp;"},
    {?", "&quot;"},
    {?', "&#39;"}
  ]

  # Attributes whose values are merged (joined) rather than overridden.
  @merge_attr_names ["class", "style"]

  @doc ~S"""
  Merges given attrs and returns a list with key-value tuples:
      iex> X.Html.merge_attrs(%{demo: true, env: "test"}, [demo: false])
      [{"env", "test"}, {"demo", false}]
  It doesn't override `"style"` and `"class"` attributes from `base_attrs`
  but adds merged values into the list:
      iex> X.Html.merge_attrs(
      ...>   %{style: [{"color", "#fff"}, {"size", 1}]},
      ...>   [style: [{"color", "#aaa"}, {"font", "test"}]]
      ...> )
      [{"style", [{"size", 1}, {"color", "#aaa"}, {"font", "test"}]}]
  """
  @spec merge_attrs(any(), any()) :: [{String.t(), any()}]
  def merge_attrs(base_attrs, merge_attrs) do
    merge_attrs = value_to_key_list(merge_attrs)

    base_attrs
    |> value_to_key_list()
    |> Enum.reduce(merge_attrs, fn {b_key, b_value}, acc ->
      case List.keytake(acc, b_key, 0) do
        {{m_key, m_value}, rest} when m_key in @merge_attr_names ->
          [{m_key, merge_attrs(b_value, m_value)} | rest]

        {m_attr, rest} ->
          [m_attr | rest]

        nil ->
          [{b_key, b_value} | acc]
      end
    end)
  end

  @doc ~S"""
  Converts given attrs into HTML-safe iodata:
      iex> X.Html.attrs_to_iodata(%{"demo" => true, "env" => "<test>"})
      [["demo", '="', "true", '"'], 32, "env", '="', [[[] | "&lt;"], "test" | "&gt;"], '"']
  """
  @spec attrs_to_iodata(map() | [{String.t(), any()}]) :: iodata()
  def attrs_to_iodata(attrs) when is_map(attrs) do
    attrs
    |> Map.to_list()
    |> attrs_to_iodata()
  end

  # `nil`/`false` values drop the attribute entirely.
  def attrs_to_iodata([{_, value} | tail]) when value in [nil, false] do
    attrs_to_iodata(tail)
  end

  def attrs_to_iodata([{key, value} | tail]) do
    value_iodata = attr_value_to_iodata(value, key)
    attr_list = [key, '="', value_iodata, '"']

    # Join attributes with a single space (?\s == 32) only when more follow.
    case {tail, attrs_to_iodata(tail)} do
      {_, []} -> attr_list
      {[], acc} -> [attr_list | acc]
      {_, acc} -> [attr_list, ?\s | acc]
    end
  end

  def attrs_to_iodata([]) do
    []
  end

  @doc ~S"""
  Converts attr value into HTML-safe iodata:
      iex> X.Html.attr_value_to_iodata("<test>")
      [[[] | "&lt;"], "test" | "&gt;"]
  `"style"` and `"class"` attr values are joined with a delimiter:
      iex> X.Html.attr_value_to_iodata([{"color", "#fff"}, {"font", "small"}], "style")
      [["color", ": ", "#fff"], "; ", ["font", ": ", "small"]]
  """
  @spec attr_value_to_iodata(any()) :: iodata()
  @spec attr_value_to_iodata(any(), String.t()) :: iodata()
  def attr_value_to_iodata(value, key \\ "")

  def attr_value_to_iodata(true, _) do
    "true"
  end

  # Maps for non-mergeable attributes are serialized (e.g. to JSON) as a whole.
  def attr_value_to_iodata(value, key) when is_map(value) and key not in @merge_attr_names do
    to_safe_iodata(value)
  end

  def attr_value_to_iodata(value, key) when is_map(value) or is_list(value) do
    delimiter = if(key == "style", do: "; ", else: " ")

    value
    |> Enum.to_list()
    |> join_values_to_iodata(delimiter)
  end

  def attr_value_to_iodata(value, _) do
    to_safe_iodata(value)
  end

  @doc ~S"""
  Converts given value into HTML-safe iodata:
      iex> X.Html.to_safe_iodata("<test>")
      [[[] | "&lt;"], "test" | "&gt;"]
  """
  @spec to_safe_iodata(any()) :: iodata()
  def to_safe_iodata(value) when is_binary(value) do
    escape_to_iodata(value, 0, value, [])
  end

  def to_safe_iodata(value) when is_integer(value) do
    :erlang.integer_to_binary(value)
  end

  def to_safe_iodata(value) when is_float(value) do
    :io_lib_format.fwrite_g(value)
  end

  def to_safe_iodata(value = %module{}) when module in [Date, Time, NaiveDateTime, Decimal] do
    module.to_string(value)
  end

  def to_safe_iodata(value = %DateTime{}) do
    value
    |> to_string()
    |> to_safe_iodata()
  end

  # Only define map-to-JSON support when the configured JSON library exists.
  # NOTE(review): `Code.ensure_compiled?/1` is deprecated on recent Elixir;
  # kept as-is since the file may target an older version.
  if Code.ensure_compiled?(X.json_library()) do
    def to_safe_iodata(value) when is_map(value) do
      value
      |> X.json_library().encode!(%{escape: :html_safe})
      |> to_safe_iodata()
    end
  end

  def to_safe_iodata(value) do
    value
    |> to_string()
    |> to_safe_iodata()
  end

  # Normalizes any attrs container (map, keyword, list, scalar) into a list of
  # {string_key, value} tuples; bare keys become {key, true}.
  @spec value_to_key_list(any()) :: [{String.t(), any()}]
  defp value_to_key_list([head | tail]) do
    result =
      case head do
        {key, value} -> {to_string(key), value}
        key -> {to_string(key), true}
      end

    [result | value_to_key_list(tail)]
  end

  defp value_to_key_list([]) do
    []
  end

  defp value_to_key_list(value) when is_map(value) do
    value
    |> Map.to_list()
    |> value_to_key_list()
  end

  defp value_to_key_list(value) when is_tuple(value) do
    [{value, true}]
  end

  defp value_to_key_list(value) do
    [{to_string(value), true}]
  end

  @spec join_values_to_iodata([{any(), any()}], String.t()) :: iodata()
  defp join_values_to_iodata([{_, value} | tail], delimiter) when value in [nil, false] do
    join_values_to_iodata(tail, delimiter)
  end

  defp join_values_to_iodata([head | tail], delimiter) do
    result =
      case head do
        {key, true} ->
          to_safe_iodata(key)

        {key, value} ->
          [to_safe_iodata(key), ": ", to_safe_iodata(value)]

        key ->
          to_safe_iodata(key)
      end

    case {tail, join_values_to_iodata(tail, delimiter)} do
      {_, []} -> result
      {[], acc} -> [result, acc]
      {_, acc} -> [result, delimiter, acc]
    end
  end

  defp join_values_to_iodata([], _) do
    []
  end

  # Escape engine adapted from plug/html.ex: scans the binary, copying
  # unescaped runs by reference (binary_part) and splicing in entities.
  # https://github.com/elixir-plug/plug/blob/master/lib/plug/html.ex
  @spec escape_to_iodata(binary(), integer(), binary(), iodata()) :: iodata()
  @spec escape_to_iodata(binary(), integer(), binary(), iodata(), integer()) :: iodata()
  for {match, insert} <- @escape_chars do
    defp escape_to_iodata(<<unquote(match), rest::bits>>, skip, original, acc) do
      escape_to_iodata(rest, skip + 1, original, [acc | unquote(insert)])
    end
  end

  defp escape_to_iodata(<<_char, rest::bits>>, skip, original, acc) do
    escape_to_iodata(rest, skip, original, acc, 1)
  end

  defp escape_to_iodata(<<>>, _skip, _original, acc) do
    acc
  end

  for {match, insert} <- @escape_chars do
    defp escape_to_iodata(<<unquote(match), rest::bits>>, skip, original, acc, len) do
      part = binary_part(original, skip, len)
      escape_to_iodata(rest, skip + len + 1, original, [acc, part | unquote(insert)])
    end
  end

  defp escape_to_iodata(<<_char, rest::bits>>, skip, original, acc, len) do
    escape_to_iodata(rest, skip, original, acc, len + 1)
  end

  # Nothing needed escaping: return the original binary untouched.
  defp escape_to_iodata(<<>>, 0, original, _acc, _len) do
    original
  end

  defp escape_to_iodata(<<>>, skip, original, acc, len) do
    [acc | binary_part(original, skip, len)]
  end
end
|
lib/x/html.ex
| 0.807612 | 0.561756 |
html.ex
|
starcoder
|
defmodule Cronex.Every do
@moduledoc """
This module defines scheduling macros.
"""
@doc """
`Cronex.Every.every/2` macro is used as a simple interface to add a job to the `Cronex.Table`.
## Input Arguments
`frequency` supports the following values: `:minute`, `:hour`, `:day`, `:month`, `:year`, `:monday`, `:tuesday`, `:wednesday`, `:thursday`, `:friday`, `:saturday`, `:sunday`
`job` must be a list with the following structure: `[do: block]`, where `block` is the code referring to a specific job
## Example
every :day do
# Daily task here
end
every :month do
# Monthly task here
end
"""
defmacro every(frequency, [do: block] = _job)
when is_atom(frequency) do
# One generated function per frequency, e.g. :job_every_day. The frequency is
# a compile-time atom, so this does not create atoms from runtime input.
job_name = String.to_atom("job_every_#{frequency}")
quote do
# Register the generated function name in the caller's @jobs accumulator so
# the job table can discover it, then define the job constructor itself.
@jobs unquote(job_name)
@doc false
def unquote(job_name)() do
Cronex.Job.new(
unquote(frequency),
fn -> unquote(block) end
)
|> Cronex.Job.validate!()
end
end
end
@doc """
`Cronex.Every.every/3` macro is used as a simple interface to add a job to the `Cronex.Table`.
Different argument data types combinations are accepted:
- When `arg1` is an atom and `arg2` is a string, they represent the `frequency` and `at` respectively.
- When `arg1` is an integer and `arg2` is a atom, they represent the `interval` and `frequency` respectively.
## Input Arguments
`frequency` supports the following values `:minute`, `:hour`, `:day`, `:month`, `:year`, `:monday`, `:tuesday`, `:wednesday`, `:thursday`, `:friday`, `:saturday`, `:sunday`, when an `interval` is given, only the following values are accepted `:minute`, `:hour`, `:day`, `:month`
`interval` must be an integer representing the interval of frequencies that should exist between each job run
`at` must be a list with the following structure: `[at: time]`, where `time` is a string with the following format `HH:MM`, where `HH` represents the hour and `MM` the minutes at which the job should be run, this value is ignored when given in an every minute or every hour job
`job` must be a list with the following structure: `[do: block]`, where `block` is the code corresponding to a specific job
## Example
every :day, at: "10:00" do
# Daily task at 10:00 here
end
every :monday, at: "12:00" do
# Monday task at 12:00 here
end
every 2, :day do
# Every 2 days task
end
every 3, :week do
# Every 3 weeks task
end
every [:sunday, :monday], at: "14:00" do
# Sunday and Monday task at 14:00 here
end
"""
defmacro every(arg1, [at: time] = _arg2, [do: block] = _job)
when is_atom(arg1) and is_bitstring(time) do
job_name = String.to_atom("job_every_#{arg1}_at_#{time}")
quote do
@jobs unquote(job_name)
@doc false
def unquote(job_name)() do
Cronex.Job.new(
unquote(arg1),
unquote(time),
fn -> unquote(block) end
)
|> Cronex.Job.validate!()
end
end
end
defmacro every(arg1, arg2, [do: block] = _job)
when is_integer(arg1) and is_atom(arg2) do
job_name = String.to_atom("job_every_#{arg1}_#{arg2}")
quote do
@jobs unquote(job_name)
@doc false
def unquote(job_name)() do
Cronex.Job.new(
unquote(arg1),
unquote(arg2),
fn -> unquote(block) end
)
|> Cronex.Job.validate!()
end
end
end
defmacro every(arg1, [at: time], [do: block] = _job)
when is_list(arg1) and is_bitstring(time) do
days = Enum.join(arg1, "_")
job_name = String.to_atom("job_every_#{days}_at_#{time}")
quote do
@jobs unquote(job_name)
@doc false
def unquote(job_name)() do
Cronex.Job.new(
unquote(arg1),
unquote(time),
fn -> unquote(block) end
)
|> Cronex.Job.validate!()
end
end
end
@doc """
`Cronex.Every.every/4` macro is used as a simple interface to add a job to the `Cronex.Table`.
## Input Arguments
`interval` must be an integer representing the interval of frequencies that should exist between each job run
`frequency` supports the following values: `:minute`, `:hour`, `:day`, `:month`
`at` must be a list with the following structure: `[at: time]`, where `time` is a string with the following format `HH:MM`, where `HH` represents the hour and `MM` the minutes at which the job should be run, this value is ignored when given in an every minute or every hour job
`job` must be a list with the following structure: `[do: block]`, where `block` is the code corresponding to a specific job
## Example
every 2, :day, at: "10:00" do
# Every 2 days task
end
every 3, :week, at: "10:00" do
# Every 3 weeks task
end
"""
defmacro every(interval, frequency, [at: time] = _at, [do: block] = _job)
when is_integer(interval) and is_atom(frequency) do
job_name = String.to_atom("job_every_#{interval}_#{frequency}_at_#{time}")
quote do
@jobs unquote(job_name)
@doc false
def unquote(job_name)() do
Cronex.Job.new(
unquote(interval),
unquote(frequency),
unquote(time),
fn -> unquote(block) end
)
|> Cronex.Job.validate!()
end
end
end
end
|
lib/cronex/every.ex
| 0.90532 | 0.82963 |
every.ex
|
starcoder
|
defmodule Bolt.Sips.Types do
@moduledoc """
Basic support for representing nodes, relationships and paths belonging to
a Neo4j graph database.
Four supported types of entities:
- Node
- Relationship
- UnboundRelationship
- Path
More details, about the Bolt protocol, here:
https://github.com/boltprotocol/boltprotocol/blob/master/README.md
Additionally, since bolt V2, new types appears: spatial and temporal
Those are not documented in bolt protocol, but neo4j documentation can be found here:
https://neo4j.com/docs/cypher-manual/current/syntax/temporal/
https://neo4j.com/docs/cypher-manual/current/syntax/spatial/
To work with temporal types, the following Elixir structs are available:
- Time, DateTime, NaiveDateTime
- Calendar.DateTime to work with timezone (as string)
- TimeWithTZOffset, DateTimeWithTZOffset to work with (date)time and timezone offset(as integer)
- Duration
For spatial types, you only need Point struct as it covers:
- 2D point (cartesian or geographic)
- 3D point (cartesian or geographic)
"""
alias Bolt.Sips.TypesHelper
defmodule Entity do
  @moduledoc """
  Base structure for Node and Relationship.

  `use Entity, extra_field: default` defines a struct in the caller that
  merges the shared base fields with the caller-supplied ones.
  """
  # Fields shared by every graph entity.
  @base_fields [id: nil, properties: nil]

  defmacro __using__(fields) do
    # `fields` is the keyword list passed at the `use Entity, ...` call site;
    # it is appended after the base fields at compile time.
    fields = @base_fields ++ fields

    quote do
      defstruct unquote(fields)
    end
  end
end
defmodule Node do
  @moduledoc """
  Self-contained graph node.

  A Node represents a node from a Neo4j graph and consists of a
  unique identifier (within the scope of its origin graph), a list of
  labels and a map of properties.

  https://github.com/boltprotocol/boltprotocol/blob/master/v1/_serialization.asciidoc#node
  """
  # Inherits :id and :properties from Entity and adds :labels.
  use Entity, labels: nil
end
defmodule Relationship do
  @moduledoc """
  Self-contained graph relationship.

  A Relationship represents a relationship from a Neo4j graph and consists of
  a unique identifier (within the scope of its origin graph), identifiers
  for the start and end nodes of that relationship, a type and a map of properties.

  https://github.com/boltprotocol/boltprotocol/blob/master/v1/_serialization.asciidoc#relationship
  """
  # Inherits :id and :properties from Entity; :start/:end hold node ids.
  use Entity, start: nil, end: nil, type: nil
end
defmodule UnboundRelationship do
  @moduledoc """
  Self-contained graph relationship without endpoints.

  An UnboundRelationship represents a relationship relative to a
  separately known start point and end point.

  https://github.com/boltprotocol/boltprotocol/blob/master/v1/_serialization.asciidoc#unboundrelationship
  """
  # Despite the name it still declares :start/:end — Path.graph/1 fills them
  # in once the endpoints become known while walking the path.
  use Entity, start: nil, end: nil, type: nil
end
defmodule Path do
  @moduledoc """
  Self-contained graph path.

  A Path is a sequence of alternating nodes and relationships corresponding to a
  walk in the graph. The path always begins and ends with a node.
  Its representation consists of a list of distinct nodes,
  a list of distinct relationships and a sequence of integers describing the
  path traversal.

  https://github.com/boltprotocol/boltprotocol/blob/master/v1/_serialization.asciidoc#path
  """
  @type t :: %__MODULE__{
          nodes: list() | nil,
          relationships: list() | nil,
          sequence: list() | nil
        }

  defstruct nodes: nil, relationships: nil, sequence: nil

  @doc """
  represents a traversal or walk through a graph and maintains a direction
  separate from that of any relationships traversed
  """
  @spec graph(Path.t()) :: list() | nil
  def graph(path) do
    # The walk starts at the first node of the packed representation.
    entities = [List.first(path.nodes)]

    draw_path(
      path.nodes,
      path.relationships,
      path.sequence,
      0,
      # Every other element of `sequence` appears to be a signed, 1-based
      # relationship index (sign encodes direction) — see draw_path below.
      Enum.take_every(path.sequence, 2),
      entities,
      # last node
      List.first(path.nodes),
      # next node
      nil
    )
  end

  # Recursively expands the packed sequence into an alternating
  # [node, rel, node, rel, ...] list.
  #
  # n   - node list             r  - relationship list
  # s   - full sequence         i  - current step counter
  # 4th - remaining relationship indices (every other element of s)
  # acc - entities built so far ln - last node visited
  # nn  - previous "last node" (carried along but unused by the clauses)
  # @lint false
  defp draw_path(_n, _r, _s, _i, [], acc, _ln, _nn), do: acc

  defp draw_path(n, r, s, i, [h | t] = _rel_index, acc, ln, _nn) do
    # s[2i + 1] holds the index (into n) of the node this step walks to.
    next_node = Enum.at(n, Enum.at(s, 2 * i + 1))

    urel =
      if h > 0 && h < 255 do
        # rel: rels[rel_index - 1], start/end: (ln.id, next_node.id)
        rel = Enum.at(r, h - 1)

        unbound_relationship =
          [:id, :type, :properties, :start, :end]
          |> Enum.zip([rel.id, rel.type, rel.properties, ln.id, next_node.id])

        struct(UnboundRelationship, unbound_relationship)
      else
        # rel: rels[-rel_index - 1], start/end: (next_node.id, ln.id)
        # Neo4j sends: -1, and Bolt.Sips.Internals. returns 255 instead? Investigating,
        # meanwhile ugly path:
        # oh dear ...
        haha = if h == 255, do: -1, else: h
        rel = Enum.at(r, -haha - 1)

        unbound_relationship =
          [:id, :type, :properties, :start, :end]
          |> Enum.zip([rel.id, rel.type, rel.properties, next_node.id, ln.id])

        struct(UnboundRelationship, unbound_relationship)
      end

    # Append the relationship then the node reached by this step.
    draw_path(n, r, s, i + 1, t, (acc ++ [urel]) ++ [next_node], next_node, ln)
  end
end
defmodule TimeWithTZOffset do
  @moduledoc """
  Wraps a `Time` together with its time zone offset (in seconds).

  This temporal type was added in bolt v2.
  """
  defstruct [:time, :timezone_offset]

  @type t :: %__MODULE__{
          time: Calendar.time(),
          timezone_offset: integer()
        }

  @doc """
  Builds a valid `TimeWithTZOffset` from a `Time` and an offset in seconds.
  """
  @spec create(Calendar.time(), integer()) :: TimeWithTZOffset.t()
  def create(%Time{} = time, offset) when is_integer(offset) do
    struct(__MODULE__, time: time, timezone_offset: offset)
  end

  @doc """
  Renders the struct as a cypher-compliant time string.

  Returns `{:error, param}` when the argument is not a well-formed
  `TimeWithTZOffset`.
  """
  @spec format_param(TimeWithTZOffset.t()) :: {:ok, String.t()} | {:error, any()}
  def format_param(%TimeWithTZOffset{time: time, timezone_offset: offset})
      when is_integer(offset) do
    {:ok, Time.to_iso8601(time) <> TypesHelper.formated_time_offset(offset)}
  end

  def format_param(invalid) do
    {:error, invalid}
  end
end
defmodule DateTimeWithTZOffset do
  @moduledoc """
  Wraps a `NaiveDateTime` together with its time zone offset (in seconds).

  This temporal type was added in bolt v2.
  """
  defstruct [:naive_datetime, :timezone_offset]

  @type t :: %__MODULE__{
          naive_datetime: Calendar.naive_datetime(),
          timezone_offset: integer()
        }

  @doc """
  Builds a valid `DateTimeWithTZOffset` from a `NaiveDateTime` and an offset
  in seconds.
  """
  @spec create(Calendar.naive_datetime(), integer()) :: DateTimeWithTZOffset.t()
  def create(%NaiveDateTime{} = naive_datetime, offset) when is_integer(offset) do
    struct(__MODULE__, naive_datetime: naive_datetime, timezone_offset: offset)
  end

  @doc """
  Renders the struct as a cypher-compliant datetime string.

  Returns `{:error, param}` when the argument is not a well-formed
  `DateTimeWithTZOffset`.
  """
  @spec format_param(DateTimeWithTZOffset.t()) :: {:ok, String.t()} | {:error, any()}
  def format_param(%DateTimeWithTZOffset{naive_datetime: ndt, timezone_offset: offset})
      when is_integer(offset) do
    {:ok, NaiveDateTime.to_iso8601(ndt) <> TypesHelper.formated_time_offset(offset)}
  end

  def format_param(invalid) do
    {:error, invalid}
  end
end
defmodule Duration do
  @moduledoc """
  a Duration type, as introduced in bolt V2.
  Composed of months, days, seconds and nanoseconds, it can be used in date operations
  """
  defstruct years: 0,
            months: 0,
            weeks: 0,
            days: 0,
            hours: 0,
            minutes: 0,
            seconds: 0,
            nanoseconds: 0

  @type t :: %__MODULE__{
          years: non_neg_integer(),
          months: non_neg_integer(),
          weeks: non_neg_integer(),
          days: non_neg_integer(),
          hours: non_neg_integer(),
          minutes: non_neg_integer(),
          seconds: non_neg_integer(),
          nanoseconds: non_neg_integer()
        }

  # ISO-8601 / cypher duration designators used when formatting.
  @period_prefix "P"
  @time_prefix "T"
  @year_suffix "Y"
  @month_suffix "M"
  @week_suffix "W"
  @day_suffix "D"
  @hour_suffix "H"
  # "M" is reused: month in the date part, minute in the time part.
  @minute_suffix "M"
  @second_suffix "S"

  @doc """
  Create a Duration struct from data returned by Neo4j.
  Neo4j returns a list of 4 integers:
  - months
  - days
  - seconds
  - nanoseconds
  Struct elements are computed in a logical way, then for exmple 65 seconds is 1min and 5
  seconds. Beware that you may not retrieve the same data you send!
  Note: days are not touched as they are not a fixed number of days for each month.

  ## Example:
      iex> Duration.create(15, 53, 125, 54)
      %Bolt.Sips.Types.Duration{
        days: 53,
        hours: 0,
        minutes: 2,
        months: 3,
        nanoseconds: 54,
        seconds: 5,
        weeks: 0,
        years: 1
      }
  """
  @spec create(integer(), integer(), integer(), integer()) :: Duration.t()
  def create(months, days, seconds, nanoseconds)
      when is_integer(months) and is_integer(days) and is_integer(seconds) and
             is_integer(nanoseconds) do
    # Normalize months into years + remaining months.
    years = div(months, 12)
    months_ = rem(months, 12)
    # Split seconds into hours/minutes/seconds, then roll any whole seconds
    # hidden in the nanosecond value into the seconds component.
    {hours, minutes, seconds_inter} = TypesHelper.decompose_in_hms(seconds)
    {seconds_, nanoseconds_} = manage_nanoseconds(seconds_inter, nanoseconds)

    %Duration{
      years: years,
      months: months_,
      days: days,
      hours: hours,
      minutes: minutes,
      seconds: seconds_,
      nanoseconds: nanoseconds_
    }
  end

  # Carries whole seconds out of a nanosecond value >= 1s.
  @spec manage_nanoseconds(integer(), integer()) :: {integer(), integer()}
  defp manage_nanoseconds(seconds, nanoseconds) when nanoseconds >= 1_000_000_000 do
    seconds_ = seconds + div(nanoseconds, 1_000_000_000)
    nanoseconds_ = rem(nanoseconds, 1_000_000_000)
    {seconds_, nanoseconds_}
  end

  defp manage_nanoseconds(seconds, nanoseconds) do
    {seconds, nanoseconds}
  end

  @doc """
  Convert a %Duration in a cypher-compliant string.
  To know everything about duration format, please see:
  https://neo4j.com/docs/cypher-manual/current/syntax/temporal/#cypher-temporal-durations
  """
  @spec format_param(Duration.t()) :: {:ok, String.t()} | {:error, any()}
  def format_param(
        %Duration{
          years: y,
          months: m,
          days: d,
          hours: h,
          minutes: mm,
          seconds: s,
          nanoseconds: ss
        } = duration
      )
      when is_integer(y) and is_integer(m) and is_integer(d) and is_integer(h) and
             is_integer(mm) and is_integer(s) and is_integer(ss) do
    formated = format_date(duration) <> format_time(duration)

    # An all-zero duration formats as "" (no lone "P" prefix is emitted).
    param =
      case formated do
        "" -> ""
        formated_duration -> @period_prefix <> formated_duration
      end

    {:ok, param}
  end

  def format_param(param) do
    {:error, param}
  end

  # Date part: zero components are omitted entirely.
  @spec format_date(Duration.t()) :: String.t()
  defp format_date(%Duration{years: years, months: months, weeks: weeks, days: days}) do
    format_duration_part(years, @year_suffix) <>
      format_duration_part(months, @month_suffix) <>
      format_duration_part(weeks, @week_suffix) <> format_duration_part(days, @day_suffix)
  end

  # Time part: only emitted when at least one time component is positive.
  @spec format_time(Duration.t()) :: String.t()
  defp format_time(%Duration{
         hours: hours,
         minutes: minutes,
         seconds: s,
         nanoseconds: ns
       })
       when hours > 0 or minutes > 0 or s > 0 or ns > 0 do
    {seconds, nanoseconds} = manage_nanoseconds(s, ns)
    nanoseconds_f = nanoseconds |> Integer.to_string() |> String.pad_leading(9, "0")
    # NOTE(review): converting "sec.nanos" to a float means Float.to_string/1
    # may later render very small values in scientific notation (e.g. seconds
    # 0 with a few nanoseconds -> "5.4e-8S"), which does not look
    # cypher-compliant — worth confirming/fixing.
    seconds_f = "#{Integer.to_string(seconds)}.#{nanoseconds_f}" |> String.to_float()

    @time_prefix <>
      format_duration_part(hours, @hour_suffix) <>
      format_duration_part(minutes, @minute_suffix) <>
      format_duration_part(seconds_f, @second_suffix)
  end

  defp format_time(_) do
    ""
  end

  # Emits "<n><suffix>" for positive parts and "" otherwise, so zero
  # components simply disappear from the formatted duration.
  @spec format_duration_part(number(), String.t()) :: String.t()
  defp format_duration_part(duration_part, suffix)
       when duration_part > 0 and is_bitstring(suffix) do
    "#{stringify_number(duration_part)}#{suffix}"
  end

  defp format_duration_part(_, _) do
    ""
  end

  # Integers print without a decimal point; floats via Float.to_string/1.
  @spec stringify_number(number()) :: String.t()
  defp stringify_number(number) when is_integer(number) do
    Integer.to_string(number)
  end

  defp stringify_number(number) do
    Float.to_string(number)
  end
end
defmodule Point do
  @moduledoc """
  Manage spatial data introduced in Bolt V2.

  Point can be:
  - Cartesian 2D
  - Geographic 2D
  - Cartesian 3D
  - Geographic 3D
  """
  # Neo4j spatial reference system identifiers.
  @srid_cartesian 7203
  @srid_cartesian_3d 9157
  @srid_wgs_84 4326
  @srid_wgs_84_3d 4979

  defstruct [:crs, :srid, :x, :y, :z, :longitude, :latitude, :height]

  @type t :: %__MODULE__{
          crs: String.t(),
          srid: integer(),
          x: number() | nil,
          y: number() | nil,
          z: number() | nil,
          longitude: number() | nil,
          latitude: number() | nil,
          height: number() | nil
        }

  defguardp is_crs(crs) when crs in ["cartesian", "cartesian-3d", "wgs-84", "wgs-84-3d"]

  defguardp is_srid(srid)
            when srid in [@srid_cartesian, @srid_cartesian_3d, @srid_wgs_84, @srid_wgs_84_3d]

  # Each coordinate is either a number or nil (unused for this point kind).
  defguardp are_coords(lt, lg, h, x, y, z)
            when (is_number(lt) or is_nil(lt)) and (is_number(lg) or is_nil(lg)) and
                   (is_number(h) or is_nil(h)) and (is_number(x) or is_nil(x)) and
                   (is_number(y) or is_nil(y)) and (is_number(z) or is_nil(z))

  defguardp is_valid_coords(x, y) when is_number(x) and is_number(y)
  defguardp is_valid_coords(x, y, z) when is_number(x) and is_number(y) and is_number(z)

  @doc """
  A 2D point either needs:
  - 2 coordinates and a atom (:cartesian or :wgs_84) to define its type
  - 2 coordinates and a srid (4326 or 7203) to define its type

  ## Examples:
      iex> Point.create(:cartesian, 10, 20.0)
      %Bolt.Sips.Types.Point{
        crs: "cartesian",
        height: nil,
        latitude: nil,
        longitude: nil,
        srid: 7203,
        x: 10.0,
        y: 20.0,
        z: nil
      }

      iex> Point.create(4326, 10, 20.0)
      %Bolt.Sips.Types.Point{
        crs: "wgs-84",
        height: nil,
        latitude: 20.0,
        longitude: 10.0,
        srid: 4326,
        x: 10.0,
        y: 20.0,
        z: nil
      }
  """
  @spec create(:cartesian | :wgs_84 | 4326 | 7203, number(), number()) :: Point.t()
  def create(:cartesian, x, y) do
    create(@srid_cartesian, x, y)
  end

  def create(:wgs_84, longitude, latitude) do
    create(@srid_wgs_84, longitude, latitude)
  end

  def create(@srid_cartesian, x, y) when is_valid_coords(x, y) do
    %Point{
      crs: crs(@srid_cartesian),
      srid: @srid_cartesian,
      x: format_coord(x),
      y: format_coord(y)
    }
  end

  # Geographic points mirror x/y into longitude/latitude.
  def create(@srid_wgs_84, longitude, latitude) when is_valid_coords(longitude, latitude) do
    %Point{
      crs: crs(@srid_wgs_84),
      srid: @srid_wgs_84,
      x: format_coord(longitude),
      y: format_coord(latitude),
      longitude: format_coord(longitude),
      latitude: format_coord(latitude)
    }
  end

  @doc """
  Create a 3D point.

  A 3D point either needs:
  - 3 coordinates and a atom (:cartesian or :wgs_84) to define its type
  - 3 coordinates and a srid (4979 or 9157) to define its type

  ## Examples:
      iex> Point.create(:cartesian, 10, 20.0, 30)
      %Bolt.Sips.Types.Point{
        crs: "cartesian-3d",
        height: nil,
        latitude: nil,
        longitude: nil,
        srid: 9157,
        x: 10.0,
        y: 20.0,
        z: 30.0
      }

      iex> Point.create(4979, 10, 20.0, 30)
      %Bolt.Sips.Types.Point{
        crs: "wgs-84-3d",
        height: 30.0,
        latitude: 20.0,
        longitude: 10.0,
        srid: 4979,
        x: 10.0,
        y: 20.0,
        z: 30.0
      }
  """
  @spec create(:cartesian | :wgs_84 | 4979 | 9157, number(), number(), number()) :: Point.t()
  def create(:cartesian, x, y, z) do
    create(@srid_cartesian_3d, x, y, z)
  end

  def create(:wgs_84, longitude, latitude, height) do
    create(@srid_wgs_84_3d, longitude, latitude, height)
  end

  def create(@srid_cartesian_3d, x, y, z) when is_valid_coords(x, y, z) do
    %Point{
      crs: crs(@srid_cartesian_3d),
      srid: @srid_cartesian_3d,
      x: format_coord(x),
      y: format_coord(y),
      z: format_coord(z)
    }
  end

  def create(@srid_wgs_84_3d, longitude, latitude, height)
      when is_valid_coords(longitude, latitude, height) do
    %Point{
      crs: crs(@srid_wgs_84_3d),
      srid: @srid_wgs_84_3d,
      x: format_coord(longitude),
      y: format_coord(latitude),
      z: format_coord(height),
      longitude: format_coord(longitude),
      latitude: format_coord(latitude),
      height: format_coord(height)
    }
  end

  # Maps a srid to its coordinate reference system name.
  @spec crs(4326 | 4979 | 7203 | 9157) :: String.t()
  defp crs(@srid_cartesian), do: "cartesian"
  defp crs(@srid_cartesian_3d), do: "cartesian-3d"
  defp crs(@srid_wgs_84), do: "wgs-84"
  defp crs(@srid_wgs_84_3d), do: "wgs-84-3d"

  # Coordinates are normalized to floats (integer / 1 yields a float).
  defp format_coord(coord) when is_integer(coord), do: coord / 1
  defp format_coord(coord), do: coord

  @doc """
  Convert a Point struct into a cypher-compliant map.

  ## Example
      iex> Point.create(4326, 10, 20.0) |> Point.format_param()
      {:ok, %{crs: "wgs-84", latitude: 20.0, longitude: 10.0, x: 10.0, y: 20.0}}
  """
  @spec format_param(Point.t()) :: {:ok, map()} | {:error, any()}
  def format_param(
        %Point{crs: crs, srid: srid, latitude: lt, longitude: lg, height: h, x: x, y: y, z: z} =
          point
      )
      when is_crs(crs) and is_srid(srid) and are_coords(lt, lg, h, x, y, z) do
    # Drop unused (nil) coordinates and the srid; keep crs plus set coords.
    param =
      point
      |> Map.from_struct()
      |> Enum.filter(fn {_, val} -> not is_nil(val) end)
      |> Map.new()
      |> Map.drop([:srid])

    {:ok, param}
  end

  def format_param(param) do
    {:error, param}
  end
end
end
|
lib/bolt_sips/types.ex
| 0.911577 | 0.62458 |
types.ex
|
starcoder
|
defmodule TelemetryMetricsZabbix do
  @moduledoc """
  Provides a Zabbix format reporter and server for Telemetry.Metrics definitions.
  ## Installation
  The package can be installed by adding `telemetry_metrics_zabbix` to your list of dependencies in `mix.exs`:
  ```elixir
  def deps do
    [
      {:telemetry_metrics_zabbix, "~> 0.1"}
    ]
  end
  ```
  ## Configuration
  Add approperiate config section to your `config.exs`
  ```elixir
  config :telemetry_metrics_zabbix, :config,
    host: "zabbix.trapper.host",
    port: 10051,
    hostname: "monitored.host",
    batch_window_size: 1000,
    timestamping: true
  ```
  ## Usage
  Add `TelemetryMetricsZabbix` to your application supervision tree and pass metrics as a param.
  ### Example
  ```elixir
  metrics = [
    Telemetry.Metrics.sum("http.request.latency", tags: [:host])
  ]
  children = [
    {TelemetryMetricsZabbix, metrics: metrics}
  ]
  opts = [strategy: :one_for_one, name: MyApp]
  Supervisor.start_link(children, opts)
  ```
  ### Currently supported metrics
  - `Telemetry.Metrics.Counter`: counts events
  - `Telemetry.Metrics.Sum`: sums events' values
  - `Telemetry.Metrics.Summary`: calculates events' values average
  - `Telemetry.Metrics.LastValue`: returns all events' values with timestamps
  ### Measuremet to zabbix value conversion
  Measurements are aggregated by event name, measurement and tag values. All those parts are included as Zabbix Sender Protocol key.
  Tag values are treated as Zabbix key parameters sorted by tag key.
  #### Example
  with metric
  ```elixir
  Telemetry.Metrics.sum("http.request.latency", tags: [:host, :method])
  ```
  and event
  ```elixir
  :telemetry.execute([:http, :request], %{latency: 200}, %{host: "localhost", method: "GET"})
  ```
  Zabbix key will be `http.request.latency["localhost","GET"]`
  """
  require Logger
  use GenServer

  alias TelemetryMetricsZabbix.Collector
  alias ZabbixSender.Protocol

  # Default application values
  @host "127.0.0.1"
  @port 10051
  @timestamping true
  @batch_window_size 1_000

  @type t :: %__MODULE__{
          host: String.t(),
          port: integer(),
          hostname: String.t(),
          timestamping: boolean,
          batch_window_size: integer(),
          data: %{},
          metrics: list(any),
          batch_timeout: reference | nil
        }

  defstruct [
    :host,
    :port,
    :hostname,
    :timestamping,
    :batch_window_size,
    :data,
    :metrics,
    :batch_timeout
  ]

  @doc """
  Starts the reporter. Requires a `:metrics` option with the list of
  `Telemetry.Metrics` definitions to attach; accepts an optional `:name`.
  """
  def start_link(opts) do
    metrics =
      opts[:metrics] ||
        raise ArgumentError, "the :metrics option is required by #{inspect(__MODULE__)}"

    GenServer.start_link(__MODULE__, metrics, name: opts |> Keyword.get(:name, __MODULE__))
  end

  @impl true
  @spec init([Telemetry.Metrics.t()]) :: {:ok, t()}
  def init(metrics) do
    env = Application.get_env(:telemetry_metrics_zabbix, :config, [])
    host = Keyword.get(env, :host, @host)
    port = Keyword.get(env, :port, @port)
    timestamping = Keyword.get(env, :timestamping, @timestamping)
    batch_window_size = Keyword.get(env, :batch_window_size, @batch_window_size)
    hostname = Keyword.get(env, :hostname, "")

    # Reject metric names that would produce invalid Zabbix keys.
    for metric <- metrics, name_part <- metric.name do
      unless Regex.match?(~r/^[a-zA-Z0-9_]+$/, "#{name_part}") do
        raise ArgumentError, message: "invalid metric name #{metric.name}"
      end
    end

    Process.flag(:trap_exit, true)
    groups = Enum.group_by(metrics, & &1.event_name)

    # Precompiled pattern used to escape double quotes in tag values.
    escape_pattern = :binary.compile_pattern("\"")

    # One telemetry handler per event; the grouped metrics ride along as the
    # handler config argument.
    for {event, metrics} <- groups do
      :ok =
        :telemetry.attach(
          get_id(event),
          event,
          fn _event_name, measurements, metadata, config ->
            handle_event(measurements, metadata, config, escape_pattern)
          end,
          metrics
        )
    end

    {:ok,
     %__MODULE__{
       host: host,
       port: port,
       hostname: hostname,
       timestamping: timestamping,
       batch_window_size: batch_window_size,
       data: %{},
       metrics: Map.keys(groups)
     }}
  end

  @impl true
  @spec terminate(any(), t()) :: :ok
  def terminate(_, %__MODULE__{metrics: metrics}) do
    # Detach every handler attached in init/1.
    for event <- metrics do
      :ok = :telemetry.detach(get_id(event))
    end

    :ok
  end

  # Handler id unique per event and owning process.
  defp get_id(event), do: {__MODULE__, event, self()}

  # Runs in the telemetry caller's process: builds the Zabbix key from the
  # metric name and sorted, quoted tag values, then casts the measurement to
  # the reporter for batching.
  defp handle_event(measurements, metadata, metrics, escape_pattern) do
    for metric <- metrics do
      try do
        if keep?(metric, metadata) do
          measurement = extract_measurement(metric, measurements, metadata)
          tags = extract_tags(metric, metadata)

          key =
            metric.name
            |> Enum.map_join(".", &"#{&1}")

          # Tag values become Zabbix key parameters, sorted by tag key and
          # wrapped in escaped double quotes.
          tags_stringified =
            tags
            |> Enum.sort_by(fn {k, _v} -> k end)
            |> Enum.map_join(",", fn {_k, value} ->
              escaped_value = "#{value}" |> String.replace(escape_pattern, "\\\"")
              "\"" <> escaped_value <> "\""
            end)

          key =
            case tags_stringified do
              "" -> key
              _ -> key <> "[" <> tags_stringified <> "]"
            end

          report(key, measurement, metric)
        end
      rescue
        e ->
          Logger.error([
            "#{__MODULE__}: could not format metric #{inspect(metric)}\n",
            Exception.format(:error, e, __STACKTRACE__)
          ])
      end
    end
  end

  @spec keep?(Telemetry.Metrics.t(), map()) :: boolean()
  defp keep?(%{keep: nil}, _metadata), do: true
  defp keep?(metric, metadata), do: metric.keep.(metadata)

  defp extract_measurement(metric, measurements, metadata) do
    case metric.measurement do
      fun when is_function(fun, 2) -> fun.(measurements, metadata)
      fun when is_function(fun, 1) -> fun.(measurements)
      key -> measurements[key]
    end
  end

  defp extract_tags(metric, metadata) do
    tag_values = metric.tag_values.(metadata)
    Map.take(tag_values, metric.tags)
  end

  # NOTE(review): always casts to __MODULE__ even though start_link/1 accepts
  # a custom :name — a custom-named instance will never receive reports;
  # confirm whether multiple instances are meant to be supported.
  defp report(key, value, metric) do
    GenServer.cast(__MODULE__, {:report, key, value, metric, System.system_time(:second)})
  end

  @impl true
  def handle_cast(
        {:report, key, value, metric, timestamp},
        %__MODULE__{
          batch_timeout: batch_timeout,
          batch_window_size: batch_window_size,
          data: data
        } = state
      ) do
    # First report of an empty batch schedules the delayed flush.
    batch_timeout = maybe_schedule_batch_send(batch_timeout, batch_window_size, data == %{})

    data =
      Map.update(data, key, {metric, Collector.init(metric, value, timestamp)}, fn {_, prev_value} ->
        {metric, Collector.update(metric, prev_value, value, timestamp)}
      end)

    {:noreply, %__MODULE__{state | data: data, batch_timeout: batch_timeout}}
  end

  @impl true
  def handle_info(
        {:zabbix, :send},
        %__MODULE__{data: data, timestamping: timestamping, hostname: hostname} = state
      ) do
    batch_timestamp = System.system_time(:second)

    messages =
      data
      |> Enum.flat_map(fn {key, {metric, value}} ->
        Collector.extract(metric, value)
        |> Enum.map(fn
          # Collector values may carry their own timestamp; otherwise the
          # batch flush time is used (when timestamping is enabled).
          {v, timestamp} ->
            Protocol.value(hostname, key, v, if(timestamping, do: timestamp))

          v ->
            Protocol.value(hostname, key, v, if(timestamping, do: batch_timestamp))
        end)
      end)

    messages
    |> send(batch_timestamp, state)

    {:noreply, %__MODULE__{state | data: %{}, batch_timeout: nil}}
  end

  def handle_info(_, state) do
    {:noreply, state}
  end

  # send_after when batch is empty
  defp maybe_schedule_batch_send(nil, bws, true) do
    Process.send_after(self(), {:zabbix, :send}, bws)
  end

  defp maybe_schedule_batch_send(reference, _, _), do: reference

  # Ships the batch via ZabbixSender and logs the server's response.
  defp send(values, timestamp, %__MODULE__{host: host, port: port}) do
    case ZabbixSender.send_values(values, timestamp, host, port) do
      {:ok, %{failed: 0, total: total}} ->
        Logger.debug("#{__MODULE__}: server processed #{total} messages")

      {:ok, %{failed: failed, total: total}} ->
        keys =
          values
          |> Enum.map(fn %{key: key} -> key end)
          |> Enum.uniq()

        Logger.warn(
          "#{__MODULE__}: server could not process #{failed} out of #{total} messages. Message keys was: #{
            inspect(keys, limit: :infinity)
          }"
        )

      {:error, reason} ->
        Logger.warn("#{__MODULE__}: could not send messages due to #{inspect(reason)}")
    end
  end
end
|
lib/telemetry_metrics_zabbix.ex
| 0.913489 | 0.771241 |
telemetry_metrics_zabbix.ex
|
starcoder
|
defmodule Mason do
  @moduledoc """
  Mason is a small module to help you coerce values in structs. Note:
  Since we cannot define structs in doctests we simply import the `User` module
  from in our tests (see test/mason_test.exs).
  """

  @doc """
  The struct method takes a module and some params and coerces the params into
  the struct defined in the module. You need to define a function
  `masonstruct/0` on the module which defines how to coerce the params, e.g.:
      defmodule User do
        defstruct [ :age, :size, :gpa, :active, :role, :STATUS, :created_at ]
        def masonstruct do
          %{
            age: Integer,
            gpa: Float,
            active: Boolean,
            role: Atom,
            status: &coerce_status/1,
            createdAt: &({ :created_at, elem(DateTime.from_iso8601(&1), 1) })
          }
        end
        def coerce_status(value) do
          case value do
            "active" -> { :STATUS, :online }
            _ -> { :STATUS, value }
          end
        end
      end

  # Simple coercion
  Mason expects values to be strings by default and converts them to
  Integer, Boolean, Float or Atom.

      iex> Mason.struct User, %{ age: "23", gpa: "4.0", active: "true", role: "admin" }
      %User{
        STATUS: nil,
        active: true,
        age: 23,
        created_at: nil,
        gpa: 4.0,
        role: :admin,
        size: nil
      }

  # Dynamic coercion
  You can supply a function to do the coercion. It is also possible
  to map keys this way. The function takes the value of the field as argument.
  Consider this, e.g.:
      def masonstruct do
        {
          createdAt: &({ :created_at, elem(DateTime.from_iso8601(&1), 1) })
        }
      end

      iex> user = Mason.struct User, %{ createdAt: "2016-02-29T12:30:30+00:00" }
      iex> user.created_at
      #DateTime<2016-02-29 12:30:30Z>

  # Coercing lists
  You can coerce into lists by supplying an array with the type. You can supply
  Elixir's simple types as well as a function. Lists can be nested. Consider
  the following masonstruct definition:
      def masonstruct do
        {
          available: [ Boolean ],
          dates: [ &(elem(DateTime.from_iso8601(&1), 1)) ]
        }

      iex> Mason.struct User, %{ available: [ "true", "false"] }
      %User{
        STATUS: nil,
        active: nil,
        age: nil,
        available: [true, false],
        created_at: nil,
        data: nil,
        dates: nil,
        gpa: nil,
        role: nil,
        roles: nil,
        size: nil,
        user_ids: nil
      }

      iex> user = Mason.struct User, %{ dates: [ "2016-02-29T12:30:30+00:00" ] }
      iex> List.first user.dates
      #DateTime<2016-02-29 12:30:30Z>
  """
  def struct(module, params) do
    fields =
      Enum.map(params, fn {key, value} ->
        # String keys are converted to atoms so they can index masonstruct/0.
        key = if is_binary(key), do: String.to_atom(key), else: key

        case module.masonstruct()[key] do
          # A function coercer returns the full {key, value} pair itself,
          # which also allows it to remap the key.
          coercer when is_function(coercer) -> coercer.(value)
          type -> {key, coerce(value, type)}
        end
      end)

    Kernel.struct(module, fields)
  end

  # A list type coerces each element with the list's first (inner) type;
  # nesting recurses naturally.
  defp coerce(values, type) when is_list(type) do
    inner = List.first(type)
    Enum.map(values, &coerce(&1, inner))
  end

  # A function type is applied to the raw value directly.
  defp coerce(value, type) when is_function(type), do: type.(value)

  # Simple types: values are expected to be strings.
  defp coerce(value, Integer), do: String.to_integer(value)
  defp coerce(value, Boolean), do: String.to_existing_atom(value)
  defp coerce(value, Float), do: String.to_float(value)
  defp coerce(value, Atom), do: String.to_atom(value)

  # Unknown or missing type: pass the value through unchanged.
  defp coerce(value, _type), do: value
end
|
lib/mason.ex
| 0.762778 | 0.651286 |
mason.ex
|
starcoder
|
defmodule Commissar do
@moduledoc """
Commissar provides relatively simple pattern for creating sets of policies to
see if a subject is allowed to execute a particular action related to a given
context.
A subject is whatever is trying to execute a particular action. In most cases
this will probably be a user, but it could just as easily be a group or any
other resource in an application.
A context is any term that provides the authorization policies with what will
be required to decide if the subject is allowed to execute a particular
action. This might be another system resource, but it may also be a tuple
containing some kind of permissions related to the user along with a given
resource. They could be in a map as well. It doesn't matter so long as they
conform to the expectations of the policies.
An action is a descriptor of what subject wants to execute on the context. In
most cases it will be an atom or a string with some sort of CRUD type name
such as `read` or `update`.
A policy is a function that takes an action, a subject, and a context and
returns one of six responses (see `policy_result` type).
"""
@typedoc """
One of six possible responses:
* `:ok` - The subject is allowed. Do not process any further policies.
* `:continue` - The policy could neither allow nor deny the subject. Move on
to the next policy. If no policies remain, the default response is to deny.
* `:error` - The subject is denied. Do not process any further policies.
* `{:error, reason}` - A denial with more information. The `reason` could be
anything from an atom with some kind of error code to a map with a bunch of
contextual information.
* `false`: Identical to `:error`.
* `true`: Identical to `:ok`.
`true` and `false` are conveniences boolean checks which are common in
authorization routines.
"""
@type policy_result() :: :ok | :continue | :error | {:error | any()} | false | true
@typedoc """
A function that takes an action, a subject, and a context, and returns a
`policy_result`.
"""
@type policy() :: (any(), any(), any() -> policy_result())
@doc """
Returns a boolean for a given authorize result. In general, this function
won't be called directly.
"""
@spec allow?(any()) :: boolean()
def allow?(:ok), do: true
def allow?(_), do: false
@doc """
Similar to `authorize/4` but returns a boolean response instead. This should
be used when you have no use for any potential denial reasons.
"""
@spec allow?(any(), any(), any(), [policy()]) :: boolean()
def allow?(subject, action, context, policies) do
allow?(authorize(subject, action, context, policies))
end
@doc """
Checks to see whether a subject attempting an action is allowed to do so on a
context with a given set of policies.

Note that the response is not a `policy_result`: it is either `:ok` or an
`{:error, reason}` tuple.
"""
@spec authorize(any(), any(), any(), [policy()]) :: :ok | {:error, any()}
def authorize(subject, action, context, policies) when is_list(policies) do
  # Start in the :continue state so the first policy gets evaluated.
  check_policies(:continue, subject, action, context, policies)
end
@doc """
Exports a single policy from an authorizer to be used as a policy.
"""
@spec export_policy(module(), atom()) :: policy()
# Wraps the named policy as a 3-arity anonymous function that forwards
# subject/action/context to `authorizer_module.policy/4`.
def export_policy(authorizer_module, policy_name)
    when is_atom(authorizer_module) and is_atom(policy_name),
    do: &apply(authorizer_module, :policy, [policy_name, &1, &2, &3])
@doc """
Exports all policies from an authorizer module.
"""
@spec export_policies(module()) :: [policy()]
def export_policies(authorizer_module) when is_atom(authorizer_module) do
  # The module's policy list may be nested, so flatten before converting
  # each entry into a callable policy function.
  for policy <- List.flatten(authorizer_module.policies()) do
    get_policy(authorizer_module, policy)
  end
end
# No policy produced a decision and none remain: deny by default.
defp check_policies(:continue, _subject, _action, _context, []) do
  {:error, :no_matching_policy}
end

# Still undecided: evaluate the next policy and recurse on its result.
# Note the call order is (subject, action, context).
defp check_policies(:continue, subject, action, context, [policy | rest]) do
  policy.(subject, action, context)
  |> check_policies(subject, action, context, rest)
end

# A policy returned a denial with a reason; halt and propagate it as-is.
defp check_policies({:error, _} = result, _subject, _action, _context, _policies), do: result

# `true` / `:ok` allow immediately; remaining policies are skipped.
defp check_policies(result, _subject, _action, _context, _policies)
     when result in [true, :ok],
     do: :ok

# `false` / `:error` deny immediately with a generic reason.
defp check_policies(result, _subject, _action, _context, _policies)
     when result in [false, :error],
     do: {:error, :access_denied}
# A ready-made 3-arity policy function is used as-is; anything else is
# assumed to be a policy name to export from the authorizer module.
defp get_policy(_, func) when is_function(func, 3), do: func

defp get_policy(authorizer_module, policy_name),
  do: export_policy(authorizer_module, policy_name)
end
|
lib/commissar.ex
| 0.832134 | 0.787032 |
commissar.ex
|
starcoder
|
defmodule Day07 do
  @moduledoc """
  Advent of Code 2019, day 7: find the amplifier phase settings that
  maximise the thruster signal produced by an Intcode program.

  The Intcode machine state is a map from address (non-negative integer)
  to value, extended with bookkeeping keys:

    * `:input`  - list of pending input values
    * `:output` - accumulated output values, most recent first
    * `:ip`     - instruction pointer saved when execution suspends after
      producing output (absent once the program has halted)
  """

  # Part 1: five amplifiers run once in series with phases 0..4.
  def part1(input) do
    program = read_program(input)
    permutations(0, 4)
    |> Stream.filter(&are_unique_phases/1)
    |> Stream.map(fn phases -> run_amplifiers(phases, program) end)
    |> Enum.max
  end

  # Part 2: amplifiers run in a feedback loop with phases 5..9.
  def part2(input) do
    program = read_program(input)
    permutations(5, 9)
    |> Stream.filter(&are_unique_phases/1)
    |> Stream.map(fn phases -> run_feedback_loop(phases, program) end)
    |> Enum.max
  end

  # Lazily generates all length-n sequences over min..max (with
  # repetition); duplicates are rejected later by are_unique_phases/1.
  defp permutations(min, max) do
    n = max - min + 1
    permutations(min..max, n, [])
    |> Stream.chunk_every(n)
  end

  # With no iterations left the accumulated prefix is emitted into the
  # flat stream, which Stream.chunk_every/2 above regroups into
  # n-element lists.
  defp permutations(_range, 0, prefix), do: prefix

  defp permutations(range, iters, prefix) do
    range
    |> Stream.flat_map(fn elem ->
      prefix = [elem | prefix]
      permutations(range, iters - 1, prefix)
    end)
  end

  defp are_unique_phases(phases) do
    length(phases) === length(Enum.uniq(phases))
  end

  # Part 1 pipeline: each amplifier consumes its phase plus the previous
  # amplifier's output (0 for the first) and yields a single output.
  defp run_amplifiers(phases, memory) do
    Enum.reduce(phases, 0, fn phase, thrust ->
      memory = set_input(memory, [phase, thrust])
      memory = execute(memory)
      [output] = read_output(memory)
      output
    end)
  end

  # Part 2: prime every amplifier with its phase once, then keep cycling
  # outputs around the loop until the programs halt.
  defp run_feedback_loop(phases, memory) do
    zipped = Enum.zip(phases, List.duplicate(memory, length(phases)))
    {memories, thrust} = run_first_loop(zipped, 0, [])
    run_loops(memories, thrust)
  end

  # First pass: feed [phase, thrust] to each amplifier and collect the
  # suspended machine states (in loop order) plus the final output.
  defp run_first_loop([{phase, memory} | tail], thrust, acc) do
    memory = set_input(memory, [phase, thrust])
    memory = execute(memory)
    [thrust] = read_output(memory)
    memory = reset_output(memory)
    run_first_loop(tail, thrust, [memory | acc])
  end

  defp run_first_loop([], thrust, acc) do
    {Enum.reverse(acc), thrust}
  end

  # Keep running full loops until an amplifier produces no output
  # (its program halted); the last thrust value is the answer.
  defp run_loops(memories, thrust) do
    case run_one_loop(memories, thrust, []) do
      {memories, thrust} -> run_loops(memories, thrust)
      :done -> thrust
    end
  end

  # Resume each amplifier with the current thrust value. An empty output
  # list signals the program reached opcode 99, ending the loop.
  defp run_one_loop([memory | memories], thrust, acc) do
    memory = set_input(memory, [thrust])
    memory = resume(memory)
    case read_output(memory) do
      [thrust] ->
        memory = reset_output(memory)
        run_one_loop(memories, thrust, [memory | acc])
      [] ->
        run_one_loop(memories, thrust, :done)
    end
  end

  defp run_one_loop([], _thrust, :done), do: :done

  defp run_one_loop([], thrust, acc) do
    {Enum.reverse(acc), thrust}
  end

  # Continue execution from the instruction pointer saved when the
  # machine last emitted output.
  defp resume(memory) do
    execute(memory, Map.fetch!(memory, :ip))
  end

  # Intcode interpreter. Execution pauses after opcode 4 (output),
  # saving :ip so it can be resumed; opcode 99 halts and removes :ip.
  defp execute(memory, ip \\ 0) do
    {opcode, modes} = fetch_opcode(memory, ip)
    case opcode do
      1 ->
        memory = exec_arith_op(&+/2, modes, memory, ip)
        execute(memory, ip + 4)
      2 ->
        memory = exec_arith_op(&*/2, modes, memory, ip)
        execute(memory, ip + 4)
      3 ->
        memory = exec_input(memory, ip)
        execute(memory, ip + 2)
      4 ->
        # Suspend after output so a feedback loop can interleave machines.
        memory = exec_output(modes, memory, ip)
        Map.put(memory, :ip, ip + 2)
      5 ->
        ip = exec_if(&(&1 !== 0), modes, memory, ip)
        execute(memory, ip)
      6 ->
        ip = exec_if(&(&1 === 0), modes, memory, ip)
        execute(memory, ip)
      7 ->
        memory = exec_cond(&(&1 < &2), modes, memory, ip)
        execute(memory, ip + 4)
      8 ->
        memory = exec_cond(&(&1 === &2), modes, memory, ip)
        execute(memory, ip + 4)
      99 ->
        Map.delete(memory, :ip)
    end
  end

  # Add/multiply: read two operands, write the result. The destination
  # is always read as a raw address (never mode-resolved).
  defp exec_arith_op(op, modes, memory, ip) do
    [in1, in2] = read_operand_values(memory, ip + 1, modes, 2)
    out_addr = read(memory, ip + 3)
    result = op.(in1, in2)
    write(memory, out_addr, result)
  end

  # Opcode 3: pop the next queued input value into the target address.
  defp exec_input(memory, ip) do
    out_addr = read(memory, ip + 1)
    input = Map.fetch!(memory, :input)
    memory = Map.put(memory, :input, tl(input))
    write(memory, out_addr, hd(input))
  end

  # Opcode 4: prepend the operand value to the :output list.
  defp exec_output(modes, memory, ip) do
    [value] = read_operand_values(memory, ip + 1, modes, 1)
    output = Map.get(memory, :output, [])
    output = [value | output]
    Map.put(memory, :output, output)
  end

  # Jump-if-true / jump-if-false: return the next instruction pointer.
  defp exec_if(op, modes, memory, ip) do
    [value, new_ip] = read_operand_values(memory, ip + 1, modes, 2)
    case op.(value) do
      true -> new_ip
      false -> ip + 3
    end
  end

  # Less-than / equals: store 1 or 0 at the destination address.
  defp exec_cond(op, modes, memory, ip) do
    [operand1, operand2] = read_operand_values(memory, ip + 1, modes, 2)
    out_addr = read(memory, ip + 3)
    result = case op.(operand1, operand2) do
      true -> 1
      false -> 0
    end
    write(memory, out_addr, result)
  end

  # Resolves n operands starting at addr. Mode digit 0 = position
  # (dereference), 1 = immediate; modes are consumed least significant
  # digit first.
  defp read_operand_values(_memory, _addr, _modes, 0), do: []

  defp read_operand_values(memory, addr, modes, n) do
    operand = read(memory, addr)
    operand = case rem(modes, 10) do
      0 -> read(memory, operand)
      1 -> operand
    end
    [operand | read_operand_values(memory, addr + 1, div(modes, 10), n - 1)]
  end

  # Splits an instruction into its two-digit opcode and the remaining
  # parameter-mode digits.
  defp fetch_opcode(memory, ip) do
    opcode = read(memory, ip)
    modes = div(opcode, 100)
    opcode = rem(opcode, 100)
    {opcode, modes}
  end

  defp set_input(memory, input) do
    Map.put(memory, :input, input)
  end

  defp read_output(memory), do: Map.get(memory, :output, [])

  defp reset_output(memory), do: Map.put(memory, :output, [])

  defp read(memory, addr) do
    Map.fetch!(memory, addr)
  end

  defp write(memory, addr, value) do
    Map.put(memory, addr, value)
  end

  # Parses "1,2,3" into %{0 => 1, 1 => 2, 2 => 3}.
  defp read_program(input) do
    String.split(input, ",")
    |> Stream.map(&String.to_integer/1)
    |> Stream.with_index
    |> Stream.map(fn {code, index} -> {index, code} end)
    |> Map.new
  end
end
|
day07/lib/day07.ex
| 0.505615 | 0.405949 |
day07.ex
|
starcoder
|
defmodule SAXMap do
  @moduledoc """
  XML to Map conversion.

  SAXMap uses a SAX parser (built on top of [Saxy](https://hex.pm/packages/saxy)) to transfer an XML string or file stream into a `Map` containing a collection
  of pairs where the key is the element name and the value is its content, and it is optional to process element attributes into the result.
  """

  @doc ~S'''
  Use `Saxy.parse_string/4` with a custom SAX parse handler to extract a `Map` containing a collection of pairs where the key is the element name
  and the value is its content, optionally appending the key-value pairs from the attributes of elements.

  ## Example

  Here is an example:

      iex> xml = """
      ...> <?xml version="1.0" encoding="UTF-8"?>
      ...> <thread>
      ...> <title>Hello</title>
      ...> <items>
      ...> <item>item1</item>
      ...> <item>item2</item>
      ...> </items>
      ...> </thread>
      ...> """
      iex> SAXMap.from_string(xml)
      {:ok,
       %{
         "thread" => %{"items" => %{"item" => ["item1", "item2"]}, "title" => "Hello"}
       }}

  ## Options

  * `:ignore_attribute`, whether to ignore the attributes of elements in the final map, by default is `true` so
    there will not see any attributes in the result; when set this option as `false`, it equals `{false, ""}`,
    in this case, there with append the attributes of all elements by the processing order, and put the attributes
    key-value pair into the peer child elements, and automatically naming child elements with "content",
    we can also set this option as `{false, "@"}` or `{false, "-"}`, any proper naming prefix you prefer should be fine to process.

  ```
  xml = """
  <thread version="1">
  <title color="red" font="16">Hello</title>
  <items size="3">
  <item font="12">item1</item>
  <item font="12">item2</item>
  <item font="12">item3</item>
  </items>
  </thread>
  """

  # set ignore_attribute: false
  SAXMap.from_string(xml, ignore_attribute: false)
  {:ok,
   %{
     "thread" => %{
       "content" => %{
         "items" => %{
           "content" => %{
             "item" => [
               %{"content" => "item1", "font" => "12"},
               %{"content" => "item2", "font" => "12"},
               %{"content" => "item3", "font" => "12"}
             ]
           },
           "size" => "3"
         },
         "title" => %{"color" => "red", "content" => "Hello", "font" => "16"}
       },
       "version" => "1"
     }
   }}

  # set ignore_attribute: {false, "@"}
  SAXMap.from_string(xml, ignore_attribute: {false, "@"})
  {:ok,
   %{
     "thread" => %{
       "@version" => "1",
       "content" => %{
         "items" => %{
           "@size" => "3",
           "content" => %{
             "item" => [
               %{"@font" => "12", "content" => "item1"},
               %{"@font" => "12", "content" => "item2"},
               %{"@font" => "12", "content" => "item3"}
             ]
           }
         },
         "title" => %{"@color" => "red", "@font" => "16", "content" => "Hello"}
       }
     }
   }}
  ```

  Please notice that the comments of XML are ignored.
  '''
  @spec from_string(xml :: String.t(), opts :: keyword()) ::
          {:ok, map :: map()} | {:error, exception :: Saxy.ParseError.t()}
  def from_string(xml, opts \\ []) do
    ignore_attribute = Keyword.get(opts, :ignore_attribute, true)
    parse_from_string(xml, ignore_attribute)
  end

  # One clause per supported :ignore_attribute shape; any other value
  # raises FunctionClauseError, surfacing the invalid option early.
  defp parse_from_string(xml, true) do
    Saxy.parse_string(xml, SAXMap.Handler, ignore_attribute: true)
  end

  defp parse_from_string(xml, false) do
    Saxy.parse_string(xml, SAXMap.Handler, ignore_attribute: false)
  end

  defp parse_from_string(xml, {false, attribute_prefix}) do
    Saxy.parse_string(xml, SAXMap.Handler, ignore_attribute: {false, attribute_prefix})
  end

  @doc ~S'''
  Use `Saxy.parse_stream/4` with a custom SAX parse handler to extract a `Map` containing a collection of pairs where the key is the element name
  and the value is its content, optionally appending the key-value pairs from the attributes of elements.

  ## Options

  Please see `from_string/2`
  '''
  @spec from_stream(stream :: Enumerable.t(), opts :: keyword()) ::
          {:ok, map :: map()} | {:error, exception :: Saxy.ParseError.t()}
  def from_stream(stream, opts \\ []) do
    ignore_attribute = Keyword.get(opts, :ignore_attribute, true)
    parse_from_stream(stream, ignore_attribute)
  end

  defp parse_from_stream(stream, true) do
    Saxy.parse_stream(stream, SAXMap.Handler, ignore_attribute: true)
  end

  defp parse_from_stream(stream, false) do
    Saxy.parse_stream(stream, SAXMap.Handler, ignore_attribute: false)
  end

  defp parse_from_stream(stream, {false, attribute_prefix}) do
    Saxy.parse_stream(stream, SAXMap.Handler, ignore_attribute: {false, attribute_prefix})
  end
end
|
lib/sax_map.ex
| 0.906005 | 0.764496 |
sax_map.ex
|
starcoder
|
defmodule Vex.Validators.Number do
  @moduledoc """
  Ensure a value is a number.

  ## Options

  At least one of the following must be provided:

  * `:is`: The value is a number (integer or float) or not.
  * `:equal_to`: The value is a number equal to this number.
  * `:greater_than` : The value is a number greater than this number.
  * `:greater_than_or_equal_to`: The value is a number greater than or equal to this number.
  * `:less_than` : The value is a number less than this number.
  * `:less_than_or_equal_to`: The value is a number less than or equal to this number.

  Optional:

  * `:message`: A custom error message. May be in EEx format and use the fields described
    in [Custom Error Messages](#module-custom-error-messages).
  * `:allow_nil`: A boolean whether to skip this validation for `nil` values.
  * `:allow_blank`: A boolean whether to skip this validation for blank values.

  The `:is` option can be provided in place of the keyword list if no other options are set.
  When multiple options are given, the validator applies `and` logic between them.

  ## Examples

  Examples when using the `:is` option:

      iex> Vex.Validators.Number.validate("not_a_number", is: true)
      {:error, "must be a number"}
      iex> Vex.Validators.Number.validate(3.14, is: true)
      :ok
      iex> Vex.Validators.Number.validate("not_a_number", is: false)
      :ok
      iex> Vex.Validators.Number.validate(3.14, is: false)
      {:error, "must not be a number"}

  Examples when using the boolean value in options for the `:is` option:

      iex> Vex.Validators.Number.validate("not_a_number", true)
      {:error, "must be a number"}
      iex> Vex.Validators.Number.validate(3.14, true)
      :ok
      iex> Vex.Validators.Number.validate("not_a_number", false)
      :ok
      iex> Vex.Validators.Number.validate(3.14, false)
      {:error, "must not be a number"}

  Examples when using the `:equal_to` option:

      iex> Vex.Validators.Number.validate(3.14, equal_to: 1.41)
      {:error, "must be a number equal to 1.41"}
      iex> Vex.Validators.Number.validate(3.14, equal_to: 3.14)
      :ok
      iex> Vex.Validators.Number.validate(3.14, equal_to: 6.28)
      {:error, "must be a number equal to 6.28"}

  Examples when using the `:greater_than` option:

      iex> Vex.Validators.Number.validate(3.14, greater_than: 1.41)
      :ok
      iex> Vex.Validators.Number.validate(3.14, greater_than: 3.14)
      {:error, "must be a number greater than 3.14"}
      iex> Vex.Validators.Number.validate(3.14, greater_than: 6.28)
      {:error, "must be a number greater than 6.28"}

  Examples when using the `:greater_than_or_equal_to` option:

      iex> Vex.Validators.Number.validate(3.14, greater_than_or_equal_to: 1.41)
      :ok
      iex> Vex.Validators.Number.validate(3.14, greater_than_or_equal_to: 3.14)
      :ok
      iex> Vex.Validators.Number.validate(3.14, greater_than_or_equal_to: 6.28)
      {:error, "must be a number greater than or equal to 6.28"}

  Examples when using the `:less_than` option:

      iex> Vex.Validators.Number.validate(3.14, less_than: 1.41)
      {:error, "must be a number less than 1.41"}
      iex> Vex.Validators.Number.validate(3.14, less_than: 3.14)
      {:error, "must be a number less than 3.14"}
      iex> Vex.Validators.Number.validate(3.14, less_than: 6.28)
      :ok

  Examples when using the `:less_than_or_equal_to` option:

      iex> Vex.Validators.Number.validate(3.14, less_than_or_equal_to: 1.41)
      {:error, "must be a number less than or equal to 1.41"}
      iex> Vex.Validators.Number.validate(3.14, less_than_or_equal_to: 3.14)
      :ok
      iex> Vex.Validators.Number.validate(3.14, less_than_or_equal_to: 6.28)
      :ok

  Examples when using the combinations of the above options:

      iex> Vex.Validators.Number.validate("not_a_number", is: true, greater_than: 0, less_than_or_equal_to: 3.14)
      {:error, "must be a number"}
      iex> Vex.Validators.Number.validate(0, is: true, greater_than: 0, less_than_or_equal_to: 3.14)
      {:error, "must be a number greater than 0"}
      iex> Vex.Validators.Number.validate(1.41, is: true, greater_than: 0, less_than_or_equal_to: 3.14)
      :ok
      iex> Vex.Validators.Number.validate(3.14, is: true, greater_than: 0, less_than_or_equal_to: 3.14)
      :ok
      iex> Vex.Validators.Number.validate(6.28, is: true, greater_than: 0, less_than_or_equal_to: 3.14)
      {:error, "must be a number less than or equal to 3.14"}

  ## Custom Error Messages

  Custom error messages (in EEx format), provided as :message, can use the following values:

      iex> Vex.Validators.Number.__validator__(:message_fields)
      [
        value: "Bad value",
        is: "Is number",
        equal_to: "Equal to number",
        greater_than: "Greater than number",
        greater_than_or_equal_to: "Greater than or equal to number",
        less_than: "Less than number",
        less_than_or_equal_to: "Less than or equal to number"
      ]

  An example:

      iex> Vex.Validators.Number.validate(3.14, less_than: 1.41,
      ...> message: "<%= inspect value %> should be less than <%= less_than %>")
      {:error, "3.14 should be less than 1.41"}
  """
  use Vex.Validator

  # Options that carry validation rules (as opposed to :message,
  # :allow_nil, :allow_blank, etc.).
  @option_keys [
    :is,
    :equal_to,
    :greater_than,
    :greater_than_or_equal_to,
    :less_than,
    :less_than_or_equal_to
  ]

  # Human-readable descriptions of the fields available to custom
  # EEx error messages (exposed via __validator__(:message_fields)).
  @message_fields [
    value: "Bad value",
    is: "Is number",
    equal_to: "Equal to number",
    greater_than: "Greater than number",
    greater_than_or_equal_to: "Greater than or equal to number",
    less_than: "Less than number",
    less_than_or_equal_to: "Less than or equal to number"
  ]

  # A bare boolean is shorthand for the :is option.
  def validate(value, options) when is_boolean(options) do
    validate(value, is: options)
  end

  def validate(value, options) when is_list(options) do
    unless_skipping value, options do
      # Apply each rule in order; stop at the first failure.
      Enum.reduce_while(options, :ok, fn
        {k, o}, _ when k in @option_keys ->
          case do_validate(value, k, o) do
            :ok ->
              {:cont, :ok}

            {:error, default_message} ->
              # Expose the configured rule options (plus the offending
              # value) to custom EEx messages. The stray
              # `Keyword.put(:less_than, options[:less_than])` that used
              # to follow injected `less_than: nil` even when that
              # option was never set; `Keyword.take/2` already keeps it
              # when present.
              fields =
                options
                |> Keyword.take(@option_keys)
                |> Keyword.put(:value, value)

              error = {:error, message(options, default_message, fields)}
              {:halt, error}
          end

        # Ignore non-rule options such as :message or :allow_nil.
        _, _ ->
          {:cont, :ok}
      end)
    end
  end

  # A rule explicitly set to nil is treated as absent.
  defp do_validate(_, _, nil), do: :ok

  # `is_number(v) === o` succeeds when the boolean test matches the
  # expected boolean, covering both `is: true` and `is: false`.
  defp do_validate(v, :is, o) when is_number(v) === o, do: :ok
  defp do_validate(_, :is, true), do: {:error, "must be a number"}
  defp do_validate(_, :is, false), do: {:error, "must not be a number"}

  # All remaining rules require a numeric bound.
  defp do_validate(_, k, o) when not is_number(o),
    do: raise("Invalid value #{inspect(o)} for option #{k}")

  defp do_validate(v, :equal_to, o) when is_number(v) and v == o, do: :ok
  defp do_validate(_, :equal_to, o), do: {:error, "must be a number equal to #{o}"}
  defp do_validate(v, :greater_than, o) when is_number(v) and v > o, do: :ok
  defp do_validate(_, :greater_than, o), do: {:error, "must be a number greater than #{o}"}
  defp do_validate(v, :greater_than_or_equal_to, o) when is_number(v) and v >= o, do: :ok

  defp do_validate(_, :greater_than_or_equal_to, o),
    do: {:error, "must be a number greater than or equal to #{o}"}

  defp do_validate(v, :less_than, o) when is_number(v) and v < o, do: :ok
  defp do_validate(_, :less_than, o), do: {:error, "must be a number less than #{o}"}
  defp do_validate(v, :less_than_or_equal_to, o) when is_number(v) and v <= o, do: :ok

  defp do_validate(_, :less_than_or_equal_to, o),
    do: {:error, "must be a number less than or equal to #{o}"}
end
|
lib/vex/validators/number.ex
| 0.93086 | 0.782787 |
number.ex
|
starcoder
|
defmodule RGBMatrix.Animation.Config do
  @moduledoc """
  Provides a behaviour and macros for defining animation configurations.
  """

  alias __MODULE__.FieldType

  require Logger

  # Registry of supported field types, keyed by the name used in `field`
  # definitions.
  @field_types %{
    integer: FieldType.Integer,
    option: FieldType.Option
  }

  @typedoc """
  A struct containing runtime configuration for a specific animation.
  Example:
  RGBMatrix.Animation.HueWave.Config
  RGBMatrix.Animation.SolidReactive.Config
  Configs should not be accessed or modified directly. Use the functions
  `Xebow.get_animation_config/0` and `Xebow.update_animation_config/1` for
  access and modification.
  """
  @type t :: struct

  @typedoc """
  A keyword list containing the configuration fields for an animation type.
  It provides the defaults for each field, the available parameters to configure
  (such as `:default`, `:min`, `:options`, and so on). It can provide `:doc`, a
  keyword list, for documentation such as a human-readable `:name` and
  `:description`.
  The keys are defined by the first atom, the name, provided to an Animation's
  `field` definition(s). The values are
  `t:RGBMatrix.Animation.Config.FieldType.t/0` types.
  The documentation is optional and will be initialized to an empty list if
  omitted.
  """
  @type schema :: keyword(FieldType.t())

  @typedoc """
  A map used during creation of an `Animation.<type>.Config`.
  The keys are defined by the first atom, the name, provided to an Animation's
  `field` definition(s) and must match the field being defined.
  The value should be appropriate for the specified field.
  """
  @type creation_params :: %{optional(atom) => FieldType.value()}

  @typedoc """
  A tuple of the form, `{name, field}`. `name` is one of the valid
  `FieldType` names and `field` is a
  `t:RGBMatrix.Animation.Config.FieldType.t/0` struct.
  """
  @type schema_field :: {name :: atom, FieldType.t()}

  @typedoc """
  A map used to update an `Animation.<type>.Config`.
  The keys are defined by the first atom, the name, provided to an Animation's
  `field` definition(s) and must match the field(s) being updated. The key may
  be a string or an atom.
  The value should be appropriate for the field.
  """
  @type update_params :: %{(atom | String.t()) => any}

  @callback schema() :: schema
  @callback new(%{optional(atom) => FieldType.value()}) :: t
  @callback update(t, %{optional(atom | String.t()) => any}) :: t

  @doc """
  Returns a map of field types provided by the Config module
  """
  @spec field_types :: %{atom => FieldType.submodule()}
  def field_types, do: @field_types

  # Invoked at the end of an Animation module's compilation; turns the
  # module's accumulated `@fields` attribute into a nested `Config`
  # struct module implementing this behaviour.
  defmacro __before_compile__(env) do
    schema = Module.get_attribute(env.module, :fields)
    keys = Keyword.keys(schema)
    # Escape the schema so it can be embedded in the quoted module body.
    schema = Macro.escape(schema)
    config_module = __MODULE__

    quote do
      defmodule Config do
        @moduledoc false
        @behaviour unquote(config_module)

        @enforce_keys unquote(keys)
        defstruct unquote(keys)

        @impl true
        def schema do
          unquote(schema)
        end

        @impl true
        def new(params \\ %{}) do
          schema = schema()
          unquote(config_module).new_config(__MODULE__, schema, params)
        end

        @impl true
        def update(config, params) do
          schema = schema()
          unquote(config_module).update_config(config, schema, params)
        end
      end
    end
  end

  @doc """
  Creates a new `t:t/0` struct belonging to the provided
  `Animation.<type>.Config` module.
  The provided Config must be defined through the `use Animation` and `field`
  macros in an `Animation.<type>` module.
  The params provided are a map of `t:creation_params/0`.
  Returns a `t:t/0` struct.
  Example:
      iex> module = RGBMatrix.Animation.HueWave.Config
      iex> schema = module.schema()
      iex> params = %{direction: :up, width: 30}
      iex> RGBMatrix.Animation.Config.new_config(module, schema, params)
      %RGBMatrix.Animation.HueWave.Config{direction: :up, speed: 4, width: 30}
  The above example shows setting the direction and width to non-default values.
  Any invalid keys in the `t:creation_params/0` map will cause that param to be
  ignored. Invalid values for fields will be ignored. In both cases, the default
  provided to the type will be used as the initial value for that field.
  All errors will be logged.
  """
  @spec new_config(
          module :: module,
          schema :: schema,
          params :: creation_params
        ) :: t
  def new_config(module, schema, params) do
    # Validate each schema field against the params; invalid or missing
    # values fall back to the field's default (with a logged warning).
    schema =
      schema
      |> Enum.map(fn schema_field -> validate_field(schema_field, params) end)
      |> Map.new()

    struct!(module, schema)
  end

  @doc """
  Updates the provided `t:t/0` struct using the provided schema and params.
  The params are a map of `t:update_params/0`.
  Configs must be retrieved through the use of `Xebow.get_animation_config/0`,
  which will return both the config and the schema.
  Returns the updated `t:t/0` struct.
  Example:
      iex> {config, schema} = Xebow.get_animation_config()
      iex> params = %{"direction" => "left", speed: 8}
      iex> RGBMatrix.Animation.Config.update_config(config, schema, params)
      %RGBMatrix.Animation.HueWave.Config{direction: :left, speed: 8, width: 20}
  The above example shows updating the direction and speed.
  Any errors encountered during update are logged, and the struct is returned
  unchanged.
  """
  @spec update_config(
          config :: t,
          schema :: schema,
          params :: update_params
        ) :: t
  def update_config(config, schema, params) do
    params
    |> Enum.reduce(config, fn param, config ->
      cast_and_update_field(param, config, schema)
    end)
  end

  @spec cast_and_update_field(
          param :: {atom | String.t(), any},
          config :: t,
          schema :: schema
        ) :: t
  # Applies a single {key, value} param to the config; on any error the
  # config is returned unchanged and the problem is logged.
  defp cast_and_update_field({key, value} = _param, config, schema) do
    with {:ok, key} <- create_atom_key(key),
         {:ok, %type_module{} = type} <- fetch_type_from_schema(schema, key),
         {:ok, value} <- type_module.cast(type, value) do
      Map.put(config, key, value)
    else
      {:error, reason} ->
        field_warn(reason, key, value)
        config
    end
  end

  @spec validate_field(schema_field, creation_params) :: {atom, FieldType.value()}
  # Returns {key, value} for struct creation, substituting the field's
  # default (and logging) when the provided value fails validation.
  defp validate_field({key, %type_module{} = type} = _schema_field, params) do
    value = Map.get(params, key, type.default)

    case type_module.validate(type, value) do
      :ok ->
        {key, value}

      {:error, reason} ->
        field_warn(reason, key, value)
        {key, type.default}
    end
  end

  @spec create_atom_key(key :: atom) ::
          {:ok, key :: atom} | {:error, :undefined_field}
  defp create_atom_key(key) when is_atom(key), do: {:ok, key}

  @spec create_atom_key(key :: String.t()) ::
          {:ok, key :: atom} | {:error, :undefined_field}
  # Uses String.to_existing_atom/1 so arbitrary user-supplied keys
  # cannot create new atoms; unknown keys become :undefined_field.
  defp create_atom_key(key) when is_binary(key) do
    {:ok, String.to_existing_atom(key)}
  rescue
    ArgumentError ->
      {:error, :undefined_field}
  end

  @spec fetch_type_from_schema(schema, atom) ::
          {:ok, FieldType.t()} | {:error, :undefined_field}
  defp fetch_type_from_schema(schema, key) do
    case Keyword.fetch(schema, key) do
      {:ok, type} -> {:ok, type}
      :error -> {:error, :undefined_field}
    end
  end

  @spec field_warn(
          reason :: FieldType.error(),
          key :: atom | String.t(),
          value :: any
        ) :: :ok
  # One clause per FieldType error reason; all log and return :ok.
  defp field_warn(:wrong_type, key, value) do
    Logger.warn("#{inspect(value)} is not the correct type for #{key}")
  end

  defp field_warn(:undefined_field, key, _value) do
    Logger.warn("#{inspect(key)} is an undefined field identifier")
  end

  defp field_warn(:invalid_value, key, value) do
    Logger.warn("#{inspect(value)} is an invalid value for #{key}")
  end
end
|
lib/rgb_matrix/animation/config.ex
| 0.945223 | 0.661622 |
config.ex
|
starcoder
|
defmodule Prometheus.Model do
  @moduledoc """
  Helpers for working with Prometheus data model. For advanced users.
  `Prometheus.Collector` example demonstrates how to use this module.
  """

  # `delegate/1` comes from Prometheus.Erlang and generates a function
  # that forwards to the same-named function in the
  # :prometheus_model_helpers Erlang module.
  use Prometheus.Erlang, :prometheus_model_helpers

  @doc """
  Creates Metric Family of `type`, `name` and `help`.
  `collector.collect_metrics/2` callback will be called and expected to
  return individual metrics list.
  """
  delegate create_mf(name, help, type, collector, collector_data)

  @doc """
  Creates gauge metrics from `mdata` {label, value} tuple list.
      iex(11)> Prometheus.Model.gauge_metrics([{[host: "example.com"], 100}])
      [{:Metric, [{:LabelPair, "host", "example.com"}], {:Gauge, 100}, :undefined,
        :undefined, :undefined, :undefined, :undefined}]
  """
  delegate gauge_metrics(mdata)

  @doc """
  Creates gauge metric with `labels` and `value`.
      iex(13)> Prometheus.Model.gauge_metric([host: "example.com"], 100)
      {:Metric, [{:LabelPair, "host", "example.com"}], {:Gauge, 100}, :undefined,
       :undefined, :undefined, :undefined, :undefined}
  """
  delegate gauge_metric(labels \\ [], value)

  @doc """
  Creates untyped metrics from `mdata` {label, value} tuple list.
      iex(11)> Prometheus.Model.untyped_metrics([{[host: "example.com"], 100}])
      [{:Metric, [{:LabelPair, "host", "example.com"}], :undefined,
        :undefined, :undefined, {:Untyped, 100}, :undefined, :undefined}]
  """
  delegate untyped_metrics(mdata)

  @doc """
  Creates untyped metric with `labels` and `value`.
      iex(13)> Prometheus.Model.untyped_metric([host: "example.com"], 100)
      {:Metric, [{:LabelPair, "host", "example.com"}], :undefined,
       :undefined, :undefined, {:Untyped, 100}, :undefined, :undefined}
  """
  delegate untyped_metric(labels \\ [], value)

  @doc """
  Creates counter metrics from `mdata` {labels, value} tuple list.
      iex(14)> Prometheus.Model.counter_metrics([{[host: "example.com"], 100}])
      [{:Metric, [{:LabelPair, "host", "example.com"}], :undefined, {:Counter, 100},
        :undefined, :undefined, :undefined, :undefined}]
  """
  delegate counter_metrics(mdata)

  @doc """
  Creates counter metric with `labels` and `value`.
      iex(15)> Prometheus.Model.counter_metric([host: "example.com"], 100)
      {:Metric, [{:LabelPair, "host", "example.com"}], :undefined, {:Counter, 100},
       :undefined, :undefined, :undefined, :undefined}
  """
  delegate counter_metric(labels \\ [], value)

  @doc """
  Creates summary metrics from `mdata` {labels, count, sum} tuple list.
      iex(7)> Prometheus.Model.summary_metrics([{[{:method, :get}], 2, 10.5}])
      [{:Metric, [{:LabelPair, "method", "get"}], :undefined, :undefined,
        {:Summary, 2, 10.5, []}, :undefined, :undefined, :undefined}]
  """
  delegate summary_metrics(mdata)

  @doc """
  Creates summary metric with `labels`, `count`, and `sum`.
      iex(3)> Prometheus.Model.summary_metric([{:method, :get}], 2, 10.5)
      {:Metric, [{:LabelPair, "method", "get"}], :undefined, :undefined,
       {:Summary, 2, 10.5, []}, :undefined, :undefined, :undefined}
  """
  delegate summary_metric(labels \\ [], count, sum)

  @doc """
  Creates histogram metrics from `mdata` {labels, buckets, count, sum} tuple list.
      iex(2)> Prometheus.Model.histogram_metrics([{[{:method, :get}],
      ...(2)> [{2, 1}, {5, 1}, {:infinity, 2}],
      ...(2)> 2, 10.5}])
      [{:Metric, [{:LabelPair, "method", "get"}], :undefined, :undefined, :undefined,
        :undefined,
        {:Histogram, 2, 10.5,
         [{:Bucket, 1, 2}, {:Bucket, 1, 5}, {:Bucket, 2, :infinity}]}, :undefined}]
  """
  delegate histogram_metrics(mdata)

  @doc """
  Creates histogram metric with `labels`, `buckets`, `count`, and `sum`.
      iex(4)> Prometheus.Model.histogram_metric([{:method, :get}],
      ...(4)> [{2, 1}, {5, 1}, {:infinity, 2}],
      ...(4)> 2, 10.5)
      {:Metric, [{:LabelPair, "method", "get"}], :undefined, :undefined, :undefined,
       :undefined,
       {:Histogram, 2, 10.5,
        [{:Bucket, 1, 2}, {:Bucket, 1, 5}, {:Bucket, 2, :infinity}]}, :undefined}
  Buckets is a list of pairs {upper_bound, cumulative_count}.
  Cumulative count is a sum of all cumulative_counts of previous buckets + counter of
  current bucket.
  """
  delegate histogram_metric(labels \\ [], buckets, count, sum)
end
|
astreu/deps/prometheus_ex/lib/prometheus/model.ex
| 0.934425 | 0.583648 |
model.ex
|
starcoder
|
defmodule Problem11 do
  @moduledoc """
  Project Euler problem 11: find the greatest product of four adjacent
  numbers (right, down, or along either diagonal) in a 20x20 grid read
  from `./lib/problem011Grid.txt`.
  """

  @doc """
  Returns the largest product of four adjacent grid values.
  """
  def solve do
    grid = get_grid()

    for x <- 0..19, y <- 0..19 do
      target_to_products(grid, x, y)
    end
    |> List.flatten()
    |> Enum.reduce(0, &max/2)
  end

  # Reads the grid file into a tuple of 20 row tuples for O(1) `elem/2`
  # access by (x, y) coordinates.
  defp get_grid do
    File.read!("./lib/problem011Grid.txt")
    |> String.split("\n")
    |> Enum.map(&String.split(&1, " "))
    |> Enum.reject(&(&1 == [""]))
    |> Enum.map(fn row -> Enum.map(row, &String.to_integer/1) end)
    |> Enum.map(&List.to_tuple/1)
    |> List.to_tuple()
  end

  # Products of the four runs of cells starting at (x, y); a run that
  # falls outside the grid contributes 0.
  defp target_to_products(grid, x, y) do
    [
      neighbors_right(grid, x, y),
      neighbors_down(grid, x, y),
      neighbors_diagonal_left(grid, x, y),
      neighbors_diagonal_right(grid, x, y)
    ]
    |> Enum.map(fn
      {:ok, a, b, c, d} -> a * b * c * d
      {:error, _} -> 0
    end)
  end

  # Cell lookup; raises (ArgumentError) for coordinates outside the
  # tuple bounds, which neighbors/2 converts into an {:error, _} result.
  defp at(grid, x, y) do
    grid
    |> elem(y)
    |> elem(x)
  end

  # Fetches four cells at once, mapping any out-of-bounds access to an
  # error tuple instead of crashing.
  defp neighbors(grid, coords) do
    try do
      [a, b, c, d] = Enum.map(coords, fn {x, y} -> at(grid, x, y) end)
      {:ok, a, b, c, d}
    rescue
      _ -> {:error, "Out of grid bounds"}
    end
  end

  defp neighbors_right(grid, x, y),
    do: neighbors(grid, for(i <- 0..3, do: {x + i, y}))

  defp neighbors_down(grid, x, y),
    do: neighbors(grid, for(i <- 0..3, do: {x, y + i}))

  defp neighbors_diagonal_left(grid, x, y),
    do: neighbors(grid, for(i <- 0..3, do: {x + i, y - i}))

  defp neighbors_diagonal_right(grid, x, y),
    do: neighbors(grid, for(i <- 0..3, do: {x + i, y + i}))
end
# Script entry point: compute and print the answer when this file is run.
IO.puts "Problem 11 : #{Problem11.solve}"
|
elixir/lib/problem011.ex
| 0.613005 | 0.640748 |
problem011.ex
|
starcoder
|
defmodule Membrane.Element.Base.Mixin.SourceBehaviour do
  @moduledoc """
  Module defining behaviour for source and filter elements.
  When used declares behaviour implementation, provides default callback definitions
  and imports macros.
  For more information on implementing elements, see `Membrane.Element.Base`.
  """

  alias Membrane.{Buffer, Caps, Element}
  alias Element.{CallbackContext, Pad}
  alias Element.Base.Mixin.CommonBehaviour

  @doc """
  Callback that defines what source pads may be ever available for this
  element type.
  The default name for generic source pad, in elements that just produce some
  buffers is `:source`.
  """
  @callback known_source_pads() :: [Element.source_pad_specs_t()]

  @doc """
  Callback that is called when buffers should be emitted by the source or filter.
  It will be called only for source pads in the pull mode, as in their case demand
  is triggered by the sinks of the subsequent elements.
  In source elements, appropriate amount of data should be sent here. If it happens
  not to be yet available, element should store unsupplied demand and supply it
  when possible.
  In filter elements, this callback should usually return `:demand` action with
  size sufficient (at least approximately) for supplying incoming demand. This
  will result with calling `c:Membrane.Element.Base.Filter.handle_process/4` or
  `c:Membrane.Element.Base.Sink.handle_write/4`, which is to supply
  the demand. If it does not, or does only partially,
  `c:Membrane.Element.Base.Mixin.SourceBehaviour.handle_demand/5` is called
  again, until there is any data available on the sink pad.
  For sources in the push mode, element should generate buffers without this
  callback.
  """
  @callback handle_demand(
              pad :: Pad.name_t(),
              size :: non_neg_integer,
              unit :: Buffer.Metric.unit_t(),
              context :: CallbackContext.Demand.t(),
              state :: Element.state_t()
            ) :: CommonBehaviour.callback_return_t()

  @doc """
  Macro that defines known source pads for the element type.
  Allows to use `one_of/1` and `range/2` functions from `Membrane.Caps.Matcher`
  without module prefix.
  It automatically generates documentation from the given definition
  and adds compile-time caps specs validation.
  """
  defmacro def_known_source_pads(raw_source_pads) do
    # Rewrite bare one_of/1 and range/2 calls in the pad definition into
    # fully-qualified Caps.Matcher calls before embedding the specs.
    source_pads =
      raw_source_pads
      |> Bunch.Macro.inject_calls([
        {Caps.Matcher, :one_of},
        {Caps.Matcher, :range}
      ])

    quote do
      @doc """
      Returns all known source pads for #{inspect(__MODULE__)}
      They are the following:
      #{unquote(source_pads) |> Membrane.Core.Helper.Doc.generate_known_pads_docs()}
      """
      @spec known_source_pads() :: [Membrane.Element.source_pad_specs_t()]
      @impl true
      def known_source_pads(), do: unquote(source_pads)

      # Validate every pad's caps spec once the using module is
      # compiled, so invalid specs fail the build rather than surfacing
      # at runtime.
      @after_compile {__MODULE__, :__membrane_source_caps_specs_validation__}
      def __membrane_source_caps_specs_validation__(env, _bytecode) do
        pads_list = env.module.known_source_pads() |> Enum.to_list() |> Keyword.values()

        for {_, _, caps_spec} <- pads_list do
          with :ok <- caps_spec |> Caps.Matcher.validate_specs() do
            :ok
          else
            {:error, reason} -> raise "Error in source caps spec: #{inspect(reason)}"
          end
        end
      end
    end
  end

  defmacro __using__(_) do
    quote location: :keep do
      @behaviour unquote(__MODULE__)

      import unquote(__MODULE__), only: [def_known_source_pads: 1]
    end
  end
end
|
lib/membrane/element/base/mixin/source_behaviour.ex
| 0.854445 | 0.424591 |
source_behaviour.ex
|
starcoder
|
defmodule Calixir.SampleDates do
  @moduledoc """
  The main purpose of this module is to provide test data for functions
  written in Elixir or Erlang to convert dates from one calendar into the
  corresponding dates in another calendar.

  This module provides the following calendrica-4.0 dates as Elixir data:

  *Sample Dates*: they contain 33 dates from various years in seven files
  (see DR4, pp. 447 - 453):

  `dates1.csv`, `dates2.csv`, `dates3.csv`, `dates4.csv`,
  `dates5.csv`, `dates6.csv`, `dates7.csv`

  Both the year dates and the sample dates cover the same calendars.
  Thus they can be combined into a single large data table
  that contains 33 rows from the `dates*.csv` files and
  365/366 rows per year from the `201*.csv` files.
  One cell or entry of this table contains the date of one calendar
  on a specific day.

  ## Date Structures

  There are a couple of nested data structures you need to understand
  in order to use the functions. From smallest to largest:

  ### `calendar`

  This is an atom, that represents the name of a calendar.

  ### `date`

  This is a number or a tuple.
  A `date` expresses a single date of some calendar.
  It is only useful if combined with a calendar.

  ### `caldate`

  This is a tuple, beginning with a calendar, followed by a date
  of that calendar:

  `caldate = {:calendar, {:calendar_field1, :calendar_field2, ...}}`, i.e.
  `caldate = {:gregorian, {2010, 1, 1}}`

  ### `caldate_row`

  This is a list of `caldate`s of a specific day across all the calendars
  discussed in DR4.

  `[caldate_1, caldate_2, caldate_3, ...]`, i.e.
  `[{:fixed, 2345678}, {:gregorian, {2010, 1, 1}}, ...]`

  ### `caldate_table`

  This is the table of all `caldate_row`s.
  It is the largest data structure that contains all the others.

  **NOTE:** All data in the `caldate_table` are **static**.
  The examples given only work with these data.
  They don't perform date calculations or date conversions!
  The sample data have been precalculated by the functions of the
  original Lisp program `calendrica-4.0.cl` and provided in the
  files mentioned above.
  """

  import Calixir.SampleDatesTableMaker

  # Compile-time load: the sample CSV files are parsed once at compile time
  # and baked into module attributes.
  @sample_files Enum.map(1..7, &("dates#{&1}.csv"))
  @sample_dict sample_dict_from_files(@sample_files)
  @caldate_table @sample_dict.dates
  @caldate_structs @sample_dict.caldate_structs
  @calendars @sample_dict.calendars

  @doc """
  Returns a list of the date structures used in calendrica-4.0.

  Each `date_struct` is a list, beginning with a calendar and followed
  by one or more fields defining a date of that calendar.
  In other words: The `date_struct` defines a `caldate`:

  date_struct: `{:gregorian, {:gregorian_year, :gregorian_month, :gregorian_day}}`
  caldate: `{:gregorian, {2010, 1, 1}}`

  date_struct: `{:fixed, :fixed}`
  caldate: `{:fixed, 2345678}`

  The date structs are derived from the two header lines (calendars and fields)
  of the files containing the year dates and the sample dates.
  """
  def caldate_structs, do: @caldate_structs

  @doc """
  Returns the complete list of caldates.
  """
  def caldate_table, do: @caldate_table

  @doc """
  Returns a list of the calendars used in calendrica-4.0 in the order
  in which they appear in the sample files.

  **NOTE**: Not all results provided by this function are calendars proper.
  Some are just sub-structures (like `chinese_day_name`) or repeating data
  (like `weekday`), or astro data (like `solstice`).

  The calendars are derived from the first two header lines of the files
  containing the year dates and the sample dates.
  """
  def calendars, do: Enum.uniq(@calendars)

  @doc """
  Outputs a list of the calendars used in calendrica-4.0
  in the alphabetic order. Returns `:ok`.
  ```
  akan_name
  arithmetic_french
  arithmetic_persian
  armenian
  astro_bahai
  astro_hindu_lunar
  astro_hindu_solar
  astronomical_easter
  aztec_tonalpohualli
  aztec_xihuitl
  babylonian
  bahai
  bali_pawukon
  chinese
  chinese_day_name
  coptic
  dawn
  easter
  egyptian
  ephem_corr
  eqn_of_time
  ethiopic
  fixed
  french
  gregorian
  hebrew
  hindu_lunar
  hindu_solar
  icelandic
  islamic
  iso
  jd
  julian
  lunar_alt
  lunar_lat
  lunar_long
  mayan_haab
  mayan_long_count
  mayan_tzolkin
  midday
  mjd
  moonrise
  moonset
  new_moon_after
  next_zhongqi
  observational_hebrew
  observational_islamic
  old_hindu_lunar
  old_hindu_solar
  olympiad
  orthodox_easter
  persian
  roman
  samaritan
  saudi_islamic
  set
  solar_long
  solstice
  tibetan
  unix
  weekday
  ```
  """
  def calendars_to_io() do
    # Enum.each (not Enum.map): this is run purely for the side effect of
    # printing, so no list of :ok values should be built and returned.
    calendars()
    |> Enum.sort()
    |> Enum.each(&IO.puts/1)
  end

  @doc """
  Returns all the calendar dates at fixed date `fixed`.
  """
  def caldates_at_fixed(fixed) do
    Enum.find(caldate_table(), &({:fixed, fixed} in &1))
  end

  @doc """
  Returns all the calendar dates at Gregorian date `{year, month, day}`.
  """
  def caldates_at_gregorian(year, month, day) do
    Enum.find(caldate_table(), &({:gregorian, {year, month, day}} in &1))
  end

  @doc """
  Returns all the calendar dates at Julian Day Number `jd`.
  """
  def caldates_at_jd(jd) do
    Enum.find(caldate_table(), &({:jd, jd} in &1))
  end

  @doc """
  Returns all caldates of `calendar`.
  """
  def caldates_of_calendar(calendar) do
    Enum.map(caldate_table(), &(caldate_in_row(calendar, &1)))
  end

  @doc """
  Returns the `calendar` part of the calendar date structure `caldate`.
  """
  def calendar_of_caldate({calendar, _date} = _caldate), do: calendar

  @doc """
  Returns a list of pairs (= tuples) with a calendar date of `calendar1`
  and the corresponding calendar date of `calendar2`.

  Used to create date pairs for testing date-to-date conversions, i.e.:

  `calendar_to_calendar(:fixed, :gregorian)`
  `calendar_to_calendar(:gregorian, :hebrew)`
  """
  def calendar_to_calendar(calendar1, calendar2) do
    dates1 = dates_of_calendar(calendar1)
    dates2 = dates_of_calendar(calendar2)
    Enum.zip(dates1, dates2)
  end

  @doc """
  Returns the `date` part of the calendar date structure `caldate`.
  Single-value dates are returned as atoms.
  Multi-value dates are returned as tuples.
  """
  def date_of_caldate({_calendar, date} = _caldate), do: date

  @doc """
  Returns all the dates of `calendar`.
  """
  def dates_of_calendar(calendar) do
    Enum.map(caldate_table(), &(date_in_row(calendar, &1)))
  end

  @doc """
  Returns a list of pairs (= tuples) with a calendar date of a fixed date
  (aka Rata Die or R.D. of DR4) and the equivalent calendar date of `calendar`.
  Per default, the function returns the first 33 entries of the data table.
  These 33 rows are the data used in the Sample Data of DR4 445ff.
  This is a convenience function.
  """
  def fixed_with(calendar) do
    dates1 = dates_of_calendar(:fixed)
    dates2 = dates_of_calendar(calendar)
    Enum.zip(dates1, dates2) |> Enum.take(33)
  end

  @doc """
  Returns the `caldate` structure of `calendar` in the `caldate_row`.
  """
  def caldate_in_row(calendar, caldate_row) do
    Enum.find(caldate_row, &(elem(&1, 0) == calendar))
  end

  @doc """
  Returns the date of the `caldate` of `calendar` in the `caldate_row`.
  """
  def date_in_row(calendar, caldate_row) do
    calendar
    |> caldate_in_row(caldate_row)
    |> date_of_caldate
  end

  @doc """
  Returns a list of pairs (= tuples) with a calendar date of the Julian Day
  Number and the equivalent calendar date of `calendar`.
  This is a convenience function.
  """
  def jd_with(calendar) do
    dates1 = dates_of_calendar(:jd)
    dates2 = dates_of_calendar(calendar)
    Enum.zip(dates1, dates2)
  end
end
|
lib/calixir/sample_dates.ex
| 0.925919 | 0.903847 |
sample_dates.ex
|
starcoder
|
defmodule Versioning.Adapter.Date do
  @moduledoc """
  A versioning adapter for date-based versions.

  Under the hood, this adapter uses the `Date` module. For details on the rules
  that are used for parsing and comparison, please see the `Date` module.

  ## Example

      defmodule MyApp.Versioning do
        use Versioning.Schema, adapter: Versioning.Adapter.Date

        version "2019-01-01" do
          type "Post" do
            change(MyApp.Change)
          end
        end
      end

  """

  @behaviour Versioning.Adapter

  @doc """
  Parses date based versions using ISO8601 formatting.

  ## Example

      iex> Versioning.Adapter.Date.parse("2019-01-01")
      {:ok, ~D[2019-01-01]}

      iex> Versioning.Adapter.Date.parse("foo")
      :error

  """
  @impl Versioning.Adapter
  @spec parse(binary() | Date.t()) :: :error | {:ok, Date.t()}
  def parse(version) when is_binary(version) do
    # Normalize Date.from_iso8601/1's {:error, reason} into a bare :error.
    case Date.from_iso8601(version) do
      {:ok, _} = result -> result
      _ -> :error
    end
  end

  # Already-parsed dates pass through unchanged.
  def parse(%Date{} = version) do
    {:ok, version}
  end

  # Anything that is neither a binary nor a Date cannot be a version.
  def parse(_) do
    :error
  end

  @doc """
  Compares date based versions using ISO8601 formatting.

  Returns `:gt` if the first version is greater than the second, and `:lt` for
  vice-versa. If the two versions are equal, `:eq` is returned. Returns `:error`
  if the version cannot be parsed.

  ## Example

      iex> Versioning.Adapter.Date.compare("2019-01-01", "2018-12-31")
      :gt

      iex> Versioning.Adapter.Date.compare("2018-12-31", "2019-01-01")
      :lt

      iex> Versioning.Adapter.Date.compare("2019-01-01", "2019-01-01")
      :eq

      iex> Versioning.Adapter.Date.compare("foo", "bar")
      :error

  """
  @impl Versioning.Adapter
  @spec compare(binary() | Date.t(), binary() | Date.t()) :: :gt | :lt | :eq | :error
  def compare(version1, version2) when is_binary(version1) and is_binary(version2) do
    # A failed parse falls through the `with` and is returned as :error.
    with {:ok, version1} <- parse(version1),
         {:ok, version2} <- parse(version2) do
      compare(version1, version2)
    end
  end

  def compare(%Date{} = version1, %Date{} = version2) do
    Date.compare(version1, version2)
  rescue
    # Keep the documented contract: any comparison failure yields :error.
    _ -> :error
  end

  def compare(_version1, _version2) do
    :error
  end
end
|
lib/versioning/adapter/date.ex
| 0.899261 | 0.491151 |
date.ex
|
starcoder
|
defmodule Day12 do
  @moduledoc """
  Advent of Code, Day 12: Digital Plumber.

  Builds an Erlang `:digraph` from the puzzle input, where each line of the
  form `a <-> b, c` adds an edge from `a` to every program it lists (the input
  itself lists both directions of each pipe). Part A counts the programs
  reachable from program 0; part B counts the connected components.
  """

  # Reachable set from program 0 on the small example input.
  def test_a do
    graph = common_a("res/day12_test.input")
    :digraph_utils.reachable([0], graph)
  end

  # Size of the group that contains program 0.
  def part_a do
    graph = common_a("res/day12.input")
    length(:digraph_utils.reachable([0], graph))
  end

  # Connected components of the small example input.
  def test_b do
    graph = common_a("res/day12_test.input")
    :digraph_utils.components(graph)
  end

  # Total number of groups in the real input.
  def part_b do
    graph = common_a("res/day12.input")
    length(:digraph_utils.components(graph))
  end

  # Reads `file`, tokenizes each line on commas/spaces, and folds every
  # instruction into a freshly created digraph, which is returned.
  defp common_a(file) do
    graph = :digraph.new()

    file
    |> File.read!()
    |> String.split("\n")
    |> Enum.map(&String.split(&1, [",", " "], trim: true))
    |> Enum.reduce(graph, fn tokens, g -> process_instruction(tokens, g, :undefined) end)
  end

  # End of a line: nothing left to add.
  defp process_instruction([], graph, _current) do
    graph
  end

  # `id <-> ...`: register the left-hand vertex and remember it as the
  # current source for the neighbours that follow.
  defp process_instruction([vertex, "<->" | rest], graph, _current) do
    id = String.to_integer(vertex)
    id = :digraph.add_vertex(graph, id)
    process_instruction(rest, graph, id)
  end

  # A neighbour of the current vertex: register it and draw the edge.
  defp process_instruction([neighbour | rest], graph, current) do
    id = String.to_integer(neighbour)
    id = :digraph.add_vertex(graph, id)
    :digraph.add_edge(graph, current, id)
    process_instruction(rest, graph, current)
  end
end
|
lib/day12.ex
| 0.749821 | 0.838481 |
day12.ex
|
starcoder
|
defmodule Advent.Day12 do
  @moduledoc """
  Advent of Code 2020, Day 12: Rain Risk.

  Part 1 steers a ship that carries an absolute heading; part 2 steers a ship
  toward a waypoint held relative to it. Both answers are the Manhattan
  distance of the final location from the origin.
  """

  defmodule Ship do
    # `direction` is a unit step vector, initially east.
    defstruct [:location, :direction]

    def new, do: %__MODULE__{location: {0, 0}, direction: {1, 0}}

    # Translate the ship itself.
    def action(%__MODULE__{location: {x, y}} = ship, {:move, {dx, dy}}) do
      %{ship | location: {x + dx, y + dy}}
    end

    # Turn the heading counter-clockwise.
    def action(%__MODULE__{} = ship, {:rotate, angle}) do
      %{ship | direction: Advent.Day12.rotate(ship.direction, angle)}
    end

    # Forward = move along the heading, scaled by `units`.
    def action(%__MODULE__{direction: {dx, dy}} = ship, {:forward, units}) do
      action(ship, {:move, {dx * units, dy * units}})
    end
  end

  defmodule WaypointShip do
    # `waypoint` is kept relative to the ship, initially 10 east / 1 north.
    defstruct [:location, :waypoint]

    def new, do: %__MODULE__{location: {0, 0}, waypoint: {10, 1}}

    # Moves translate the waypoint, not the ship.
    def action(%__MODULE__{waypoint: {wx, wy}} = ship, {:move, {dx, dy}}) do
      %{ship | waypoint: {wx + dx, wy + dy}}
    end

    # Rotations spin the waypoint about the ship.
    def action(%__MODULE__{waypoint: waypoint} = ship, {:rotate, angle}) do
      %{ship | waypoint: Advent.Day12.rotate(waypoint, angle)}
    end

    # Forward = jump toward the waypoint `times` times.
    def action(%__MODULE__{location: {x, y}, waypoint: {wx, wy}} = ship, {:forward, times}) do
      %{ship | location: {x + wx * times, y + wy * times}}
    end
  end

  @doc """
  Rotates `{x, y}` counter-clockwise by `angle` degrees (90, 180 or 270 only).
  """
  def rotate({x, y}, angle) do
    case angle do
      90 -> {-y, x}
      180 -> {-x, -y}
      270 -> {y, -x}
    end
  end

  @doc """
  Manhattan distance of a ship's location from the origin.
  """
  def manhattan_distance(%{location: {x, y}}) do
    abs(x) + abs(y)
  end

  # Translates the raw puzzle text into a list of action tuples.
  defp input_to_directions(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.map(&parse_line/1)
  end

  # One line = one letter + an integer amount.
  defp parse_line(line) do
    {letter, digits} = String.split_at(line, 1)
    amount = String.to_integer(digits)

    case letter do
      "N" -> {:move, {0, amount}}
      "S" -> {:move, {0, -amount}}
      "E" -> {:move, {amount, 0}}
      "W" -> {:move, {-amount, 0}}
      "L" -> {:rotate, amount}
      # Right turns are expressed as the complementary left turn.
      "R" -> {:rotate, 360 - amount}
      "F" -> {:forward, amount}
    end
  end

  @doc """
  Solves part 1: run every instruction on a heading-based ship.
  """
  def part_1(input) do
    ship =
      input
      |> input_to_directions()
      |> Enum.reduce(Ship.new(), fn direction, ship -> Ship.action(ship, direction) end)

    manhattan_distance(ship)
  end

  @doc """
  Solves part 2: run every instruction on a waypoint-based ship.
  """
  def part_2(input) do
    ship =
      input
      |> input_to_directions()
      |> Enum.reduce(WaypointShip.new(), fn direction, ship ->
        WaypointShip.action(ship, direction)
      end)

    manhattan_distance(ship)
  end
end
|
shritesh+elixir/lib/advent/day_12.ex
| 0.801975 | 0.691452 |
day_12.ex
|
starcoder
|
defmodule ElixirDbf.Table do
  @moduledoc """
  ElixirDbf table module
  """

  # Expected {input, output} buffer state of a fully-drained StringIO device.
  @empty_stream {"", ""}

  @enforce_keys [:header, :rows]
  defstruct [:header, :rows]

  alias ElixirDbf.Header

  # Parses one row from an already-open StringIO device. The `after` clause
  # always closes the device and asserts that the row parser consumed the
  # entire block (both StringIO buffers empty).
  def parse_row({:ok, stream}, columns, encoding) do
    ElixirDbf.Row.parse(stream, columns, encoding)
  after
    {:ok, @empty_stream} = StringIO.close(stream)
  end

  # Wraps a raw record-sized binary block into a StringIO device and
  # delegates to the clause above.
  def parse_row(block, columns, encoding) do
    block
    |> StringIO.open()
    |> parse_row(columns, encoding)
  end

  # Reads records of `header.record_size` bytes until :eof or a lone <<26>>
  # block (0x1A — presumably the DBF end-of-data marker; confirm against the
  # DBF spec). `:start` primes the loop with the first read.
  def read_rows(file, header, encoding, prev_block, rows \\ [])

  def read_rows(_file, _header, _encoding, :eof, rows), do: Enum.reverse(rows)
  def read_rows(_file, _header, _encoding, <<26>>, rows), do: Enum.reverse(rows)

  def read_rows(file, header, encoding, :start, rows) do
    next_block = IO.binread(file, header.record_size)
    read_rows(file, header, encoding, next_block, rows)
  end

  def read_rows(file, header, encoding, prev_block, rows) do
    # An explicit `encoding` argument overrides the encoding from the header.
    row = parse_row(prev_block, header.columns, encoding || header.encoding)
    next_block = IO.binread(file, header.record_size)
    read_rows(file, header, encoding, next_block, [row | rows])
  end

  # Same loop as read_rows/5, but additionally stops after `count` rows.
  def read_rows_count(file, header, count, encoding, prev_block, rows \\ [])

  def read_rows_count(_file, _header, 0, _encoding, _prev_block, rows), do: Enum.reverse(rows)
  def read_rows_count(_file, _header, _count, _encoding, :eof, rows), do: Enum.reverse(rows)
  def read_rows_count(_file, _header, _count, _encoding, <<26>>, rows), do: Enum.reverse(rows)

  def read_rows_count(file, header, count, encoding, :start, rows) do
    next_block = IO.binread(file, header.record_size)
    read_rows_count(file, header, count, encoding, next_block, rows)
  end

  def read_rows_count(file, header, count, encoding, prev_block, rows) do
    row = parse_row(prev_block, header.columns, encoding || header.encoding)
    next_block = IO.binread(file, header.record_size)
    read_rows_count(file, header, count - 1, encoding, next_block, [row | rows])
  end

  # Parses the header, then reads either all rows (count = nil) or `count`
  # rows. Returns {:ok, table} when the number of rows read matches the
  # header's record count, {:error, :damaged, table} otherwise. The `after`
  # clause guarantees the file handle is closed in every case.
  def fetch_data(file, count \\ nil, encoding) do
    header = Header.parse(file)

    rows =
      case count do
        nil -> read_rows(file, header, encoding, :start)
        count -> read_rows_count(file, header, count, encoding, :start)
      end

    data = %__MODULE__{rows: rows, header: header}

    if length(rows) == header.records do
      {:ok, data}
    else
      {:error, :damaged, data}
    end
  after
    :ok = File.close(file)
  end

  # Reads a whole DBF table from `path`. An optional `encoding` overrides the
  # encoding recorded in the file header.
  def read(path, encoding \\ nil) do
    case File.open(path) do
      {:error, reason} ->
        {:error, reason}

      {:ok, file} ->
        fetch_data(file, encoding)
    end
  end

  # Reads at most `count` rows from the DBF table at `path`.
  def read_count(path, count, encoding \\ nil) do
    case File.open(path) do
      {:error, reason} ->
        {:error, reason}

      {:ok, file} ->
        fetch_data(file, count, encoding)
    end
  end
end
|
lib/elixir_dbf/table.ex
| 0.612657 | 0.45532 |
table.ex
|
starcoder
|
defmodule CryptoCompare do
@moduledoc """
Provides a basic HTTP interface to allow easy communication with the CryptoCompare API, by wrapping `HTTPoison`
** For now only HTTP REST API is available**.
Work on Websocket one is in progress.
[API Documentation](https://www.cryptocompare.com/api/#-api-data-)
## Example:
```elixir
iex(1)> CryptoCompare.coin_list
{:ok,
%{DCT: %{Algorithm: "N/A", CoinName: "Decent", FullName: "Decent (DCT)",
FullyPremined: "0", Id: "25721", ImageUrl: "/media/351389/dct.png",
Name: "DCT", PreMinedValue: "N/A", ProofType: "PoS", SortOrder: "702",
TotalCoinSupply: "73197775", TotalCoinsFreeFloat: "N/A",
Url: "/coins/dct/overview"},
PX: %{Algorithm: "SHA256", CoinName: "PXcoin", FullName: "PXcoin (PX)", ...},
"STR*": %{Algorithm: "Scrypt", CoinName: "StarCoin", ...},
BSTK: %{Algorithm: "PoS", ...}, SUR: %{...}, ...}}
```
"""
alias CryptoCompare.Util.{Api, ApiMini}
@doc """
Get general info for all the coins available on the website.
## Example:
```elixir
iex(1)> CryptoCompare.coin_list
{:ok,
%{DCT: %{Algorithm: "N/A", CoinName: "Decent", FullName: "Decent (DCT)",
FullyPremined: "0", Id: "25721", ImageUrl: "/media/351389/dct.png",
Name: "DCT", PreMinedValue: "N/A", ProofType: "PoS", SortOrder: "702",
TotalCoinSupply: "73197775", TotalCoinsFreeFloat: "N/A",
Url: "/coins/dct/overview"},
PX: %{Algorithm: "SHA256", CoinName: "PXcoin", FullName: "PXcoin (PX)", ...},
"STR*": %{Algorithm: "Scrypt", CoinName: "StarCoin", ...},
BSTK: %{Algorithm: "PoS", ...}, SUR: %{...}, ...}}
```
"""
@spec coin_list() :: {:ok, map} | {:error, any}
def coin_list, do: Api.get_body("coinlist")
@doc """
Get the latest price for a list of one or more currencies. Really fast, 20-60 ms. Cached each 10 seconds.
## Example:
```elixir
iex(1)> CryptoCompare.price("ETH", "BTC")
{:ok, %{BTC: 0.07356}}
```
"""
@spec price(String.t, String.t | [String.t]) :: {:ok, map} | {:error, any}
def price(fsym, tsyms) when is_list(tsyms), do: price(fsym, Enum.join(tsyms, ","))
@doc """
Get the latest price for a list of one or more currencies. Really fast, 20-60 ms. Cached each 10 seconds.
**Optional parameters:**
- `e` - String. Name of exchange. Default: CCCAGG
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
## Example:
```elixir
iex(1)> CryptoCompare.price("ETH", ["BTC", "LTC"])
{:ok, %{BTC: 0.07357, LTC: 5.3}}
```
With specified exchange:
```elixir
iex(1)> CryptoCompare.price("ETH", ["USD", "EUR"], [e: "Coinbase"])
{:ok, %{EUR: 254, USD: 301.91}}
```
```elixir
iex(17)> CryptoCompare.price("ETH", ["BTC", "LTC"], [extraParams: "my super app"])
{:ok, %{BTC: 0.07327, LTC: 5.25}}
```
"""
@spec price(String.t, String.t | [String.t], [tuple]) :: {:ok, map} | {:error, any}
def price(fsym, tsyms, params \\ [])
def price(fsym, tsyms, params) when is_list(tsyms), do: price(fsym, Enum.join(tsyms, ","), params)
def price(fsym, tsyms, params), do: ApiMini.get_body("price", [fsym: fsym, tsyms: tsyms] ++ params)
@doc """
Get a matrix of currency prices. For several symbols.
**Optional parameters:**
- `e` - String. Name of exchange. Default: CCCAGG
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
## Example:
```elixir
iex(1)> CryptoCompare.pricemulti(["ETH", "DASH"], ["BTC", "USD"])
{:ok, %{DASH: %{BTC: 0.08289, USD: 337.4}, ETH: %{BTC: 0.07306, USD: 297.98}}}
```
"""
@spec pricemulti(String.t | [String.t], String.t | [String.t], [tuple]) :: {:ok, map} | {:error, any}
def pricemulti(fsyms, tsyms, params \\ [])
def pricemulti(fsyms, tsyms, params) when is_list(fsyms), do: pricemulti(Enum.join(fsyms, ","), tsyms, params)
def pricemulti(fsyms, tsyms, params) when is_list(tsyms), do: pricemulti(fsyms, Enum.join(tsyms, ","), params)
def pricemulti(fsyms, tsyms, params), do: ApiMini.get_body("pricemulti", [fsyms: fsyms, tsyms: tsyms] ++ params)
@doc """
Get all the current trading info (price, vol, open, high, low etc) of any list of cryptocurrencies
in any other currency that you need.If the crypto does not trade directly into the toSymbol requested,
BTC will be used for conversion. This API also returns Display values for all the fields.
If the opposite pair trades we invert it (eg.: BTC-XMR).
**Optional parameters:**
- `e` - String. Name of exchange. Default: CCCAGG
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
## Example:
```elixir
iex(16)> CryptoCompare.pricemultifull(["ETH", "DASH"], ["BTC", "USD"], [extraParams: "my app"])
{:ok,
%{DISPLAY: %{DASH: %{BTC: %{CHANGE24HOUR: "Ι 0.00083",
CHANGEPCT24HOUR: "1.01", FROMSYMBOL: "DASH", HIGH24HOUR: "Ι 0.08821",
LASTMARKET: "Kraken", LASTTRADEID: 1505742927.3238,
LASTUPDATE: "Just now", LASTVOLUME: "DASH 1",
LASTVOLUMETO: "Ι 0.08374", LOW24HOUR: "Ι 0.08130",
MARKET: "CryptoCompare Index", MKTCAP: "Ι 628.13 K",
OPEN24HOUR: "Ι 0.08218", PRICE: "Ι 0.08301",
SUPPLY: "DASH 7,566,875.1", TOSYMBOL: "Ι",
VOLUME24HOUR: "DASH 66,103.7", VOLUME24HOURTO: "Ι 5,598.87"},
USD: %{CHANGE24HOUR: "$ 36.1", CHANGEPCT24HOUR: "11.99",
FROMSYMBOL: "DASH", HIGH24HOUR: "$ 340.91", LASTMARKET: "HitBTC",
LASTTRADEID: 34140863, LASTUPDATE: "Just now",
LASTVOLUME: "DASH 0.05400", LASTVOLUMETO: "$ 18.16",
LOW24HOUR: "$ 296.75", MARKET: "CryptoCompare Index",
MKTCAP: "$ 2,551.47 M", OPEN24HOUR: "$ 301.09", PRICE: "$ 337.19",
SUPPLY: "DASH 7,566,875.1", TOSYMBOL: "$",
VOLUME24HOUR: "DASH 33,876.6", VOLUME24HOURTO: "$ 10,855,035"}},
ETH: %{BTC: %{CHANGE24HOUR: "Ι 0.0041", CHANGEPCT24HOUR: "5.93",
FROMSYMBOL: "Ξ", HIGH24HOUR: "Ι 0.07573", LASTMARKET: "Poloniex",
LASTTRADEID: 34391845, LASTUPDATE: "Just now", LASTVOLUME: "Ξ 3",
LASTVOLUMETO: "Ι 0.2188", LOW24HOUR: "Ι 0.06866",
MARKET: "CryptoCompare Index", MKTCAP: "Ι 6,903.97 K",
OPEN24HOUR: "Ι 0.06883", PRICE: "Ι 0.07291",
SUPPLY: "Ξ 94,691,674.1", TOSYMBOL: "Ι",
VOLUME24HOUR: "Ξ 440,392.5", VOLUME24HOURTO: "Ι 31,872.6"},
USD: %{CHANGE24HOUR: "$ 43.02", CHANGEPCT24HOUR: "16.97",
FROMSYMBOL: "Ξ", HIGH24HOUR: "$ 301.17", LASTMARKET: "Gemini",
LASTTRADEID: 1729460950, LASTUPDATE: "Just now", LASTVOLUME: "Ξ 1.7",
LASTVOLUMETO: "$ 504.56", LOW24HOUR: "$ 251.78",
MARKET: "CryptoCompare Index", MKTCAP: "$ 28.08 B",
OPEN24HOUR: "$ 253.55", PRICE: "$ 296.57", SUPPLY: "Ξ 94,691,674.1",
TOSYMBOL: "$", VOLUME24HOUR: "Ξ 1,000,576.4",
VOLUME24HOURTO: "$ 279,431,015.1"}}},
RAW: %{DASH: %{BTC: %{CHANGE24HOUR: 8.299999999999974e-4,
CHANGEPCT24HOUR: 1.0099780968605467, FLAGS: "4", FROMSYMBOL: "DASH",
HIGH24HOUR: 0.08821, LASTMARKET: "Kraken",
LASTTRADEID: 1505742927.3238, LASTUPDATE: 1505742927,
LASTVOLUME: 1.00468613, LASTVOLUMETO: 0.0837405889355,
LOW24HOUR: 0.0813, MARKET: "CCCAGG", MKTCAP: 628126.2997194666,
OPEN24HOUR: 0.08218, PRICE: 0.08301, SUPPLY: 7566875.07191262,
TOSYMBOL: "BTC", TYPE: "5", VOLUME24HOUR: 66103.69737247002,
VOLUME24HOURTO: 5598.867303056865},
USD: %{CHANGE24HOUR: 36.10000000000002,
CHANGEPCT24HOUR: 11.989770500514805, FLAGS: "2", FROMSYMBOL: "DASH",
HIGH24HOUR: 340.91, LASTMARKET: "HitBTC", LASTTRADEID: 34140863,
LASTUPDATE: 1505742916, LASTVOLUME: 0.054, LASTVOLUMETO: 18.15534,
LOW24HOUR: 296.75, MARKET: "CCCAGG", MKTCAP: 2551474605.4982166,
OPEN24HOUR: 301.09, PRICE: 337.19, SUPPLY: 7566875.07191262,
TOSYMBOL: "USD", TYPE: "5", VOLUME24HOUR: 33876.57931777,
VOLUME24HOURTO: 10855035.024934988}},
ETH: %{BTC: %{CHANGE24HOUR: 0.00408, CHANGEPCT24HOUR: 5.927647827981985,
FLAGS: "4", FROMSYMBOL: "ETH", HIGH24HOUR: 0.07573,
LASTMARKET: "Poloniex", LASTTRADEID: 34391845, LASTUPDATE: 1505742928,
LASTVOLUME: 3.00073068, LASTVOLUMETO: 0.21879227, LOW24HOUR: 0.06866,
MARKET: "CCCAGG", MKTCAP: 6903969.958106048, OPEN24HOUR: 0.06883,
PRICE: 0.07291, SUPPLY: 94691674.0928, TOSYMBOL: "BTC", TYPE: "5",
VOLUME24HOUR: 440392.4897805099, VOLUME24HOURTO: 31872.558630222768},
USD: %{CHANGE24HOUR: 43.01999999999998,
CHANGEPCT24HOUR: 16.967067639518824, FLAGS: "4", FROMSYMBOL: "ETH",
HIGH24HOUR: 301.17, LASTMARKET: "Gemini", LASTTRADEID: 1729460950,
LASTUPDATE: 1505742937, LASTVOLUME: 1.7, LASTVOLUMETO: 504.56,
LOW24HOUR: 251.78, MARKET: "CCCAGG", MKTCAP: 28082709785.7017,
OPEN24HOUR: 253.55, PRICE: 296.57, SUPPLY: 94691674.0928,
TOSYMBOL: "USD", TYPE: "5", VOLUME24HOUR: 1000576.356339929,
VOLUME24HOURTO: 279431015.074645}}}}}
```
"""
@spec pricemultifull(String.t | [String.t], String.t | [String.t], [tuple]) :: {:ok, map} | {:error, any}
def pricemultifull(fsyms, tsyms, params \\ [])
def pricemultifull(fsyms, tsyms, params) when is_list(fsyms), do: pricemultifull(Enum.join(fsyms, ","), tsyms, params)
def pricemultifull(fsyms, tsyms, params) when is_list(tsyms), do: pricemultifull(fsyms, Enum.join(tsyms, ","), params)
def pricemultifull(fsyms, tsyms, params), do: ApiMini.get_body("pricemultifull", [fsyms: fsyms, tsyms: tsyms] ++ params)
@doc """
Compute the current trading info (price, vol, open, high, low etc) of the requested pair as a volume weighted average based on the markets requested.
**Optional parameters:**
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
## Example:
```elixir
iex(1)> CryptoCompare.generate_avg("BTC", "USD", ["Coinbase", "Bitfinex"])
{:ok,
%{
DISPLAY: %{CHANGE24HOUR: "$ 425", CHANGEPCT24HOUR: "11.53", FROMSYMBOL: "Ι",
HIGH24HOUR: "$ 4,130", LASTMARKET: "Coinbase", LASTTRADEID: 21066901,
LASTUPDATE: "Just now", LASTVOLUME: "Ι 3.16", LASTVOLUMETO: "$ 12,981.3",
LOW24HOUR: "$ 3,678", MARKET: "CUSTOMAGG", OPEN24HOUR: "$ 3,685",
PRICE: "$ 4,110", TOSYMBOL: "$", VOLUME24HOUR: "Ι 14,474.5",
VOLUME24HOURTO: "$ 56,142,934.5"},
RAW: %{CHANGE24HOUR: 425, CHANGEPCT24HOUR: 11.533242876526458, FLAGS: 0,
FROMSYMBOL: "BTC", HIGH24HOUR: 4130, LASTMARKET: "Coinbase",
LASTTRADEID: 21066901, LASTUPDATE: 1505744225, LASTVOLUME: 3.15847893,
LASTVOLUMETO: 12981.348402299998, LOW24HOUR: 3678, MARKET: "CUSTOMAGG",
OPEN24HOUR: 3685, PRICE: 4110, TOSYMBOL: "USD",
VOLUME24HOUR: 14474.464341350002, VOLUME24HOURTO: 56142934.480225}
}
}
```
"""
@spec generate_avg(String.t, String.t, String.t | [String.t], [tuple]) :: {:ok, map} | {:error, any}
def generate_avg(fsym, tsym, markets, params \\ [])
def generate_avg(fsym, tsym, markets, params) when is_list(markets), do: generate_avg(fsym, tsym, Enum.join(markets, ","), params)
def generate_avg(fsym, tsym, markets, params), do: ApiMini.get_body("generateAvg", [fsym: fsym, tsym: tsym, markets: markets] ++ params)
@doc """
Get day average price.
The values are based on hourly vwap data and the average can be calculated in different waysIt uses BTC conversion
if data is not available because the coin is not trading in the specified currency.
If tryConversion is set to false it will give you the direct data. If no toTS is given it will automatically do the current day.
Also for different timezones use the UTCHourDiff paramThe calculation types are:
HourVWAP - a VWAP of the hourly close price,
MidHighLow - the average between the 24 H high and low.
VolFVolT - the total volume from / the total volume to (only avilable with tryConversion set to false so only for direct trades but the value should be the most accurate price)
**Optional parameters:**
- `e` - String. Name of exchange. Default: CCCAGG
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
- `avgType` - String. Default: `HourVWAP`
- `UTCHourDiff` - int. Default: `0`
- `toTs` - timestamp. Hour unit
## Example
```elixir
iex(1)> CryptoCompare.day_avg("BTC", "ETH")
{:ok, %{ConversionType: %{conversionSymbol: "", type: "invert"}, ETH: 13.66}}
```
"""
@spec day_avg(String.t, String.t, [tuple]) :: {:ok, map} | {:error, any}
def day_avg(fsym, tsym, params \\ []), do: ApiMini.get_body("dayAvg", [fsym: fsym, tsym: tsym] ++ params)
@doc """
Get the price of any cryptocurrency in any other currency that you need at a given timestamp.
The price comes from the daily info - so it would be the price at the end of the day GMT based on the requested TS.
If the crypto does not trade directly into the toSymbol requested, BTC will be used for conversion.
Tries to get direct trading pair data, if there is none or it is more than 30 days before the ts requested, it uses BTC conversion.
If the opposite pair trades we invert it (eg.: BTC-XMR)
**Optional parameters:**
- `ts` - Timestamp.
- `markets` - String. Name of exchanges, include multiple. Default: `CCCAGG`
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
## Example:
```elixir
iex(3)> CryptoCompare.price_historical("ETH", ["BTC"])
{:ok, %{ETH: %{BTC: 0.0725}}}
```
"""
@spec price_historical(String.t, String.t | [String.t], [tuple]) :: {:ok, map} | {:error, any}
def price_historical(fsym, tsyms, params \\ [])
# A list of target symbols is joined into the comma-separated form the API expects.
def price_historical(fsym, tsyms, params) when is_list(tsyms), do: price_historical(fsym, Enum.join(tsyms, ","), params)
def price_historical(fsym, tsyms, params), do: ApiMini.get_body("pricehistorical", [fsym: fsym, tsyms: tsyms] ++ params)
@doc """
Get data for a currency pair.
It returns general block explorer information, aggregated data and individual data for each exchange available.
This API is getting abused and will be moved to a min-api path in the near future. Please try not to use it.
## Example:
```elixir
iex(2)> CryptoCompare.coin_snapshot("BTC", "USD")
{:ok,
%{AggregatedData: %{FLAGS: "4", FROMSYMBOL: "BTC", HIGH24HOUR: "4051.38",
LASTMARKET: "Coinbase", LASTTRADEID: "21143529", LASTUPDATE: "1505915744",
LASTVOLUME: "0.00025062", LASTVOLUMETO: "0.9974676", LOW24HOUR: "3839.51",
MARKET: "CCCAGG", OPEN24HOUR: "3961.07", PRICE: "3980.47", TOSYMBOL: "USD",
TYPE: "5", VOLUME24HOUR: "92013.70078287183",
VOLUME24HOURTO: "362226520.4356543"}, Algorithm: "SHA256",
BlockNumber: 486154, BlockReward: 12.5,
Exchanges: [
%{FLAGS: "4", FROMSYMBOL: "BTC", HIGH24HOUR: "4049",
LASTTRADEID: "21143529", LASTUPDATE: "1505915744",
LASTVOLUME: "0.00025062", LASTVOLUMETO: "0.9974676", LOW24HOUR: "3850.01",
MARKET: "Coinbase", OPEN24HOUR: "3960.09", PRICE: "3980", TOSYMBOL: "USD",
TYPE: "2", VOLUME24HOUR: "10388.061612519965",
VOLUME24HOURTO: "40860804.10243919"},
%{FLAGS: "4", FROMSYMBOL: "BTC", HIGH24HOUR: "4190", LASTTRADEID: "3274802",
LASTUPDATE: "1505915715", LASTVOLUME: "0.05115365",
LASTVOLUMETO: "209.72996500000002", LOW24HOUR: "4005", MARKET: "Cexio",
OPEN24HOUR: "4115.2", ...}
], NetHashesPerSecond: 7898572058.405353,
ProofType: "PoW", TotalCoinsMined: 1.65769e7}}
```
"""
@spec coin_snapshot(String.t, String.t) :: {:ok, map} | {:error, any}
def coin_snapshot(fsym, tsym), do: Api.get_body("coinsnapshot", [fsym: fsym, tsym: tsym])
@doc """
Fetch the general information, the subs (used to connect to the streamer and to
figure out which exchanges have data and which exact coin pairs exist for the coin)
and the aggregated prices for all available pairs.
## Example:
```elixir
iex(4)> CryptoCompare.coin_snapshot_full_by_id(1182)
{:ok,
%{General: %{AffiliateUrl: "https://bitcoin.org/en/", Algorithm: "SHA256",
BaseAngularUrl: "/coins/btc/", BlockNumber: 486154, BlockReward: 12.5,
BlockRewardReduction: "50%", BlockTime: 600, DangerTop: "",
Description: "something", DifficultyAdjustment: "2016 blocks", DocumentType: "Webpagecoinp",
Features: "something", H1Text: "Bitcoin (BTC)", Id: "1182", ImageUrl: "/media/19633/btc.png",
InfoTop: "", LastBlockExplorerUpdateTS: 1505915570, Name: "Bitcoin",
NetHashesPerSecond: 7898572058.405353, PreviousTotalCoinsMined: 0.0,
ProofType: "PoW",
Sponsor: %{ImageUrl: "/media/11417633/utrust_sponsor.png",
Link: "https://utrust.io", TextTop: "Sponsored by"},
StartDate: "03/01/2009", Symbol: "BTC",
Technology: "something", TotalCoinSupply: "21000000", TotalCoinsMined: 1.65769e7,
Twitter: "@bitcoin", Url: "/coins/btc/", WarningTop: "",
Website: "<a href='https://bitcoin.org/en/' target='_blank'>Bitcoin</a>"},
ICO: %{Status: "N/A", WhitePaper: "-"},
SEO: %{BaseImageUrl: "https://www.cryptocompare.com",
BaseUrl: "https://www.cryptocompare.com", OgImageHeight: "300",
OgImageUrl: "/media/19633/btc.png", OgImageWidth: "300",
PageDescription: "Live Bitcoin prices from all markets and BTC coin market Capitalization. Stay up to date with the latest Bitcoin price movements and forum discussion. Check out our snapshot charts and see when there is an opportunity to buy or sell Bitcoin.",
PageTitle: "Bitcoin (BTC) - Live Bitcoin price and market cap"},
StreamerDataRaw: [...],
Subs: ["2~BTCE~BTC~CNH", "2~LocalBitcoins~BTC~GEL", ...]}}
```
"""
@spec coin_snapshot_full_by_id(integer | String.t) :: {:ok, map} | {:error, any}
def coin_snapshot_full_by_id(id) do
  Api.get_body("coinsnapshotfullbyid", id: id)
end
@doc """
Fetch every piece of mining equipment listed on the website, returned as an
array of mining-equipment objects.
## Example:
```elixir
iex(1)> CryptoCompare.mining_equipment
{:ok,
%{CoinData: %{BTC: %{BlockNumber: 486156, BlockReward: 12.5,
BlockRewardReduction: "50%", BlockTime: 600,
DifficultyAdjustment: "2016 blocks",
NetHashesPerSecond: 7898572058.405353, PreviousTotalCoinsMined: 0.0,
PriceUSD: 4034.81, Symbol: "BTC", TotalCoinsMined: 16576950.0},
DASH: %{BlockNumber: 740410, BlockReward: 3.6029519103467464,
BlockRewardReduction: "50%", BlockTime: 37, DifficultyAdjustment: "DGW",
NetHashesPerSecond: 229045990915876.0,
PreviousTotalCoinsMined: 7570895.96624418, PriceUSD: 342.84,
Symbol: "DASH", TotalCoinsMined: 7570899.56919609},
Message: "Mining contracts data successfully returned",
MiningData: %{"35580": %{AffiliateURL: "https://mineshop.eu/monero-xmr-miners/monero-miner-gpu-mining-detail",
Algorithm: "CryptoNight", Company: "MineShop", Cost: "2142.90",
CurrenciesAvailable: "XMR",
CurrenciesAvailableLogo: "/media/19969/xmr.png",
CurrenciesAvailableName: "Monero", Currency: "USD", EquipmentType: "Rig",
HashesPerSecond: "3200", Id: "35580",
LogoUrl: "/media/352238/eth_rig_125.png",
Name: "Monero Mining Rig 3200 H/s", ParentId: "35553",
PowerConsumption: "800", Recommended: false, Sponsored: false,
Url: "/mining/mineshop/monero-mining-rig-3200hs/"},
"2476": %{...}, ...}, Response: "Success", Type: 100}}
```
"""
@spec mining_equipment() :: {:ok, map} | {:error, any}
def mining_equipment() do
  Api.get_body("miningequipment")
end
@doc """
Returns all the mining contracts in a JSON array, along with current coin data
for the coins the contracts mine.
## Example:
```elixir
iex(3)> CryptoCompare.mining_contracts
{:ok,
%{CoinData: %{BCH: %{BlockNumber: 0, BlockReward: 0.0,
BlockRewardReduction: nil, BlockTime: 600, DifficultyAdjustment: nil,
NetHashesPerSecond: 0.0, PreviousTotalCoinsMined: 0.0, PriceUSD: 507.16,
Symbol: "BCH", TotalCoinsMined: 16598463.0},
BTC: %{BlockNumber: 486156, BlockReward: 12.5, BlockRewardReduction: "50%",
BlockTime: 600, DifficultyAdjustment: "2016 blocks",
NetHashesPerSecond: 7898572058.405353, PreviousTotalCoinsMined: 0.0,
PriceUSD: 4036.23, Symbol: "BTC", TotalCoinsMined: 16576950.0},
DASH: %{BlockNumber: 740410, BlockReward: 3.6029519103467464,
BlockRewardReduction: "50%", BlockTime: 37, DifficultyAdjustment: "DGW",
NetHashesPerSecond: 229045990915876.0,
PreviousTotalCoinsMined: 7570895.96624418, PriceUSD: 342.99,
Symbol: "DASH", TotalCoinsMined: 7570899.56919609},
ETH: %{BlockNumber: 4294988, BlockReward: 5.0, BlockRewardReduction: "",
BlockTime: 19, DifficultyAdjustment: "Per 1 Block",
NetHashesPerSecond: 104406325175528.05, PreviousTotalCoinsMined: 0.0,
PriceUSD: 292.86, Symbol: "ETH", TotalCoinsMined: 94731137.8428},
LTC: %{BlockNumber: 1280725, BlockReward: 25.0,
BlockRewardReduction: "50%", BlockTime: 150,
DifficultyAdjustment: "2016 blocks",
NetHashesPerSecond: 23936316649465.7,
PreviousTotalCoinsMined: 53016132.3718871, PriceUSD: 54.13,
Symbol: "LTC", TotalCoinsMined: 53016232.37188706},
XMR: %{BlockNumber: 1402642, BlockReward: 6.819603784146,
BlockRewardReduction: "-", BlockTime: 120,
DifficultyAdjustment: "2 blocks", NetHashesPerSecond: 241354987.75833,
PreviousTotalCoinsMined: 0.0, PriceUSD: 97.87, Symbol: "XMR",
TotalCoinsMined: 15104905.567139952},
ZEC: %{BlockNumber: 187966, BlockReward: 10.0, BlockRewardReduction: nil,
BlockTime: 150, DifficultyAdjustment: nil,
NetHashesPerSecond: 283238210.0, PreviousTotalCoinsMined: 0.0,
PriceUSD: 193.75, Symbol: "ZEC", TotalCoinsMined: 2224581.25}},
Message: "Mining contracts data successfully returned",
MiningData: %{"25743": %{AffiliateURL: "http://bit.ly/2tudp6y",
Algorithm: "X11", Company: "HashCoins", ContractLength: "360",
Cost: "32", CurrenciesAvailable: "DASH",
CurrenciesAvailableLogo: "/media/20626/dash.png",
CurrenciesAvailableName: "DigitalCash", Currency: "USD",
FeePercentage: "0", FeeValue: "0", FeeValueCurrency: "USD",
HashesPerSecond: "10000000", Id: "25743",
LogoUrl: "/media/350644/hashflare.png",
Name: "Mining Contract Dash Small", ParentId: "2363", Recommended: false,
Sponsored: false, Url: "/mining/hashcoins/mining-contract-x11-small/"},
"25745": %{AffiliateURL: "http://bit.ly/2tudp6y", ...}, "15709": %{...},
...}, Response: "Success", Type: 100}}
```
"""
@spec mining_contracts() :: {:ok, map} | {:error, any}
def mining_contracts, do: Api.get_body("miningcontracts")
@doc """
Get open, high, low, close, volumefrom and volumeto from each minute of historical data.
This data is only stored for 7 days; if you need more, use the hourly or daily path.
It uses BTC conversion if data is not available because the coin is not trading in the specified currency.
**Optional parameters:**
- `toTs` - Timestamp.
- `e` - String. Name of exchange. Default: `CCCAGG`
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
- `aggregate` - Integer. Default to `1`
- `limit` - Integer. Default to `1440`
## Example:
```elixir
iex(3)> CryptoCompare.histo_minute("BTC", "ETH", [limit: 3])
{:ok,
%{Aggregated: false, ConversionType: %{conversionSymbol: "", type: "invert"},
Data: [%{close: 13.6, high: 13.59, low: 13.6, open: 13.6, time: 1505984700,
volumefrom: 7.79, volumeto: 106.01},
%{close: 13.6, high: 13.59, low: 13.6, open: 13.6, time: 1505984760,
volumefrom: 7.15, volumeto: 97.24},
%{close: 13.61, high: 13.6, low: 13.61, open: 13.6, time: 1505984820,
volumefrom: 18.72, volumeto: 255.08},
%{close: 13.61, high: 13.61, low: 13.61, open: 13.61, time: 1505984880,
volumefrom: 6.07, volumeto: 82.56}], FirstValueInArray: true,
Response: "Success", TimeFrom: 1505984700, TimeTo: 1505984880, Type: 100}}
```
"""
@spec histo_minute(String.t, String.t, [tuple]) :: {:ok, map} | {:error, any}
def histo_minute(fsym, tsym, params \\ []), do: ApiMini.get_body("histominute", [fsym: fsym, tsym: tsym] ++ params)
@doc """
Get open, high, low, close, volumefrom and volumeto from each hour of historical data.
It uses BTC conversion if data is not available because the coin is not trading in the specified currency.
**Optional parameters:**
- `toTs` - Timestamp.
- `e` - String. Name of exchange. Default: `CCCAGG`
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
- `aggregate` - Integer. Default to `1`
- `limit` - Integer. Default to `168`
## Example:
```elixir
iex(5)> CryptoCompare.histo_hour("BTC", "ETH", [limit: 3])
{:ok,
%{Aggregated: false, ConversionType: %{conversionSymbol: "", type: "invert"},
Data: [%{close: 13.7, high: 13.67, low: 13.72, open: 13.7, time: 1505973600,
volumefrom: 493.02, volumeto: 6750.16},
%{close: 13.63, high: 13.63, low: 13.76, open: 13.7, time: 1505977200,
volumefrom: 951.78, volumeto: 13014.42},
%{close: 13.6, high: 13.58, low: 13.64, open: 13.63, time: 1505980800,
volumefrom: 1000.15, volumeto: 13602.59},
%{close: 13.6, high: 13.59, low: 13.61, open: 13.6, time: 1505984400,
volumefrom: 171.78, volumeto: 2336}], FirstValueInArray: true,
Response: "Success", TimeFrom: 1505973600, TimeTo: 1505984400, Type: 100}}
```
"""
@spec histo_hour(String.t, String.t, [tuple]) :: {:ok, map} | {:error, any}
def histo_hour(fsym, tsym, params \\ []), do: ApiMini.get_body("histohour", [fsym: fsym, tsym: tsym] ++ params)
@doc """
Get open, high, low, close, volumefrom and volumeto daily historical data.
The values are based on 00:00 GMT time.
It uses BTC conversion if data is not available because the coin is not trading in the specified currency.
**Optional parameters:**
- `toTs` - Timestamp.
- `e` - String. Name of exchange. Default: `CCCAGG`
- `extraParams` - String. Name of your application
- `sign` - bool. If set to true, the server will sign the requests.
- `tryConversion` - bool. If set to false, it will try to get values without using any conversion at all. Default: `true`
- `aggregate` - Integer. Default to `1`
- `limit` - Integer. Default to `30`
- `allData` - Boolean. Get all data. Default: `false`
## Example:
```elixir
iex(1)> CryptoCompare.histo_day("BTC", "ETH", [limit: 3])
{:ok,
%{Aggregated: false, ConversionType: %{conversionSymbol: "", type: "invert"},
Data: [%{close: 13.74, high: 13.23, low: 14.31, open: 14.31,
time: 1505692800, volumefrom: 34011.1, volumeto: 465408.96},
%{close: 13.79, high: 13.67, low: 13.94, open: 13.74, time: 1505779200,
volumefrom: 21632, volumeto: 298705.56},
%{close: 13.68, high: 13.67, low: 13.85, open: 13.79, time: 1505865600,
volumefrom: 16536.62, volumeto: 227858.3},
%{close: 13.61, high: 13.59, low: 13.76, open: 13.68, time: 1505952000,
volumefrom: 5880.9, volumeto: 80399.15}], FirstValueInArray: true,
Response: "Success", TimeFrom: 1505692800, TimeTo: 1505952000, Type: 100}}
```
"""
@spec histo_day(String.t, String.t, [tuple]) :: {:ok, map} | {:error, any}
def histo_day(fsym, tsym, params \\ []), do: ApiMini.get_body("histoday", [fsym: fsym, tsym: tsym] ++ params)
@doc """
Get the top trading pairs by volume for a currency (always based on the aggregated data).
The number of returned pairs is the minimum of the requested limit (default 5)
and the total number of pairs available.
**Optional parameters:**
- `tsym` - String. To symbol
- `limit` - Integer. Default to `5`
- `sign` - Boolean. If set to true, the server will sign the request. Default: `false`
## Example:
```elixir
iex(1)> CryptoCompare.top_pairs("BTC")
{:ok,
%{Data: [%{exchange: "CCCAGG", fromSymbol: "BTC", toSymbol: "JPY",
volume24h: 136451.73538353332, volume24hTo: 59735591768.351654},
%{exchange: "CCCAGG", fromSymbol: "BTC", toSymbol: "USD",
volume24h: 90057.92590708924, volume24hTo: 353713017.2572782},
%{exchange: "CCCAGG", fromSymbol: "BTC", toSymbol: "KRW",
volume24h: 14891.462995631156, volume24hTo: 65391518794.84038},
%{exchange: "CCCAGG", fromSymbol: "BTC", toSymbol: "CNY",
volume24h: 13383.281520579989, volume24hTo: 312930536.6705389},
%{exchange: "CCCAGG", fromSymbol: "BTC", toSymbol: "EUR",
volume24h: 12987.115042599999, volume24hTo: 43056437.811124615}],
Response: "Success"}}
```
"""
@spec top_pairs(String.t, [tuple]) :: {:ok, map} | {:error, any}
def top_pairs(fsym, params \\ []) do
  ApiMini.get_body("top/pairs", [fsym: fsym] ++ params)
end
end
|
lib/crypto_compare.ex
| 0.899938 | 0.827271 |
crypto_compare.ex
|
starcoder
|
defmodule GraphQL.QueryRegistry do
  @moduledoc """
  Functions to handle query registries.

  A query registry stores several `GraphQL.Query` structs, so they
  can be combined into a single query before the execution.
  """

  # `Response` added so `t:resolver/0` below resolves to `GraphQL.Response.t()`.
  alias GraphQL.{Client, Query, Response}

  @enforce_keys [:name]
  defstruct name: nil, queries: [], variables: [], resolvers: []

  @typedoc """
  A resolver is a function that must accept two arguments:

  - a `GraphQL.Response` struct
  - an accumulator, that can be of any type

  It also must return the updated value of the accumulator.
  """
  @type resolver :: (Response.t(), any() -> any())

  @typedoc """
  A struct that keeps the information about several queries, variables and
  resolvers.

  The `name` field will be used as the name of the final query or mutation.
  The `queries` field is a list of `GraphQL.Query` structs, that
  will be merged before execution.
  The `variables` field is a list of maps with all _values_ of variables that
  will be sent to the server along with the GraphQL body.
  The `resolvers` field is a list of `t:resolver/0` functions that can be used
  to produce the side effects in an accumulator.
  """
  @type t :: %__MODULE__{
          name: String.t(),
          queries: [Query.t()],
          variables: [map()],
          resolvers: [resolver()]
        }

  @doc """
  Creates a new QueryRegistry struct with the given name.
  """
  @spec new(String.t()) :: t()
  def new(name) do
    %__MODULE__{name: name}
  end

  @doc """
  Adds a query to a query registry.

  `variables` may carry the values for the variables used by `query`;
  `nil` or an empty map is ignored.
  """
  @spec add_query(t(), Query.t(), map() | nil) :: t()
  def add_query(%__MODULE__{} = registry, %Query{} = query, variables \\ nil) do
    updated_variables =
      if variables == %{} || variables == nil do
        registry.variables
      else
        [variables | registry.variables]
      end

    %__MODULE__{registry | queries: [query | registry.queries], variables: updated_variables}
  end

  @doc """
  Adds a new resolver into a query registry.
  """
  @spec add_resolver(t(), resolver()) :: t()
  def add_resolver(%__MODULE__{} = registry, function) when is_function(function, 2) do
    add_resolvers(registry, [function])
  end

  @doc """
  Adds a list of resolvers into a query registry.
  """
  @spec add_resolvers(t(), [resolver()]) :: t()
  def add_resolvers(%__MODULE__{} = registry, resolvers) when is_list(resolvers) do
    %__MODULE__{registry | resolvers: registry.resolvers ++ resolvers}
  end

  @doc """
  Executes the given query registry, using the given accumulator `acc` and the
  given options.

  Returns `{:ok, result}` where `result` is the accumulator after all resolvers
  ran, or `{:error, reason}` when the registry has no queries or they cannot be
  merged into a single query.
  """
  @spec execute(t(), any(), Keyword.t()) :: {:ok, any()} | {:error, term()}
  def execute(registry, acc, options \\ []) do
    case prepare_query(registry) do
      {:ok, {query, variables, resolvers}} ->
        result =
          query
          |> Client.execute(variables, options)
          |> resolve(resolvers, acc)

        {:ok, result}

      error ->
        error
    end
  end

  # Merges the registered queries into a single one; errors out when the
  # registry is empty or the queries cannot be merged.
  defp prepare_query(%__MODULE__{} = registry) do
    case registry.queries do
      [] ->
        {:error, "no queries available"}

      _not_empty ->
        case Query.merge_many(registry.queries, registry.name) do
          {:ok, query} ->
            variables = merge_variables(registry.variables)
            {:ok, {query, variables, registry.resolvers}}

          error ->
            error
        end
    end
  end

  # Collapses the list of variable maps into a single map. The list head is
  # the most recently added map and seeds the reduction, so its values win on
  # duplicate keys.
  defp merge_variables([]), do: %{}

  defp merge_variables(variables) do
    Enum.reduce(variables, &Map.merge/2)
  end

  # Threads the response through every resolver, accumulating side effects.
  defp resolve(response, resolvers, initial_acc) do
    Enum.reduce(resolvers, initial_acc, fn resolver, acc ->
      resolver.(response, acc)
    end)
  end
end
|
lib/graphql/query_registry.ex
| 0.917958 | 0.684093 |
query_registry.ex
|
starcoder
|
defmodule StathamLogger.DatadogFormatter do
  @moduledoc """
  Datadog-specific log event formatting.

  Produces event maps following the Datadog
  [default standard attribute list](https://docs.datadoghq.com/logs/processing/attributes_naming_convention/#default-standard-attribute-list).
  Parts of the implementation are borrowed from Nebo15
  [logger_json](https://github.com/Nebo15/logger_json/blob/master/lib/logger_json/formatters/datadog_logger.ex):
  """
  import Jason.Helpers, only: [json_map: 1]

  # Metadata keys that are dropped from every formatted event.
  @skipped_metadata_keys [:domain, :erl_level, :gl, :time]

  @doc """
  Builds the Datadog-ready map for a single log event.
  """
  def format_event(level, message, timestamp, raw_metadata, sanitized_metadata) do
    base = %{
      logger:
        json_map(
          thread_name: inspect(Map.get(raw_metadata, :pid)),
          method_name: method_name(raw_metadata)
        ),
      message: IO.chardata_to_string(message),
      syslog:
        json_map(
          hostname: node_hostname(),
          severity: Atom.to_string(level),
          timestamp: format_timestamp(timestamp)
        )
    }

    Map.merge(base, format_metadata(sanitized_metadata, raw_metadata))
  end

  # Drops the skipped keys and attaches crash details (when present) under :error.
  defp format_metadata(sanitized_metadata, raw_metadata) do
    sanitized_metadata
    |> Map.drop(@skipped_metadata_keys)
    |> maybe_put(:error, format_process_crash(raw_metadata))
  end

  defp method_name(raw_metadata) do
    format_function(Map.get(raw_metadata, :module), Map.get(raw_metadata, :function))
  end

  defp node_hostname do
    {:ok, hostname} = :inet.gethostname()
    to_string(hostname)
  end

  defp format_initial_call(nil), do: nil

  defp format_initial_call({module, function, arity}),
    do: format_function(module, function, arity)

  defp format_function(nil, function), do: function
  defp format_function(module, function), do: "#{module}.#{function}"
  defp format_function(module, function, arity), do: "#{module}.#{function}/#{arity}"

  @doc false
  def maybe_put(map, _key, nil), do: map
  def maybe_put(map, key, value), do: Map.put(map, key, value)

  @doc false
  def format_process_crash(metadata) do
    case Map.get(metadata, :crash_reason) do
      nil ->
        nil

      crash_reason ->
        json_map(
          initial_call: metadata |> Map.get(:initial_call) |> format_initial_call(),
          reason: format_crash_reason(crash_reason)
        )
    end
  end

  defp format_timestamp({date, time}) do
    IO.iodata_to_binary([format_date(date), ?T, format_time(time), ?Z])
  end

  defp format_time({hour, minute, second, millisecond}) do
    [pad(hour, 2), ?:, pad(minute, 2), ?:, pad(second, 2), ?., pad(millisecond, 3)]
  end

  defp format_date({year, month, day}) do
    [Integer.to_string(year), ?-, pad(month, 2), ?-, pad(day, 2)]
  end

  # Zero-pads an integer to the requested width.
  defp pad(int, width) do
    int |> Integer.to_string() |> String.pad_leading(width, "0")
  end

  defp format_crash_reason(reason) do
    case reason do
      {:throw, value} -> Exception.format(:throw, value)
      {:exit, value} -> Exception.format(:exit, value)
      {%{} = exception, stacktrace} -> Exception.format(:error, exception, stacktrace)
      other -> inspect(other)
    end
  end
end
|
lib/datadog_formatter.ex
| 0.749271 | 0.426859 |
datadog_formatter.ex
|
starcoder
|
defmodule Xandra.Cluster do
@moduledoc """
A `DBConnection.Pool` pool that implements clustering support.
This module implements a `DBConnection.Pool` pool that implements support for
connecting to multiple nodes and executing queries on such nodes based on a
given "strategy".
## Usage
To use this pool, the `:pool` option in `Xandra.start_link/1` needs to be set
to `Xandra.Cluster`. `Xandra.Cluster` is a "proxy" pool in the sense that it
only proxies requests to other underlying pools of Xandra connections; the
underlying pool can be specified with the `:underlying_pool` option. When you
start a `Xandra.Cluster` connection, it will start one pool
(`:underlying_pool`) of connections to each of the nodes specified in
`:nodes`. The default `:underlying_pool` is `DBConnection.Connection`, which
means by default only a single connection to each specified node will be
established.
Note that regardless of the underlying pool, `Xandra.Cluster` will establish
one extra connection to each node in the specified list of nodes (used for
internal purposes).
Here is an example of how one could use `Xandra.Cluster` to connect to
multiple nodes, while using `:poolboy` for pooling the connections to each
node:
Xandra.start_link([
nodes: ["cassandra1.example.net", "cassandra2.example.net"],
pool: Xandra.Cluster,
underlying_pool: DBConnection.Poolboy,
pool_size: 10,
])
The code above will establish a pool of ten connections to each of the nodes
specified in `:nodes`, for a total of twenty connections going out of the
current machine, plus two extra connections (one per node) used for internal
purposes.
Once a `Xandra.Cluster` pool is started, queries executed through such pool
will be "routed" to nodes in the provided list of nodes; see the "Load
balancing strategies" section below.
## Load balancing strategies
For now, there are two load balancing "strategies" implemented:
* `:random` - it will choose one of the connected nodes at random and
execute the query on that node.
* `:priority` - it will choose a node to execute the query according
to the order nodes appear in `:nodes`.
## Disconnections and reconnections
`Xandra.Cluster` also supports nodes disconnecting and reconnecting: if Xandra
detects one of the nodes in `:nodes` going down, it will not execute queries
against it anymore, but will start executing queries on it as soon as it
detects such node is back up.
If all specified nodes happen to be down when a query is executed, a
`Xandra.ConnectionError` with reason `{:cluster, :not_connected}` will be
returned.
## Options
These are the options that `Xandra.start_link/1` accepts when
`pool: Xandra.Cluster` is passed to it:
* `:underlying_pool` - (module) the `DBConnection.Pool` pool used to pool
connections to each of the specified nodes.
* `:load_balancing` - (atom) load balancing "strategy". Defaults to `:random`.
To pass options to the underlying pool, you can just pass them alongside other
options to `Xandra.start_link/1`.
"""
use GenServer
@behaviour DBConnection.Pool
@default_pool_module DBConnection.Connection
@default_load_balancing :random
alias __MODULE__.{ControlConnection, StatusChange}
alias Xandra.ConnectionError
require Logger
# GenServer state:
# - options: connection options forwarded to each per-node pool
# - node_refs: [{ref, address}] kept in the order nodes were given (this
#   ordering is what the :priority strategy relies on)
# - load_balancing: :random | :priority
# - pool_supervisor: supervisor owning one pool child per node (child id = address)
# - pool_module: the underlying DBConnection.Pool implementation
# - pools: %{address => pool pid} for nodes currently considered up
defstruct [
:options,
:node_refs,
:load_balancing,
:pool_supervisor,
:pool_module,
pools: %{}
]
# DBConnection.Pool callback: delegates to the underlying pool module.
def ensure_all_started(options, type) do
{pool_module, options} = Keyword.pop(options, :underlying_pool, @default_pool_module)
pool_module.ensure_all_started(options, type)
end
# DBConnection.Pool callback.
def child_spec(module, options, child_options) do
Supervisor.Spec.worker(__MODULE__, [module, options], child_options)
end
# Extracts the cluster-specific options and starts the proxy GenServer; the
# actual per-node pools are started later, when control connections report
# nodes as active.
def start_link(Xandra.Connection, options) do
{pool_module, options} = Keyword.pop(options, :underlying_pool, @default_pool_module)
{load_balancing, options} = Keyword.pop(options, :load_balancing, @default_load_balancing)
{nodes, options} = Keyword.pop(options, :nodes)
{name, options} = Keyword.pop(options, :name)
state = %__MODULE__{
options: Keyword.delete(options, :pool),
load_balancing: load_balancing,
pool_module: pool_module
}
GenServer.start_link(__MODULE__, {state, nodes}, name: name)
end
# max_restarts: 0 means a crashing pool child is not restarted automatically;
# pools are (re)started explicitly via the "UP"/"DOWN" status changes below.
def init({%__MODULE__{options: options} = state, nodes}) do
{:ok, pool_supervisor} = Supervisor.start_link([], strategy: :one_for_one, max_restarts: 0)
node_refs = start_control_connections(nodes, options)
{:ok, %{state | node_refs: node_refs, pool_supervisor: pool_supervisor}}
end
# DBConnection.Pool callback: picks a per-node pool (via the load-balancing
# strategy) and checks a connection out of it. Returns a ConnectionError with
# reason {:cluster, :not_connected} when no node is currently up.
def checkout(cluster, options) do
case GenServer.call(cluster, :checkout) do
{:ok, pool_module, pool} ->
with {:ok, pool_ref, module, state} <- pool_module.checkout(pool, options) do
{:ok, {pool_module, pool_ref}, module, state}
end
{:error, :empty} ->
action = "checkout from cluster #{inspect(name())}"
{:error, ConnectionError.new(action, {:cluster, :not_connected})}
end
end
# DBConnection.Pool callbacks: the pool ref carries the underlying pool
# module, so these simply delegate to it.
def checkin({pool_module, pool_ref}, state, options) do
pool_module.checkin(pool_ref, state, options)
end
def disconnect({pool_module, pool_ref}, error, state, options) do
pool_module.disconnect(pool_ref, error, state, options)
end
def stop({pool_module, pool_ref}, error, state, options) do
pool_module.stop(pool_ref, error, state, options)
end
# Called by a control connection when its node becomes reachable; triggers
# starting the per-node pool.
def activate(cluster, node_ref, address, port) do
GenServer.cast(cluster, {:activate, node_ref, address, port})
end
# Called by a control connection when a node's status changes ("UP"/"DOWN").
def update(cluster, status_change) do
GenServer.cast(cluster, {:update, status_change})
end
def handle_call(:checkout, _from, %__MODULE__{} = state) do
%{
node_refs: node_refs,
load_balancing: load_balancing,
pool_module: pool_module,
pools: pools
} = state
if Enum.empty?(pools) do
{:reply, {:error, :empty}, state}
else
pool = select_pool(load_balancing, pools, node_refs)
{:reply, {:ok, pool_module, pool}, state}
end
end
def handle_cast({:activate, node_ref, address, port}, %__MODULE__{} = state) do
{:noreply, start_pool(state, node_ref, address, port)}
end
def handle_cast({:update, %StatusChange{} = status_change}, %__MODULE__{} = state) do
{:noreply, toggle_pool(state, status_change)}
end
# Starts one control connection per node. The address slot starts as nil and
# is filled in by start_pool/4 once the node is activated.
defp start_control_connections(nodes, options) do
cluster = self()
Enum.map(nodes, fn {address, port} ->
node_ref = make_ref()
ControlConnection.start_link(cluster, node_ref, address, port, options)
{node_ref, nil}
end)
end
# Starts (under the pool supervisor) the underlying pool for a newly
# activated node, and records its address both in node_refs (preserving the
# original node order) and in the pools map.
defp start_pool(state, node_ref, address, port) do
%{
options: options,
node_refs: node_refs,
pool_module: pool_module,
pool_supervisor: pool_supervisor,
pools: pools
} = state
options = [address: address, port: port] ++ options
child_spec = pool_module.child_spec(Xandra.Connection, options, id: address)
case Supervisor.start_child(pool_supervisor, child_spec) do
{:ok, pool} ->
node_refs = List.keystore(node_refs, node_ref, 0, {node_ref, address})
%{state | node_refs: node_refs, pools: Map.put(pools, address, pool)}
{:error, {:already_started, _pool}} ->
Logger.warn(fn ->
"Xandra cluster #{inspect(name())} " <>
"received request to start another connection pool " <>
"to the same address: #{inspect(address)}"
end)
state
end
end
# Best-effort identifier for log/error messages: the registered name when
# the cluster process has one, otherwise its pid.
defp name() do
case Process.info(self(), :registered_name) |> elem(1) do
[] -> self()
name -> name
end
end
# A node came back up: restart its (previously terminated) pool child and
# track it again. :not_found / :running / :restarting are treated as no-ops.
defp toggle_pool(state, %{effect: "UP", address: address}) do
%{pool_supervisor: pool_supervisor, pools: pools} = state
case Supervisor.restart_child(pool_supervisor, address) do
{:error, reason} when reason in [:not_found, :running, :restarting] ->
state
{:ok, pool} ->
%{state | pools: Map.put(pools, address, pool)}
end
end
# A node went down: stop its pool child (kept in the supervisor so it can be
# restarted later) and stop routing queries to it.
defp toggle_pool(state, %{effect: "DOWN", address: address}) do
%{pool_supervisor: pool_supervisor, pools: pools} = state
Supervisor.terminate_child(pool_supervisor, address)
%{state | pools: Map.delete(pools, address)}
end
defp select_pool(:random, pools, _node_refs) do
{_address, pool} = Enum.random(pools)
pool
end
# :priority walks node_refs in the original `:nodes` order and picks the
# first node that currently has a running pool.
defp select_pool(:priority, pools, node_refs) do
Enum.find_value(node_refs, fn {_node_ref, address} ->
Map.get(pools, address)
end)
end
end
|
lib/xandra/cluster.ex
| 0.91462 | 0.645323 |
cluster.ex
|
starcoder
|
defmodule Commanded.EventStore.Adapters.Extreme do
@moduledoc """
Adapter to use <NAME>'s [Event Store](https://eventstore.org/), via the
Extreme TCP client, with Commanded.
"""
@behaviour Commanded.EventStore
require Logger
alias Commanded.EventStore.{
EventData,
RecordedEvent,
SnapshotData
}
alias Commanded.EventStore.Adapters.Extreme.{
Config,
PubSub,
Subscription,
SubscriptionsSupervisor
}
alias Commanded.EventStore.TypeProvider
alias Extreme.Msg, as: ExMsg
@event_store Commanded.EventStore.Adapters.Extreme.EventStore
@serializer Config.serializer()
@spec append_to_stream(String.t(), non_neg_integer, list(EventData.t())) ::
{:ok, stream_version :: non_neg_integer}
| {:error, :wrong_expected_version}
| {:error, term}
def append_to_stream(stream_uuid, expected_version, events) do
stream = stream_name(stream_uuid)
Logger.debug(fn ->
"Extreme event store attempting to append to stream \"#{stream}\" #{inspect(length(events))} event(s)"
end)
add_to_stream(stream, expected_version, events)
end
@spec stream_forward(String.t(), non_neg_integer, non_neg_integer) ::
Enumerable.t() | {:error, :stream_not_found} | {:error, term}
def stream_forward(stream_uuid, start_version \\ 0, read_batch_size \\ 1_000)
def stream_forward(stream_uuid, start_version, read_batch_size) do
stream = stream_name(stream_uuid)
start_version = normalize_start_version(start_version)
case execute_read(stream, start_version, read_batch_size, :forward) do
{:error, reason} ->
{:error, reason}
{:ok, events, true} ->
events
{:ok, events, false} ->
Stream.concat(
events,
execute_stream_forward(stream, start_version + length(events), read_batch_size)
)
end
end
@spec subscribe(String.t()) :: :ok | {:error, term}
def subscribe(stream_uuid) do
with {:ok, _} <- Registry.register(PubSub, stream_uuid, []) do
:ok
else
reply -> reply
end
end
@spec subscribe_to_all_streams(String.t(), pid, Commanded.EventStore.start_from()) ::
{:ok, subscription :: pid}
| {:error, :subscription_already_exists}
| {:error, term}
def subscribe_to_all_streams(subscription_name, subscriber, start_from \\ :origin)
def subscribe_to_all_streams(subscription_name, subscriber, start_from) do
stream = "$streams"
case SubscriptionsSupervisor.start_subscription(
stream,
subscription_name,
subscriber,
start_from
) do
{:ok, subscription} -> {:ok, subscription}
{:error, {:already_started, _}} -> {:error, :subscription_already_exists}
end
end
@spec ack_event(pid, RecordedEvent.t()) :: :ok
def ack_event(subscription, %RecordedEvent{event_number: event_number}) do
Subscription.ack(subscription, event_number)
end
@spec unsubscribe_from_all_streams(String.t()) :: :ok
def unsubscribe_from_all_streams(subscription_name) do
SubscriptionsSupervisor.stop_subscription(subscription_name)
end
@spec read_snapshot(String.t()) :: {:ok, SnapshotData.t()} | {:error, :snapshot_not_found}
def read_snapshot(source_uuid) do
stream = snapshot_stream(source_uuid)
Logger.debug(fn -> "Extreme event store read snapshot from stream: #{inspect(stream)}" end)
case read_backward(stream, -1, 1) do
{:ok, [recorded_event]} ->
{:ok, to_snapshot_data(recorded_event)}
{:error, :stream_not_found} ->
{:error, :snapshot_not_found}
err ->
Logger.error(fn -> "Extreme event store error reading snapshot: #{inspect(err)}" end)
err
end
end
@spec record_snapshot(SnapshotData.t()) :: :ok | {:error, term}
def record_snapshot(%SnapshotData{} = snapshot) do
  event_data = to_event_data(snapshot)
  stream = snapshot_stream(snapshot.source_uuid)

  Logger.debug(fn -> "Extreme event store record snapshot to stream: #{inspect(stream)}" end)

  # :any_version - snapshots never use optimistic concurrency. Any
  # non-matching (error) result is returned unchanged by `with`.
  with {:ok, _version} <- add_to_stream(stream, :any_version, [event_data]), do: :ok
end
@spec delete_snapshot(String.t()) :: :ok | {:error, term}
def delete_snapshot(source_uuid) do
  # Soft delete (hard_delete: false) so the snapshot stream can be
  # recreated later. Errors fall through `with` unchanged.
  stream = snapshot_stream(source_uuid)

  with {:ok, _response} <- Extreme.execute(@event_store, delete_stream_msg(stream, false)) do
    :ok
  end
end
# Soft-delete the stream for the given source; it can be recreated by
# appending new events.
@spec delete_stream(String.t()) :: :ok | {:error, term}
def delete_stream(source_uuid) do
  stream = stream_name(source_uuid)

  case Extreme.execute(@event_store, delete_stream_msg(stream, false)) do
    {:ok, _} -> :ok
    err -> err
  end
end
# Lazily stream pages of events forward from `start_version`.
#
# The accumulator is {next_version, halt?}: halt? becomes true once the
# server reports end-of-stream, so the final page is emitted before the
# stream halts on the next pull.
#
# NOTE(review): an {:error, _} return from execute_read/4 raises a
# CaseClauseError here rather than surfacing the error - confirm this
# is the intended behaviour for consumers of the stream.
defp execute_stream_forward(stream, start_version, read_batch_size) do
  Stream.resource(
    fn -> {start_version, false} end,
    fn {next_version, halt?} = acc ->
      case halt? do
        true ->
          {:halt, acc}

        false ->
          case execute_read(stream, next_version, read_batch_size, :forward) do
            {:ok, events, end_of_stream?} ->
              acc = {next_version + length(events), end_of_stream?}
              {events, acc}
          end
      end
    end,
    fn _ -> :ok end
  )
end
# Snapshots live in a dedicated per-source stream.
defp snapshot_stream(source_uuid), do: "snapshot-" <> source_uuid

# Currently a pass-through; stream names are used as-is.
defp stream_name(stream), do: stream

# Convert Commanded's 1-based start version to Event Store's 0-based
# event number. 0 passes through unchanged, meaning "start of stream".
defp normalize_start_version(0), do: 0
defp normalize_start_version(start_version), do: start_version - 1
# Rebuild a SnapshotData struct from its recorded (deserialized) form.
defp to_snapshot_data(%RecordedEvent{data: snapshot} = event) do
  data =
    snapshot.source_type
    # to_existing_atom/1: the source module must already be loaded;
    # avoids creating atoms from stored strings.
    |> String.to_existing_atom()
    |> struct(with_atom_keys(snapshot.data))
    |> Commanded.Serialization.JsonDecoder.decode()

  %SnapshotData{snapshot | data: data, created_at: event.created_at}
end
# Convert a map with string keys into one with (existing) atom keys.
#
# `String.to_existing_atom/1` is used deliberately so that unexpected
# keys raise rather than leaking new atoms from stored/external data.
defp with_atom_keys(map) do
  Map.new(map, fn {key, value} -> {String.to_existing_atom(key), value} end)
end
# Wrap a snapshot in EventData ready for appending to its snapshot stream.
defp to_event_data(%SnapshotData{} = snapshot) do
  %EventData{
    event_type: TypeProvider.to_string(snapshot),
    data: snapshot
  }
end
# Append events to a stream, translating Extreme replies to the
# adapter's contract. Returns {:ok, new_stream_version} on success.
defp add_to_stream(stream, expected_version, events) do
  case Extreme.execute(@event_store, write_events(stream, expected_version, events)) do
    {:ok, response} ->
      # last_event_number is 0-based; +1 yields the 1-based stream version.
      {:ok, response.last_event_number + 1}

    {:error, :WrongExpectedVersion, detail} ->
      # Optimistic concurrency conflict: another writer appended first.
      Logger.warn(fn ->
        "Extreme eventstore wrong expected version \"#{expected_version}\" due to: #{
          inspect(detail)
        }"
      end)

      {:error, :wrong_expected_version}

    err ->
      err
  end
end
# Build a DeleteStream protobuf message.
defp delete_stream_msg(stream, hard_delete) do
  ExMsg.DeleteStream.new(
    event_stream_id: stream,
    # -2 is Event Store's "Any" expected version: delete regardless of
    # the stream's current version.
    expected_version: -2,
    require_master: false,
    hard_delete: hard_delete
  )
end
# Read `count` events backwards starting at `start_version`.
# Callers pass -1 to read from the stream end (see read_snapshot/1).
defp read_backward(stream, start_version, count) do
  execute_read!(stream, start_version, count, :backward)
end

# Like execute_read/5 but discards the end-of-stream flag.
defp execute_read!(stream, start_version, count, direction) do
  case execute_read(stream, start_version, count, direction) do
    {:ok, events, _} -> {:ok, events}
    err -> err
  end
end
# Read up to `count` events from `start_version` in `direction`,
# re-issuing the read when the server returns fewer events than requested
# (which can occur with soft deleted streams). Returns
# {:ok, recorded_events, end_of_stream?} or an error tuple.
defp execute_read(stream, start_version, count, direction, read_events \\ []) do
  remaining_count = count - length(read_events)
  read_request = read_events(stream, start_version, remaining_count, direction)

  case Extreme.execute(@event_store, read_request) do
    {:ok, %ExMsg.ReadStreamEventsCompleted{} = result} ->
      %ExMsg.ReadStreamEventsCompleted{
        is_end_of_stream: end_of_stream?,
        events: events
      } = result

      read_events = read_events ++ events

      if end_of_stream? || length(read_events) == count do
        recorded_events = Enum.map(read_events, &to_recorded_event/1)
        {:ok, recorded_events, end_of_stream?}
      else
        # can occur with soft deleted streams
        start_version =
          case direction do
            :forward -> result.next_event_number
            :backward -> result.last_event_number
          end

        # Recurse with the ORIGINAL `count`, not `remaining_count`:
        # the accumulated `read_events` is passed along too, so passing
        # `remaining_count` here made the next frame's remaining count go
        # negative (and its termination check compare against the wrong
        # total) after a second partial read.
        execute_read(stream, start_version, count, direction, read_events)
      end

    {:error, :NoStream, _} ->
      {:error, :stream_not_found}

    err ->
      err
  end
end
# Convert Extreme protobuf event messages into Commanded RecordedEvent
# structs. Event Store numbers events from 0 while Commanded uses 1-based
# numbering, hence the +1. For resolved link events the link's position
# (not the linked event's) determines the event number.
def to_recorded_event(%ExMsg.ResolvedIndexedEvent{event: event, link: nil}),
  do: to_recorded_event(event, event.event_number + 1)

def to_recorded_event(%ExMsg.ResolvedIndexedEvent{event: event, link: link}),
  do: to_recorded_event(event, link.event_number + 1)

def to_recorded_event(%ExMsg.ResolvedEvent{event: event}),
  do: to_recorded_event(event, event.event_number + 1)

def to_recorded_event(%ExMsg.EventRecord{} = event),
  do: to_recorded_event(event, event.event_number + 1)

def to_recorded_event(%ExMsg.EventRecord{} = event, event_number) do
  %ExMsg.EventRecord{
    event_id: event_id,
    event_type: event_type,
    created_epoch: created_epoch,
    data: data,
    metadata: metadata
  } = event

  data = deserialize(data, type: event_type)

  # Metadata may be absent (nil or empty binary) on events written by
  # other clients; normalize to an empty map.
  metadata =
    case metadata do
      none when none in [nil, ""] -> %{}
      metadata -> deserialize(metadata)
    end

  # Causation/correlation ids are stored under reserved metadata keys;
  # pop them out so they do not leak into user metadata.
  {causation_id, metadata} = Map.pop(metadata, "$causationId")
  {correlation_id, metadata} = Map.pop(metadata, "$correlationId")

  %RecordedEvent{
    event_id: UUID.binary_to_string!(event_id),
    event_number: event_number,
    stream_id: to_stream_id(event),
    stream_version: event_number,
    causation_id: causation_id,
    correlation_id: correlation_id,
    event_type: event_type,
    data: data,
    metadata: metadata,
    created_at: to_naive_date_time(created_epoch)
  }
end
# Strip the prefix before the first "-" from the stream id (stream ids
# are of the form "<prefix>-<rest>"), keeping everything after it.
defp to_stream_id(%ExMsg.EventRecord{event_stream_id: event_stream_id}) do
  [_prefix | rest] = String.split(event_stream_id, "-")
  Enum.join(rest, "-")
end
# Convert Event Store's created_epoch (milliseconds since the Unix epoch)
# to a NaiveDateTime with millisecond precision.
# NOTE(review): assumes a non-negative epoch (post-1970) - floor and
# :erlang.rem/2 disagree for negative values; confirm if that matters.
defp to_naive_date_time(millis_since_epoch) do
  secs_since_epoch = round(Float.floor(millis_since_epoch / 1000))
  millis = :erlang.rem(millis_since_epoch, 1000)
  epoch_secs = :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
  erl_date = :calendar.gregorian_seconds_to_datetime(epoch_secs + secs_since_epoch)

  # {microseconds, precision}: keep 3 digits (millisecond precision).
  NaiveDateTime.from_erl!(erl_date, {millis * 1000, 3})
end
# Build an Extreme read request for a page of up to `max_count` events,
# in the given direction, resolving link events to their targets.
defp read_events(stream, from_event_number, max_count, direction) do
  msg_type =
    case direction do
      :forward -> ExMsg.ReadStreamEvents
      _backward -> ExMsg.ReadStreamEventsBackward
    end

  msg_type.new(
    event_stream_id: stream,
    from_event_number: from_event_number,
    max_count: max_count,
    resolve_link_tos: true,
    require_master: false
  )
end
# Delegate (de)serialization to the configured serializer module.
defp serialize(data), do: @serializer.serialize(data)
defp deserialize(data, opts \\ []), do: @serializer.deserialize(data, opts)

# Store causation/correlation ids under the reserved metadata keys used
# by to_recorded_event/2. Nil values are skipped; nil metadata is treated
# as an empty map.
defp add_causation_id(metadata, causation_id),
  do: add_to_metadata(metadata, "$causationId", causation_id)

defp add_correlation_id(metadata, correlation_id),
  do: add_to_metadata(metadata, "$correlationId", correlation_id)

defp add_to_metadata(metadata, key, value) when is_nil(metadata),
  do: add_to_metadata(%{}, key, value)

defp add_to_metadata(metadata, _key, value) when is_nil(value), do: metadata

defp add_to_metadata(metadata, key, value), do: Map.put(metadata, key, value)
# Build a WriteEvents protobuf message from Commanded event data.
defp write_events(stream_id, expected_version, events) do
  # Translate to Event Store's 0-based expected version:
  # -2 means "any version"; otherwise Commanded's 1-based version - 1.
  expected_version =
    case expected_version do
      :any_version -> -2
      _ -> expected_version - 1
    end

  proto_events =
    Enum.map(events, fn event ->
      metadata =
        event.metadata
        |> add_causation_id(event.causation_id)
        |> add_correlation_id(event.correlation_id)

      ExMsg.NewEvent.new(
        # Event Store expects the event id as a 16-byte UUID binary.
        event_id: UUID.uuid4() |> UUID.string_to_binary!(),
        event_type: event.event_type,
        # Content type 0: payload treated as opaque binary; encoding is
        # handled by @serializer. NOTE(review): confirm against the
        # Event Store TCP API documentation.
        data_content_type: 0,
        metadata_content_type: 0,
        data: serialize(event.data),
        metadata: serialize(metadata)
      )
    end)

  ExMsg.WriteEvents.new(
    event_stream_id: stream_id,
    expected_version: expected_version,
    events: proto_events,
    require_master: false
  )
end
end
|
lib/extreme.ex
| 0.787032 | 0.520496 |
extreme.ex
|
starcoder
|
require Utils
require Program
defmodule D13 do
  @moduledoc """
  --- Day 13: Care Package ---
  As you ponder the solitude of space and the ever-increasing three-hour roundtrip for messages between you and Earth, you notice that the Space Mail Indicator Light is blinking. To help keep you sane, the Elves have sent you a care package.
  It's a new game for the ship's arcade cabinet! Unfortunately, the arcade is all the way on the other end of the ship. Surely, it won't be hard to build your own - the care package even comes with schematics.
  The arcade cabinet runs Intcode software like the game the Elves sent (your puzzle input). It has a primitive screen capable of drawing square tiles on a grid. The software draws tiles to the screen with output instructions: every three output instructions specify the x position (distance from the left), y position (distance from the top), and tile id. The tile id is interpreted as follows:
  0 is an empty tile. No game object appears in this tile.
  1 is a wall tile. Walls are indestructible barriers.
  2 is a block tile. Blocks can be broken by the ball.
  3 is a horizontal paddle tile. The paddle is indestructible.
  4 is a ball tile. The ball moves diagonally and bounces off objects.
  Start the game. How many block tiles are on the screen when the game exits?
  --- Part Two ---
  The game didn't run because you didn't put in any quarters. Unfortunately, you did not bring any quarters. Memory address 0 represents the number of quarters that have been inserted; set it to 2 to play for free.
  The arcade cabinet has a joystick that can move left and right. The software reads the position of the joystick with input instructions:
  If the joystick is in the neutral position, provide 0.
  If the joystick is tilted to the left, provide -1.
  If the joystick is tilted to the right, provide 1.
  The arcade cabinet also has a segment display capable of showing a single number that represents the player's current score. When three output instructions specify X=-1, Y=0, the third output instruction is not a tile; the value instead specifies the new score to show in the segment display. For example, a sequence of output values like -1,0,12345 would show 12345 as the player's current score.
  Beat the game by breaking all the blocks. What is your score after the last block is broken?
  """

  @behaviour Day

  # Render the board as emoji rows, useful for debugging the game state.
  #
  # NOTE(review): each triple is destructured as [type, y, x], implying
  # `output` holds the Intcode outputs most-recent-first (the reverse of
  # the emitted x, y, tile-id order) - confirm against Program.run/1.
  def print_game_board(output) do
    output
    |> Enum.chunk_every(3)
    # Last write wins per {x, y} cell.
    |> Enum.reduce(%{}, fn [type, y, x], map -> Map.put(map, {x, y}, type) end)
    # Group cells into rows keyed by y, render rows top-to-bottom.
    |> Enum.group_by(fn {{_x, y}, _v} -> y end, fn {{x, _y}, v} -> {x, v} end)
    |> Enum.sort()
    |> Enum.map(fn {_y, list} ->
      list
      |> Enum.sort()
      |> Enum.map(fn {_x, v} ->
        # Tile ids: 0 empty, 1 wall, 2 block, 3 paddle, 4 ball.
        case v do
          0 -> "β¬"
          1 -> "π«"
          2 -> "β¬"
          3 -> "π©"
          4 -> "π΅"
        end
      end)
      |> Enum.join()
    end)
    |> Enum.join("\n")
    |> IO.puts()
  end

  # Solve both parts. Part 2 beats the game by patching program memory
  # rather than actually playing: the paddle row is replaced with paddles
  # across the whole width so the ball can never fall past it.
  def solve(input) do
    input = input |> Utils.to_ints()
    program = Program.new(input)

    # Run the unmodified game once to draw the initial board.
    %Program{output: board} = Program.run(program)

    # Part 1: count block tiles (id 2). The first element of each triple
    # is treated as the tile id - consistent with the reversed-output
    # layout noted on print_game_board/1.
    part_1 =
      board
      |> Enum.chunk_every(3)
      |> Enum.map(fn [x, _, _] -> x end)
      |> Enum.count(fn x -> x == 2 end)

    # set paddle to full width
    # Locate the paddle row in raw memory: wall (1), then 37 empties plus
    # one wall and the paddle (3) in some order, ending with a wall.
    part_2_entry =
      input
      |> Enum.chunk_every(40, 1)
      |> Enum.find_index(fn
        [1 | rest] -> Enum.sort(rest) == List.duplicate(0, 37) ++ [1, 3] and List.last(rest) == 1
        _ -> false
      end)

    # Replacement row: walls at both ends, paddles everywhere in between.
    part_2_hack = [1] ++ List.duplicate(3, 38) ++ [1]
    part_2_hacked = Program.hack(program, part_2_entry, part_2_hack)
    # add quarters
    part_2_hacked = Program.hack(part_2_hacked, 0, [2])
    # set input
    # Keep the joystick neutral for the whole game; the full-width paddle
    # does the rest.
    part_2_hacked = %{part_2_hacked | input: List.duplicate(0, 100_000)}

    # NOTE(review): takes the final score as the head of the (reversed)
    # output list - confirm against Program.run/1.
    %Program{output: [part_2 | _]} = Program.run(part_2_hacked)

    {
      part_1,
      part_2
    }
  end
end
|
lib/days/13.ex
| 0.705582 | 0.723432 |
13.ex
|
starcoder
|
defmodule Relay.ProtobufUtil do
  @moduledoc """
  Utility functions for working with Protobuf structs, primarily when using the
  Google.Protobuf types.
  """

  alias Google.Protobuf.{Any, BoolValue, Struct, ListValue, Value}

  # Resolve the actual {field, value} pairs currently set for each oneof
  # group in the given struct; unset oneofs are omitted.
  defp oneof_actual_vals(message_props, struct) do
    # Copy/pasta-ed from:
    # https://github.com/tony612/protobuf-elixir/blob/a4389fe18edc70430563d8591aa05bd3dba60adc/lib/protobuf/encoder.ex#L153-L160
    Enum.reduce(message_props.oneof, %{}, fn {oneof_field, _}, acc ->
      case Map.get(struct, oneof_field) do
        {field, value} -> Map.put(acc, field, value)
        nil -> acc
      end
    end)
  end

  @doc """
  Pack a Protobuf struct into a Google.Protobuf.Struct type.

  This packing assumes that the Struct will be unpacked into a Protobuf type on
  the "other side of the wire", rather than a language-specific type. Because of
  this, Protobuf fields with default or null values will not be included in the
  produced Struct.

  The Protobuf struct will be validated before packing.
  """
  @spec mkstruct(struct) :: Struct.t()
  def mkstruct(%mod{} = struct) do
    Protobuf.Validator.validate!(struct)

    message_props = mod.__message_props__()
    oneofs = oneof_actual_vals(message_props, struct)

    fields =
      Enum.reduce(message_props.field_props, %{}, fn {_, field_prop}, acc ->
        # Oneof fields are read from the resolved oneof values; all other
        # fields come straight from the struct.
        source = if field_prop.oneof, do: oneofs, else: struct
        value = Map.get(source, field_prop.name_atom)
        default = Protobuf.Builder.field_default(message_props.syntax, field_prop)

        # Skip unset (nil) and default values so they are not emitted.
        case value do
          nil -> acc
          ^default -> acc
          _ -> Map.put(acc, field_prop.name, struct_value(value))
        end
      end)

    Struct.new(fields: fields)
  end

  # Wrap a native value in the appropriate Google.Protobuf.Value kind.
  defp struct_value(number) when is_number(number), do: value(:number_value, number)
  defp struct_value(string) when is_binary(string), do: value(:string_value, string)
  defp struct_value(bool) when is_boolean(bool), do: value(:bool_value, bool)

  # An already-packed Struct is wrapped as-is; any other struct is packed
  # with mkstruct/1 first (clause order matters here).
  defp struct_value(%Struct{} = struct), do: value(:struct_value, struct)
  defp struct_value(%_{} = struct), do: value(:struct_value, mkstruct(struct))

  defp struct_value(list) when is_list(list) do
    values = Enum.map(list, &struct_value/1)
    value(:list_value, ListValue.new(values: values))
  end

  defp value(kind, val), do: Value.new(kind: {kind, val})

  @doc """
  Encode a Protobuf struct into a Google.Protobuf.Any type.
  """
  @spec mkany(String.t(), struct) :: Any.t()
  def mkany(type_url, %mod{} = value), do: Any.new(type_url: type_url, value: mod.encode(value))

  @doc """
  Utility function for working with Google.Protobuf.<type>Values wrapper types.

  Types such as `BoolValue` exist to make values "nullable" or unset. In a
  protobuf, primitives have default values and so can't be "unset". By wrapping
  primitive types in these wrapper types (which is what this function does),
  these values can be unset.
  """
  @spec mkvalue(nil) :: nil
  def mkvalue(nil), do: nil

  @spec mkvalue(boolean) :: BoolValue.t()
  def mkvalue(value) when is_boolean(value), do: BoolValue.new(value: value)
end
|
lib/relay/protobuf_util.ex
| 0.791982 | 0.494995 |
protobuf_util.ex
|
starcoder
|
defmodule Plug.Router do
  @moduledoc ~S"""
  A DSL to define a routing algorithm that works with Plug.
  It provides a set of macros to generate routes. For example:

      defmodule AppRouter do
        use Plug.Router

        plug :match
        plug :dispatch

        get "/hello" do
          send_resp(conn, 200, "world")
        end

        match _ do
          send_resp(conn, 404, "oops")
        end
      end

  Each route needs to return a connection, as per the Plug spec.

  A catch all `match` is recommended to be defined, as in the example
  above, otherwise routing fails with a function clause error.

  The router is a plug, which means it can be invoked as:

      AppRouter.call(conn, AppRouter.init([]))

  Notice the router contains a plug pipeline and by default it requires
  two plugs: `match` and `dispatch`. `match` is responsible for
  finding a matching route which is then forwarded to `dispatch`.
  This means users can easily hook into the router mechanism and add
  behaviour before match, before dispatch or after both.

  ## Routes

      get "/hello" do
        send_resp(conn, 200, "world")
      end

  In the example above, a request will only match if it is
  a `GET` request and the route "/hello". The supported
  HTTP methods are `get`, `post`, `put`, `patch`, `delete`
  and `options`.

  A route can also specify parameters which will then be
  available in the function body:

      get "/hello/:name" do
        send_resp(conn, 200, "hello #{name}")
      end

  Routes allow for globbing which will match the remaining parts
  of a route and can be available as a parameter in the function
  body, also note that a glob can't be followed by other segments:

      get "/hello/*_rest" do
        send_resp(conn, 200, "matches all routes starting with /hello")
      end

      get "/hello/*glob" do
        send_resp(conn, 200, "route after /hello: #{inspect glob}")
      end

  Finally, a general `match` function is also supported:

      match "/hello" do
        send_resp(conn, 200, "world")
      end

  A `match` will match any route regardless of the HTTP method.
  Check `match/3` for more information on how route compilation
  works and a list of supported options.

  ## Error handling

  In case something goes wrong in a request, the router allows
  the developer to customize what is rendered via the `handle_errors/2`
  callback:

      defmodule AppRouter do
        use Plug.Router

        plug :match
        plug :dispatch

        get "/hello" do
          send_resp(conn, 200, "world")
        end

        defp handle_errors(conn, %{kind: _kind, reason: _reason, stack: _stack}) do
          send_resp(conn, conn.status, "Something went wrong")
        end
      end

  The callback receives a connection and a map containing the exception
  kind (throw, error or exit), the reason (an exception for errors or
  a term for others) and the stacktrace. After the callback is invoked,
  the error is re-raised.

  It is advised to do as little work as possible when handling errors
  and avoid accessing data like parameters and session, as the parsing
  of those is what could have led the error to trigger in the first place.

  Also notice that those pages are going to be shown in production. If
  you are looking for error handling to help during development, consider
  using `Plug.Debugger`.

  ## Routes compilation

  All routes are compiled to a match function that receives
  three arguments: the method, the request path split on "/"
  and the connection. Consider this example:

      match "/foo/bar", via: :get do
        send_resp(conn, 200, "hello world")
      end

  It is compiled to:

      defp match("GET", ["foo", "bar"], conn) do
        send_resp(conn, 200, "hello world")
      end

  This opens up a few possibilities. First, guards can be given
  to match:

      match "/foo/:bar" when size(bar) <= 3, via: :get do
        send_resp(conn, 200, "hello world")
      end

  Second, a list of split path segments (which is the compiled result)
  is also allowed:

      match ["foo", bar], via: :get do
        send_resp(conn, 200, "hello world")
      end

  After a match is found, the block given as `do/end` is stored
  as a function in the connection. This function is then retrieved
  and invoked in the `dispatch` plug.
  """

  @doc false
  defmacro __using__(_) do
    quote location: :keep do
      import Plug.Router
      @before_compile Plug.Router

      use Plug.Builder

      def call(conn, opts) do
        try do
          plug_builder_call(conn, opts)
        catch
          kind, reason ->
            Plug.Router.__catch__(conn, kind, reason, System.stacktrace, &handle_errors/2)
        end
      end

      defp match(conn, _opts) do
        # Store the matched route handler in the connection; the
        # `dispatch` plug retrieves and invokes it afterwards.
        Plug.Conn.put_private(conn,
          :plug_route,
          do_match(conn.method, conn.path_info, conn.host))
      end

      # Note: no fields are bound from the pattern (binding `assigns`
      # here was unused and produced a compiler warning in every module
      # that uses the router).
      defp dispatch(%Plug.Conn{} = conn, _opts) do
        Map.get(conn.private, :plug_route).(conn)
      end

      # Default error page; overridable (see "Error handling" above).
      defp handle_errors(conn, _assigns) do
        send_resp(conn, conn.status, "Something went wrong")
      end

      defoverridable [match: 2, dispatch: 2, call: 2, handle_errors: 2]
    end
  end

  @doc false
  defmacro __before_compile__(_env) do
    quote do
      import Plug.Router, only: []
    end
  end

  @already_sent {:plug_conn, :sent}

  @doc false
  def __catch__(conn, kind, reason, stack, handle_errors) do
    # If a response was already sent, put the adapter notification back
    # in the mailbox before re-raising; otherwise render the error page.
    receive do
      @already_sent ->
        send self(), @already_sent
    after
      0 ->
        reason = Exception.normalize(kind, reason, stack)

        conn
        |> Plug.Conn.put_status(status(kind, reason))
        |> handle_errors.(%{kind: kind, reason: reason, stack: stack})
    end

    :erlang.raise(kind, reason, stack)
  end

  # Map the caught kind/reason to an HTTP status code.
  defp status(:error, error), do: Plug.Exception.status(error)
  defp status(:throw, _throw), do: 500
  defp status(:exit, _exit), do: 500

  ## Match

  @doc """
  Main API to define routes.

  It accepts an expression representing the path and many options
  allowing the match to be configured.

  ## Examples

      match "/foo/bar", via: :get do
        send_resp(conn, 200, "hello world")
      end

  ## Options

  `match/3` and the other route macros accept the following options:

  * `:host` - the host which the route should match. Defaults to `nil`,
    meaning no host match, but can be a string like "example.com" or a
    string ending with ".", like "subdomain." for a subdomain match

  * `:via` - matches the route against some specific HTTP methods

  * `:do` - contains the implementation to be invoked in case
    the route matches

  """
  defmacro match(path, options, contents \\ []) do
    compile(nil, path, options, contents)
  end

  @doc """
  Dispatches to the path only if it is a GET request.
  See `match/3` for more examples.
  """
  defmacro get(path, options, contents \\ []) do
    compile(:get, path, options, contents)
  end

  @doc """
  Dispatches to the path only if it is a POST request.
  See `match/3` for more examples.
  """
  defmacro post(path, options, contents \\ []) do
    compile(:post, path, options, contents)
  end

  @doc """
  Dispatches to the path only if it is a PUT request.
  See `match/3` for more examples.
  """
  defmacro put(path, options, contents \\ []) do
    compile(:put, path, options, contents)
  end

  @doc """
  Dispatches to the path only if it is a PATCH request.
  See `match/3` for more examples.
  """
  defmacro patch(path, options, contents \\ []) do
    compile(:patch, path, options, contents)
  end

  @doc """
  Dispatches to the path only if it is a DELETE request.
  See `match/3` for more examples.
  """
  defmacro delete(path, options, contents \\ []) do
    compile(:delete, path, options, contents)
  end

  @doc """
  Dispatches to the path only if it is an OPTIONS request.
  See `match/3` for more examples.
  """
  defmacro options(path, options, contents \\ []) do
    compile(:options, path, options, contents)
  end

  @doc """
  Forwards requests to another Plug. The path_info of the forwarded
  connection will exclude the portion of the path specified in the
  call to `forward`.

  ## Examples

      forward "/users", to: UserRouter

  ## Options

  `forward` accepts the following options:

  * `:to` - a Plug where the requests will be forwarded

  * `:host` - a string representing the host or subdomain, exactly like in
    `match/3`

  All remaining options are passed to the underlying plug.
  """
  defmacro forward(path, options) when is_binary(path) do
    quote bind_quoted: [path: path, options: options] do
      {target, options} = Keyword.pop(options, :to)
      {options, plug} = Keyword.split(options, [:host])

      if is_nil(target) or !is_atom(target) do
        raise ArgumentError, message: "expected :to to be an alias or an atom"
      end

      # The target plug is initialized at compile time.
      @plug_forward_target target
      @plug_forward_opts target.init(plug)

      match path <> "/*glob", options do
        Plug.Router.Utils.forward(var!(conn), var!(glob), @plug_forward_target, @plug_forward_opts)
      end
    end
  end

  ## Match Helpers

  @doc false
  def __route__(method, path, guards, options) do
    {method, guards} = build_methods(List.wrap(method || options[:via]), guards)
    {_vars, match} = Plug.Router.Utils.build_match(path)
    {method, match, build_host(options[:host]), guards}
  end

  # Entry point for both forward and match that is actually
  # responsible to compile the route.
  defp compile(method, expr, options, contents) do
    {body, options} =
      cond do
        b = contents[:do] ->
          {b, options}
        options[:do] ->
          Keyword.pop(options, :do)
        true ->
          raise ArgumentError, message: "expected :do to be given as option"
      end

    {path, guards} = extract_path_and_guards(expr)

    quote bind_quoted: [method: method,
                        path: path,
                        options: options,
                        guards: Macro.escape(guards, unquote: true),
                        body: Macro.escape(body, unquote: true)] do
      {method, match, host, guards} = Plug.Router.__route__(method, path, guards, options)

      defp do_match(unquote(method), unquote(match), unquote(host)) when unquote(guards) do
        fn var!(conn) -> unquote(body) end
      end
    end
  end

  # Convert the verbs given with :via into a variable
  # and guard set that can be added to the dispatch clause.
  defp build_methods([], guards) do
    {quote(do: _), guards}
  end

  defp build_methods([method], guards) do
    {Plug.Router.Utils.normalize_method(method), guards}
  end

  defp build_methods(methods, guards) do
    methods = Enum.map methods, &Plug.Router.Utils.normalize_method(&1)
    var = quote do: method
    guards = join_guards(quote(do: unquote(var) in unquote(methods)), guards)
    {var, guards}
  end

  defp join_guards(fst, true), do: fst
  defp join_guards(fst, snd), do: (quote do: unquote(fst) and unquote(snd))

  # Build the host match: nil matches anything; a trailing "." matches
  # any subdomain prefix; a plain binary must match exactly.
  defp build_host(host) do
    cond do
      is_nil host -> quote do: _
      String.last(host) == "." -> quote do: unquote(host) <> _
      is_binary host -> host
    end
  end

  # Extract the path and guards from the path.
  defp extract_path_and_guards({:when, _, [path, guards]}), do: {extract_path(path), guards}
  defp extract_path_and_guards(path), do: {extract_path(path), true}

  defp extract_path({:_, _, var}) when is_atom(var), do: "/*_path"
  defp extract_path(path), do: path
end
|
lib/plug/router.ex
| 0.878145 | 0.503174 |
router.ex
|
starcoder
|
defmodule Kernel.SpecialForms do
  @moduledoc """
  In this module we define Elixir special forms. Those are called
  special forms because they cannot be overridden by the developer
  and sometimes have lexical scope (like `alias`, `import`, etc).

  This module also documents Elixir's pseudo variables (`__MODULE__`,
  `__FILE__`, `__ENV__` and `__CALLER__`). Pseudo variables return
  information about Elixir's compilation environment and can only
  be read, never assigned to.

  Finally, it also documents three special forms (`__block__`,
  `__scope__` and `__aliases__`), which are not intended to be
  called directly by the developer but they appear in quoted
  contents since they are essential in Elixir's constructions.
  """
  @doc """
  Defines a new tuple from the given arguments.

  ## Examples

      :{}.(1,2,3)
      { 1, 2, 3 }

  """
  defmacro :{}.(args)
  @doc """
  Defines a new list from the given arguments.

  ## Examples

      :[].(1,2,3)
      [ 1, 2, 3 ]

  """
  defmacro :[].(args)
  @doc """
  Defines a new bitstring from the given arguments.

  ## Examples

      :<<>>.(1,2,3)
      << 1, 2, 3 >>

  """
  defmacro :<<>>.(args)
  @doc """
  `alias` is used to set up atom aliases, often useful with module names.

  ## Examples

  `alias` can be used to set up an alias for any module:

      defmodule Math do
        alias MyKeyword, as: Keyword
      end

  In the example above, we have set up `MyKeyword` to be aliased
  as `Keyword`. So now, any reference to `Keyword` will be
  automatically replaced by `MyKeyword`.

  In case one wants to access the original `Keyword`, it can be done
  by accessing Elixir:

      Keyword.values        #=> uses MyKeyword.values
      Elixir.Keyword.values #=> uses Keyword.values

  Notice that calling `alias` without the `as:` option automatically
  sets an alias based on the last part of the module. For example:

      alias Foo.Bar.Baz

  Is the same as:

      alias Foo.Bar.Baz, as: Baz

  ## Lexical scope

  `import`, `require` and `alias` are called directives and all
  have lexical scope. This means you can set up aliases inside
  specific functions and it won't affect the overall scope.
  """
  defmacro alias(module, opts)
  @doc """
  `require` is used to require the presence of external
  modules so macros can be invoked.

  ## Examples

  Notice that usually modules should not be required before usage,
  the only exception is if you want to use the macros from a module.
  In such cases, you need to explicitly require them.

  Let's suppose you created your own `if` implementation in the module
  `MyMacros`. If you want to invoke it, you need to explicitly
  require `MyMacros` first:

      defmodule Math do
        require MyMacros
        MyMacros.if do_something, it_works
      end

  An attempt to call a macro that was not loaded will raise an error.

  ## Alias shortcut

  `require` also accepts `as:` as an option so it automatically sets
  up an alias. Please check `alias` for more information.
  """
  defmacro require(module, opts)
  @doc """
  `import` allows one to easily access functions or macros from
  other modules without using the qualified name.

  ## Examples

  If you want to use the `values` function from `Keyword` several times
  in your module and you don't want to always type `Keyword.values`,
  you can simply import it:

      defmodule Math do
        import Keyword, only: [values: 1]

        def some_function do
          # call values(keyword)
        end
      end

  In this case, we are importing only the function `values` (with arity 1)
  from `Keyword`. Although `only` is optional, its usage is recommended.
  `except` could also be given as an option. If no option is given, all
  functions and macros are imported.

  In case you want to import only functions or macros, you can pass a
  first argument selecting the scope:

      import :macros, MyMacros

  And you can then use `only` or `except` to filter the macros being
  included.

  ## Lexical scope

  It is important to notice that `import` is lexical. This means you
  can import specific macros inside specific functions:

      defmodule Math do
        def some_function do
          # 1) Disable `if/2` from Kernel
          import Kernel, except: [if: 2]

          # 2) Require the new `if` macro from MyMacros
          import MyMacros

          # 3) Use the new macro
          if do_something, it_works
        end
      end

  In the example above, we imported macros from `MyMacros`, replacing
  the original `if/2` implementation by our own during that
  specific function. All other functions in that module will still
  be able to use the original one.

  ## Alias/Require shortcut

  All imported modules are also required by default. `import`
  also accepts `as:` as an option so it automatically sets up
  an alias. Please check `alias` for more information.
  """
  defmacro import(module, opts)
  @doc """
  Returns the current environment information as a `Macro.Env`
  record. In the environment you can access the current file name,
  line numbers, set up aliases, the current function and more.
  """
  defmacro __ENV__
  @doc """
  Returns the current module name as an atom or nil otherwise.
  Although the module can be accessed in `__ENV__`, this macro
  is a convenient shortcut.
  """
  defmacro __MODULE__
  @doc """
  Returns the current file name as a binary.
  Although the file can be accessed in `__ENV__`, this macro
  is a convenient shortcut.
  """
  defmacro __FILE__
  @doc """
  Allows you to get the representation of any expression.

  ## Examples

      quote do: sum(1, 2, 3)
      #=> { :sum, 0, [1, 2, 3] }

  ## Homoiconicity

  Elixir is a homoiconic language. Any Elixir program can be
  represented using its own data structures. The building block
  of Elixir homoiconicity is a tuple with three elements, for example:

      { :sum, 1, [1, 2, 3] }

  The tuple above represents a function call to sum passing 1, 2 and
  3 as arguments. The tuple elements are:

  * The first element of the tuple is always an atom or
    another tuple in the same representation;
  * The second element of the tuple is always an integer
    representing the line number;
  * The third element of the tuple are the arguments for the
    function call. The third argument may be an atom, meaning
    that it may be a variable.

  ## Macro literals

  Besides the tuple described above, Elixir has a few literals that
  when quoted return themselves. They are:

      :sum         #=> Atoms
      1            #=> Integers
      2.0          #=> Floats
      [1,2]        #=> Lists
      "binaries"   #=> Binaries
      {key, value} #=> Tuple with two elements

  ## Hygiene

  Elixir macros are hygienic with regard to variables. This means
  a variable defined in a macro cannot affect the scope where
  the macro is included. Consider the following example:

      defmodule Hygiene do
        defmacro no_interference do
          quote do: a = 1
        end
      end

      require Hygiene

      a = 10
      Hygiene.no_interference
      a #=> 10

  In the example above, `a` returns 10 even if the macro
  is apparently setting it to 1 because the variables defined
  in the macro do not affect the context the macro is
  executed in. If you want to set or get a variable, you can do
  it with the help of the `var!` macro:

      defmodule NoHygiene do
        defmacro interference do
          quote do: var!(a) = 1
        end
      end

      require NoHygiene

      a = 10
      NoHygiene.interference
      a #=> 1

  Notice that aliases are not hygienic in Elixir, ambiguity
  must be solved by prepending Elixir:

      quote do
        Elixir.Foo #=> Access the root Foo
        Foo        #=> Access the Foo alias in the current module
                       (if any is set), then fallback to Elixir.Foo
      end

  ## Options

  * `:hygiene` - When false, disables hygiene;
  * `:unquote` - When false, disables unquoting. Useful when you have a quote
    inside another quote and want to control which quote is able to unquote;
  * `:location` - When set to `:keep`, keeps the current line and file on quotes.
    Read the Stacktrace information section below for more information;

  ## Stacktrace information

  One of Elixir's goals is to provide a proper stacktrace whenever there is an
  exception. In order to work properly with macros, the default behavior
  in quote is to set the line to 0. When a macro is invoked and the quoted
  expression is expanded, 0 is replaced by the line of the call site.

  This is a good behavior for the majority of the cases, except if the macro
  is defining new functions. Consider this example:

      defmodule MyServer do
        use GenServer.Behavior
      end

  `GenServer.Behavior` defines new functions in our `MyServer` module.
  However, if there is an exception in any of these functions, we want
  the stacktrace to point to the `GenServer.Behavior` and not the line
  that calls `use GenServer.Behavior`. For this reason, there is an
  option called `:location` that when set to `:keep` keeps these proper
  semantics:

      quote location: :keep do
        def handle_call(request, _from, state) do
          { :reply, :undef, state }
        end
      end

  It is important to warn though that `location: :keep` evaluates the
  code as if it was defined inside `GenServer.Behavior` file, in
  particular, the macro `__FILE__` will always point to
  `GenServer.Behavior` file.
  """
  defmacro quote(opts, do: contents)
@doc """
Unquotes the given expression from inside a macro.

## Examples

Imagine the situation where you have a variable `value` and
you want to inject it inside some quote. The first attempt
would be:

    value = 13
    quote do: sum(1, value, 3)

Which would then return:

    { :sum, 0, [1, { :value, 0, quoted }, 3] }

Which is not the expected result. For this, we use unquote:

    value = 13
    quote do: sum(1, unquote(value), 3)
    #=> { :sum, 0, [1, 13, 3] }

"""
defmacro unquote(expr)
@doc """
Unquotes the given list, expanding (splicing) its elements into the
surrounding call. Similar to `unquote`.

## Examples

    values = [2,3,4]
    quote do: sum(1, unquote_splicing(values), 5)
    #=> { :sum, 0, [1, 2, 3, 4, 5] }

"""
defmacro unquote_splicing(expr)
@doc """
List comprehensions allow you to quickly build a list from another list:

    lc n inlist [1,2,3,4], do: n * 2
    #=> [2,4,6,8]

A comprehension accepts many generators and also filters. Generators
are defined using both `inlist` and `inbits` operators, allowing you
to loop over lists and bitstrings:

    # A list generator:
    lc n inlist [1,2,3,4], do: n * 2
    #=> [2,4,6,8]

    # A bit string generator:
    lc <<n>> inbits <<1,2,3,4>>, do: n * 2
    #=> [2,4,6,8]

    # A generator from a variable:
    list = [1,2,3,4]
    lc n inlist list, do: n * 2
    #=> [2,4,6,8]

    # A comprehension with two generators
    lc x inlist [1,2], y inlist [2,3], do: x*y
    #=> [2,3,4,6]

Filters can also be given:

    # A comprehension with a generator and a filter
    lc n inlist [1,2,3,4,5,6], rem(n, 2) == 0, do: n
    #=> [2,4,6]

Bit string generators are quite useful when you need to
organize bit string streams:

    iex> pixels = <<213,45,132,64,76,32,76,0,0,234,32,15>>
    iex> lc <<r:8,g:8,b:8>> inbits pixels, do: {r,g,b}
    [{213,45,132},{64,76,32},{76,0,0},{234,32,15}]

"""
defmacro lc(args)
@doc """
Defines a bit comprehension. It follows the same syntax as
a list comprehension but expects each element returned to
be a bitstring. For example, here is how to remove all
spaces from a string:

    bc <<c>> inbits " hello world ", c != ? , do: <<c>>
    "helloworld"

"""
defmacro bc(args)
@doc """
This is the special form used whenever we have a block
of expressions in Elixir. This special form is private
and should not be invoked directly:

    quote do: (1; 2; 3)
    #=> { :__block__, 0, [1,2,3] }

"""
defmacro __block__(args)
@doc """
This is the special form used whenever we have to temporarily
change the scope information of a block. Used when `quote` is
invoked with `location: :keep` to execute a given block as if
it belonged to another file.

    quote location: :keep, do: 1
    #=> { :__scope__, 1,[[file: "iex"],[do: 1]] }

Check `quote/1` for more information.
"""
defmacro __scope__(opts, args)
@doc """
This is the special form used to hold aliases information.
At compilation time, it is usually compiled to an atom:

    quote do: Foo.Bar
    #=> { :__aliases__, 0, [:Foo,:Bar] }

"""
defmacro __aliases__(args)
end
|
lib/elixir/lib/kernel/special_forms.ex
| 0.916692 | 0.425725 |
special_forms.ex
|
starcoder
|
defprotocol Observable do
  @moduledoc """
  Defines the subscribe function to subscribe to a calculation. The
  observer must follow the `Observer` protocol to be signalled about
  new values, errors and the completion of the calculation.
  """

  @doc """
  Subscribes `observer` to `observable`. Per the typespec the result is
  a tuple whose second element is a `Disposable` that can be used to
  cancel the subscription.
  """
  @spec subscribe(Observable.t, Observer.t) :: {any, Disposable.t}
  def subscribe(observable, observer)
end
defprotocol Observer do
  @moduledoc """
  Defines the functions for providing a new value, to signal an error
  and to signal the completion of the observed calculation.

  Calls to the observer follow the regular sequence

      on_next* (on_error | on_completed)?

  It is the task of `on_error` and `on_completed` to free up
  all internal resources. In particular the subscription needs
  to be closed. This can be done by calling `unsubscribe`.
  """

  @doc "Delivers the next value of the observed sequence to the observer."
  @spec on_next(Observer.t, any) :: :ok
  def on_next(observer, value)

  @doc "Signals that the observed calculation failed with `exception`; no further values follow."
  @spec on_error(Observer.t, any) :: :ok
  def on_error(observer, exception)

  @doc "Signals that `observable` has completed; no further values follow."
  @spec on_completed(Observer.t, Observable.t) :: :ok
  def on_completed(observer, observable)
end
defprotocol Subscription do
  @moduledoc """
  Defines the protocol for subscriptions, providing a little
  bit more functionality than a `Disposable` in the .NET Framework.
  """

  @doc "Cancels the subscription."
  @spec unsubscribe(Subscription.t) :: :ok
  def unsubscribe(subscription)

  @doc "Returns `true` if the subscription has already been cancelled."
  @spec is_unsubscribed?(Subscription.t) :: boolean
  def is_unsubscribed?(subscription)
end
defprotocol Runnable do
  @moduledoc """
  Defines a protocol for starting a sequence of events. The basic idea
  is that only after calling `run` the sending of events starts, at least
  for "cold" observables, which need to be started explicitly.

  There are usually two basic modes of implementing `run`:

  * do some side effect for producing events, e.g. fork a new process sending
    events. This is the usual implementation for sources of an event sequence
  * in an in-between node, you will simply call `run` on your sources

  Some of the `Rx` functions will call `run` to start the event sequence immediately.
  In particular functions like `to_list` or `stream` do it inside their implementation.

  Where do we need an implementation of `Runnable`?

  * `Observables` need to implement `Runnable` such that we can start them from
    the outside on request.
  * The source connection between two `Observables` (i.e. `Rx_Impl` for now) needs
    to contain a `Runnable` as first component.
  """

  @doc "Runs, i.e. starts, an event sequence. It returns its input parameter."
  @spec run(Runnable.t) :: Runnable.t
  def run(runnable)
end
|
lib/reaxive/rx_proto.ex
| 0.866076 | 0.657748 |
rx_proto.ex
|
starcoder
|
defmodule ExWire.DEVp2p do
  @moduledoc """
  Functions that deal directly with the DEVp2p Wire Protocol.

  For more information, please see:
  https://github.com/ethereum/wiki/wiki/%C3%90%CE%9EVp2p-Wire-Protocol
  """

  alias ExWire.{Config, Packet}

  defmodule Session do
    @moduledoc """
    Module to hold struct for a DEVp2p Wire Protocol session.
    The session should be active when `Hello` messages have been exchanged.

    See https://github.com/ethereum/wiki/wiki/%C3%90%CE%9EVp2p-Wire-Protocol#session-management
    """

    alias ExWire.Packet.Hello

    # Each side of the handshake is either `false` (not yet exchanged)
    # or the `Hello` packet that was sent/received.
    @type handshake_status :: boolean | ExWire.Packet.Hello.t()
    @type t :: %Session{hello_sent: handshake_status, hello_received: handshake_status}

    defstruct hello_sent: false, hello_received: false

    @doc """
    Checks whether or not the session is active.

    A session is only active if the handshake is complete and if there are overlapping capabilities, meaning
    that some of the sub-protocols are the same (e.g. eth 62)

    ## Examples

        iex> hello_received = %ExWire.Packet.Hello{caps: [{"eth", 62}]}
        iex> hello_sent = %ExWire.Packet.Hello{caps: [{"eth", 62}]}
        iex> ExWire.DEVp2p.Session.active?(%ExWire.DEVp2p.Session{hello_received: hello_received, hello_sent: hello_sent})
        true
    """
    @spec active?(t) :: boolean()
    def active?(%Session{hello_received: false}), do: false
    def active?(%Session{hello_sent: false}), do: false

    # Both sides exchanged a Hello: activity now depends on overlapping caps.
    def active?(session = %Session{hello_sent: %Hello{}, hello_received: %Hello{}}) do
      compatible_capabilities?(session)
    end

    @doc """
    Marks the session as disconnected by resetting both sides of the
    handshake back to `false`.
    """
    @spec disconnect(t) :: Session.t()
    def disconnect(session = %Session{}) do
      %{session | hello_sent: false, hello_received: false}
    end

    @doc """
    Returns `true` if the sent and received `Hello` packets share at
    least one capability (e.g. `{"eth", 62}`).
    """
    @spec compatible_capabilities?(t) :: boolean()
    def compatible_capabilities?(session = %Session{}) do
      %Session{hello_received: hello_received, hello_sent: hello_sent} = session

      intersection =
        MapSet.intersection(
          to_mapset(hello_received.caps),
          to_mapset(hello_sent.caps)
        )

      !Enum.empty?(intersection)
    end

    # Converts a capability list into a MapSet for set intersection.
    defp to_mapset(list) do
      Enum.into(list, MapSet.new())
    end
  end

  @doc """
  Convenience function to create an `ExWire.DEVp2p.Session` struct
  """
  @spec init_session :: Session.t()
  def init_session do
    %Session{}
  end

  @doc """
  Function to create a DEVp2p struct needed for a protocol handshake. This
  should be an `ExWire.Packet.Hello` struct with the appropriate values filled in.
  """
  @spec build_hello :: Packet.Hello.t()
  def build_hello do
    %Packet.Hello{
      p2p_version: Config.p2p_version(),
      client_id: Config.client_id(),
      caps: Config.caps(),
      listen_port: Config.listen_port(),
      node_id: Config.node_id()
    }
  end

  @doc """
  Function to update `ExWire.DEVp2p.Session` when a handshake is sent. The
  handshake should be an `ExWire.Packet.Hello` that we have sent to a peer.
  """
  @spec hello_sent(Session.t(), Packet.Hello.t()) :: Session.t()
  def hello_sent(session, hello = %Packet.Hello{}) do
    %{session | hello_sent: hello}
  end

  @doc """
  Function to update `ExWire.DEVp2p.Session` when a handshake is received. The
  handshake should be an `ExWire.Packet.Hello` that we have received from a peer.
  """
  @spec hello_received(Session.t(), Packet.Hello.t()) :: Session.t()
  def hello_received(session, hello = %Packet.Hello{}) do
    %{session | hello_received: hello}
  end

  @doc """
  Function to check whether or not a `ExWire.DEVp2p.Session` is active. See
  `ExWire.DEVp2p.Session.active?/1` for more information.
  """
  @spec session_active?(Session.t()) :: boolean()
  def session_active?(session), do: Session.active?(session)

  @spec session_compatible?(Session.t()) :: boolean()
  def session_compatible?(session), do: Session.compatible_capabilities?(session)

  @doc """
  Function to handles other messages related to the DEVp2p protocol that a peer
  sends. The messages could be `ExWire.Packet.Disconnect`, `ExWire.Packet.Ping`,
  or `ExWire.Packet.Pong`.

  An `ExWire.DEVp2p.Session` is required as the first argument in order to
  properly update the session based on the message received.
  """
  @spec handle_message(Session.t(), struct()) ::
          {:error, :handshake_incomplete} | {:ok, Session.t()}
  def handle_message(session, packet = %Packet.Hello{}) do
    {:ok, hello_received(session, packet)}
  end

  # Any non-Hello packet is rejected here; the visible code only completes
  # the handshake (other packet types are presumably handled elsewhere —
  # NOTE(review): confirm against the caller).
  def handle_message(_session, _message) do
    {:error, :handshake_incomplete}
  end
end
|
apps/ex_wire/lib/ex_wire/dev_p2p.ex
| 0.718792 | 0.488649 |
dev_p2p.ex
|
starcoder
|
defmodule NervesTime.RTC.DS3231 do
  @moduledoc """
  DS3231 RTC implementation for NervesTime

  To configure NervesTime to use this module, update the `:nerves_time` application
  environment like this:

  ```elixir
  config :nerves_time, rtc: NervesTime.RTC.DS3231
  ```

  If not using `"i2c-1"` or the default I2C bus address, specify them like this:

  ```elixir
  config :nerves_time, rtc: {NervesTime.RTC.DS3231, [bus_name: "i2c-2", address: 0x69]}
  ```

  Check the logs for error messages if the RTC doesn't appear to work.

  See https://datasheets.maximintegrated.com/en/ds/DS3231.pdf for implementation details.
  """

  @behaviour NervesTime.RealTimeClock

  require Logger

  alias Circuits.I2C
  alias NervesTime.RTC.DS3231.{Alarm, Control, Date, Status, Temperature}

  @default_bus_name "i2c-1"
  @default_address 0x68

  @typedoc "This type represents the many registers whose value is a single bit."
  @type flag :: 0 | 1

  @typedoc false
  @type state :: %{
          i2c: I2C.bus(),
          bus_name: String.t(),
          address: I2C.address()
        }

  @impl NervesTime.RealTimeClock
  def init(args) do
    bus_name = Keyword.get(args, :bus_name, @default_bus_name)
    address = Keyword.get(args, :address, @default_address)

    with {:ok, i2c} <- I2C.open(bus_name),
         true <- rtc_available?(i2c, address) do
      {:ok, %{i2c: i2c, bus_name: bus_name, address: address}}
    else
      {:error, _} = error ->
        error

      # `rtc_available?/2` returning `false` lands here, producing
      # `{:error, false}`.
      error ->
        {:error, error}
    end
  end

  @impl NervesTime.RealTimeClock
  def terminate(_state), do: :ok

  @impl NervesTime.RealTimeClock
  def set_time(state, now) do
    # Write the new date/time starting at register 0x00, then clear the
    # oscillator-stop flag in the status register (per the datasheet, this
    # flag indicates the time may be invalid — see moduledoc link).
    with {:ok, status_data} <- get_status(state.i2c, state.address),
         :ok <- set(state.i2c, state.address, 0x0, now, Date),
         :ok <- set_status(state.i2c, state.address, %{status_data | osc_stop_flag: 0}) do
      state
    else
      # Errors are logged but the previous state is kept; the caller keeps
      # working with an unset RTC.
      error ->
        _ = Logger.error("Error setting DS3231 RTC to #{inspect(now)}: #{inspect(error)}")
        state
    end
  end

  @impl NervesTime.RealTimeClock
  def get_time(state) do
    # The 7 date/time registers start at address 0x00.
    with {:ok, registers} <- I2C.write_read(state.i2c, state.address, <<0>>, 7),
         {:ok, time} <- Date.decode(registers) do
      {:ok, time, state}
    else
      any_error ->
        _ = Logger.error("DS3231 RTC not set or has an error: #{inspect(any_error)}")
        {:unset, state}
    end
  end

  @doc "Reads the status register (0x0F)."
  def get_status(i2c, address), do: get(i2c, address, 0x0F, 1, Status)

  @doc "Writes the status register (0x0F)."
  def set_status(i2c, address, status), do: set(i2c, address, 0x0F, status, Status)

  @doc "Reads the control register (0x0E)."
  def get_control(i2c, address), do: get(i2c, address, 0x0E, 1, Control)

  @doc "Writes the control register (0x0E)."
  def set_control(i2c, address, control), do: set(i2c, address, 0x0E, control, Control)

  @doc "Reads an alarm register. Alarm 1 lives at 0x07 (4 bytes), alarm 2 at 0x0B (3 bytes)."
  def get_alarm(i2c, address, 1 = _alarm_num), do: get(i2c, address, 0x07, 4, Alarm)
  def get_alarm(i2c, address, 2 = _alarm_num), do: get(i2c, address, 0x0B, 3, Alarm)

  @doc "Writes an alarm register. Only alarm 1 has a `:seconds` field, which selects the clause."
  def set_alarm(i2c, address, %{seconds: _} = a1), do: set(i2c, address, 0x07, a1, Alarm)
  def set_alarm(i2c, address, a2), do: set(i2c, address, 0x0B, a2, Alarm)

  @doc "Reads the temperature register (0x11, 2 bytes)."
  def get_temperature(i2c, address), do: get(i2c, address, 0x11, 2, Temperature)

  # Encodes `data` with `module` and writes it at register `offset`.
  # Normalizes any non-tuple failure into an `{:error, reason}` tuple.
  defp set(i2c, address, offset, data, module) do
    with {:ok, bin} <- module.encode(data),
         :ok <- I2C.write(i2c, address, [offset, bin]) do
      :ok
    else
      {:error, _} = e -> e
      e -> {:error, e}
    end
  end

  # Reads `length` bytes at register `offset` and decodes them with `module`.
  defp get(i2c, address, offset, length, module) do
    with {:ok, bin} <- I2C.write_read(i2c, address, <<offset>>, length),
         {:ok, data} <- module.decode(bin) do
      {:ok, data}
    else
      {:error, _} = e -> e
      e -> {:error, e}
    end
  end

  # Probes the device by reading one byte at register 0x00.
  defp rtc_available?(i2c, address) do
    case I2C.write_read(i2c, address, <<0>>, 1) do
      {:ok, <<_::8>>} -> true
      {:error, _} -> false
    end
  end
end
|
lib/nerves_time/rtc/ds3231.ex
| 0.812161 | 0.772874 |
ds3231.ex
|
starcoder
|
defmodule Processes do
  # Koans teaching the basics of Elixir processes: spawning, message
  # passing, holding state in a receive loop, timeouts, and exit
  # signals/monitoring.
  use Koans

  @intro "Processes"

  koan "You are a process" do
    # Every piece of Elixir code runs inside a process; `self/0` is ours.
    assert Process.alive?(self()) == true
  end

  koan "You can ask a process to introduce itself" do
    information = Process.info(self())

    assert information[:status] == :running
  end

  koan "Processes are referenced by their process ID (pid)" do
    assert is_pid(self()) == true
  end

  koan "New processes are spawned functions" do
    # `spawn/1` returns immediately with the new process's pid.
    value =
      spawn(fn ->
        receive do
        end
      end)

    assert is_pid(value) == true
  end

  koan "Processes die when their function exits" do
    fast_process = spawn(fn -> :timer.sleep(10) end)
    slow_process = spawn(fn -> :timer.sleep(1000) end)

    # All spawned functions are executed concurrently with the current process.
    # You check back on slow_process and fast_process 50ms later. Let's
    # see if they are still alive!
    :timer.sleep(50)

    assert Process.alive?(fast_process) == false
    assert Process.alive?(slow_process) == true
  end

  koan "Processes can send and receive messages" do
    send(self(), "hola!")

    receive do
      msg -> assert msg == "hola!"
    end
  end

  koan "A process will wait forever for a message" do
    # A `receive` with no matching clause (and no `after`) blocks forever.
    wait_forever = fn ->
      receive do
      end
    end

    pid = spawn(wait_forever)

    assert Process.alive?(pid) == true
  end

  koan "Received messages are queued, first in first out" do
    send(self(), "hola!")
    send(self(), "como se llama?")

    assert_receive "hola!"
    assert_receive "como se llama?"
  end

  koan "A common pattern is to include the sender in the message, so that it can reply" do
    greeter = fn ->
      receive do
        {:hello, sender} -> send(sender, :how_are_you?)
      end
    end

    pid = spawn(greeter)
    send(pid, {:hello, self()})

    assert_receive :how_are_you?
  end

  # Echoes every `{caller, value}` message back to `caller`, upcased.
  # Tail-recurses so it can serve any number of messages.
  def yelling_echo_loop do
    receive do
      {caller, value} ->
        send(caller, String.upcase(value))
        yelling_echo_loop()
    end
  end

  koan "Use tail recursion to receive multiple messages" do
    pid = spawn_link(&yelling_echo_loop/0)

    send(pid, {self(), "o"})
    assert_receive "O"

    send(pid, {self(), "hai"})
    assert_receive "HAI"
  end

  # A minimal "state holder": the current value lives in the argument of
  # this recursive receive loop.
  def state(value) do
    receive do
      {caller, :get} ->
        send(caller, value)
        state(value)

      # `caller` is unused when setting, so prefix it with `_` to avoid a
      # compiler warning.
      {_caller, :set, new_value} ->
        state(new_value)
    end
  end

  koan "Processes can be used to hold state" do
    initial_state = "foo"

    pid =
      spawn(fn ->
        state(initial_state)
      end)

    send(pid, {self(), :get})
    assert_receive "foo"

    send(pid, {self(), :set, "bar"})
    send(pid, {self(), :get})
    assert_receive "bar"
  end

  koan "Waiting for a message can get boring" do
    parent = self()

    spawn(fn ->
      # `after` fires when no message arrives within the timeout (5 ms here).
      receive do
      after
        5 -> send(parent, {:waited_too_long, "I am impatient"})
      end
    end)

    assert_receive {:waited_too_long, "I am impatient"}
  end

  koan "Trapping will allow you to react to someone terminating the process" do
    parent = self()

    pid =
      spawn(fn ->
        # With `:trap_exit`, exit signals arrive as `{:EXIT, pid, reason}`
        # messages instead of killing the process.
        Process.flag(:trap_exit, true)
        send(parent, :ready)

        receive do
          {:EXIT, _pid, reason} -> send(parent, {:exited, reason})
        end
      end)

    receive do
      :ready -> true
    end

    Process.exit(pid, :random_reason)

    assert_receive {:exited, :random_reason}
  end

  koan "Parent processes can trap exits for children they are linked to" do
    Process.flag(:trap_exit, true)
    spawn_link(fn -> Process.exit(self(), :normal) end)

    assert_receive {:EXIT, _pid, :normal}
  end

  koan "If you monitor your children, you'll be automatically informed of their departure" do
    spawn_monitor(fn -> Process.exit(self(), :normal) end)

    assert_receive {:DOWN, _ref, :process, _pid, :normal}
  end
end
|
lib/koans/15_processes.ex
| 0.669096 | 0.633906 |
15_processes.ex
|
starcoder
|
defmodule Wallaby.Query.ErrorMessage do
  @moduledoc false
  # Builds human-readable failure messages for Wallaby queries. Each
  # `message/2` clause handles one failure tag; helpers below turn query
  # metadata (method, selector, counts, visibility) into English fragments.

  alias Wallaby.Query

  @doc """
  Compose an error message based on the error method and query information
  """
  @spec message(Query.t, any()) :: String.t

  def message(%Query{} = query, :not_found) do
    "Expected to find #{found_error_message(query)}"
  end
  def message(%Query{} = query, :found) do
    "Expected not to find #{found_error_message(query)}"
  end
  def message(%{method: method, selector: selector}, :label_with_no_for) do
    """
    The text '#{selector}' matched a label but the label has no 'for'
    attribute and can't be used to find the correct #{method(method)}.

    You can fix this by including the `for="YOUR_INPUT_ID"` attribute on the
    appropriate label.
    """
  end
  def message(%{method: method, selector: selector}, {:label_does_not_find_field, for_text}) do
    """
    The text '#{selector}' matched a label but the label's 'for' attribute
    doesn't match the id of any #{method(method)}.

    Make sure that id on your #{method(method)} is `id="#{for_text}"`.
    """
  end
  def message(%{selector: selector}, :button_with_bad_type) do
    """
    The text '#{selector}' matched a button but the button has an invalid 'type' attribute.

    You can fix this by including `type="[submit|reset|button|image]"` on the appropriate button.
    """
  end
  def message(_, :cannot_set_text_with_invisible_elements) do
    """
    Cannot set the `text` filter when `visible` is set to `false`.

    Text is based on visible text on the page. This is a limitation of webdriver.
    Since the element isn't visible the text isn't visible. Because of that I
    can't apply both filters correctly.
    """
  end
  def message(_, {:at_number, query}) do
    # The query is invalid. the 'at' number requested is not within the results list (1-#{size}).
    """
    The element at index #{Query.at_number(query)} is not available because #{result_count(query.result)} #{method(query)} #{result_expectation(query.result)}
    """
  end
  def message(_, :min_max) do
    """
    The query is invalid. Cannot set the minimum greater than the maximum.
    """
  end
  def message(%{method: method, selector: selector}, :invalid_selector) do
    """
    The #{method} '#{selector}' is not a valid query.
    """
  end

  # Suggests the `count:` option when a selector unexpectedly matched
  # several elements.
  def help(elements) do
    """
    If you expect to find the selector #{times(length(elements))} then you
    should include the `count: #{length(elements)}` option in your finder.
    """
  end

  # "Expected N, visible <method> '<selector>' but M ... were found." body
  # shared by the :not_found and :found messages.
  defp found_error_message(query) do
    """
    #{expected_count(query)}, #{visibility(query)} #{method(query)} '#{query.selector}' but #{result_count(query.result)}, #{visibility(query)} #{short_method(query.method, Enum.count(query.result))} #{result_expectation(query.result)}.
    """
  end

  @doc """
  Extracts the selector method from the selector and converts it into a human
  readable format
  """
  @spec method(Query.t) :: String.t
  # NOTE: the second clause head is arity 2 (method atom + plural flag);
  # the previous `{atom(), boolean()}` spec described a non-existent
  # single-tuple-argument clause.
  @spec method(atom(), boolean()) :: String.t

  def method(%Query{conditions: conditions} = query) do
    method(query.method, conditions[:count] > 1)
  end
  def method(_), do: "element"

  def method(:css, true), do: "elements that matched the css"
  def method(:css, false), do: "element that matched the css"
  def method(:select, true), do: "selects"
  def method(:select, false), do: "select"
  def method(:option, true), do: "option fields"
  def method(:option, false), do: "option"
  def method(:fillable_field, true), do: "text inputs or textareas"
  def method(:fillable_field, false), do: "text input or textarea"
  def method(:checkbox, true), do: "checkboxes"
  def method(:checkbox, false), do: "checkbox"
  def method(:radio_button, true), do: "radio buttons"
  def method(:radio_button, false), do: "radio button"
  def method(:link, true), do: "links"
  def method(:link, false), do: "link"
  def method(:xpath, true), do: "elements that matched the xpath"
  def method(:xpath, false), do: "element that matched the xpath"
  def method(:button, true), do: "buttons"
  def method(:button, false), do: "button"
  def method(:file_field, true), do: "file fields"
  def method(:file_field, false), do: "file field"
  def method(:text, true), do: "elements with the text"
  def method(:text, false), do: "element with the text"

  # Short noun ("element"/"elements") used after the result count; pluralized
  # for zero and for more than one.
  def short_method(:css, count) when count > 1, do: "elements"
  def short_method(:css, count) when count == 0, do: "elements"
  def short_method(:css, _), do: "element"
  def short_method(:xpath, count) when count == 1, do: "element"
  def short_method(:xpath, _), do: "elements"
  def short_method(method, count), do: method(method, count != 1)

  @doc """
  Generates failure conditions based on query conditions.
  """
  @spec conditions(Keyword.t) :: list(String.t)
  def conditions(opts) do
    opts
    |> Keyword.delete(:visible)
    |> Keyword.delete(:count)
    |> Enum.map(&condition/1)
    |> Enum.reject(&is_nil/1)
  end

  @doc """
  Converts a condition into a human readable failure message.
  """
  @spec condition({atom(), String.t}) :: String.t | nil
  def condition({:text, text}) when is_binary(text) do
    "text: '#{text}'"
  end
  def condition(_), do: nil

  @doc """
  Converts the visibilty attribute into a human readable form.
  """
  @spec visibility(Query.t) :: String.t
  def visibility(query) do
    if Query.visible?(query) do
      "visible"
    else
      "invisible"
    end
  end

  defp result_count([_]), do: "only 1"
  defp result_count(result), do: "#{Enum.count(result)}"

  defp times(1), do: "1 time"
  defp times(count), do: "#{count} times"

  # Describes the expected number of matches: an exact count, a violated
  # minimum/maximum, or nothing when no count constraint applies.
  defp expected_count(query) do
    conditions = query.conditions

    cond do
      conditions[:count] ->
        "#{conditions[:count]}"
      conditions[:minimum] && Enum.count(query.result) < conditions[:minimum] ->
        "at least #{conditions[:minimum]}"
      conditions[:maximum] && Enum.count(query.result) > conditions[:maximum] ->
        # fixed typo: "no more then" -> "no more than"
        "no more than #{conditions[:maximum]}"
      true -> ""
    end
  end

  def result_expectation(result) when length(result) == 1, do: "was found"
  def result_expectation(_), do: "were found"
end
|
lib/wallaby/query/error_message.ex
| 0.837819 | 0.453201 |
error_message.ex
|
starcoder
|
defmodule ExAdmin do
@moduledoc """
ExAdmin is a an auto administration tool for the PhoenixFramework,
providing a quick way to create a CRUD interface for administering
Ecto models with little code and the ability to customize the
interface if desired.
After creating one or more Ecto models, the administration tool can
be used by creating a resource model for each model. The basic
resource file for model ... looks like this:
# web/admin/my_model.ex
defmodule MyProject.ExAdmin.MyModel do
use ExAdmin.Register
register_resource MyProject.MyModel do
end
end
This file can be created manually, or by using the mix task:
mix admin.gen.resource MyModel
ExAdmin adds a menu item for the model in the admin interface, along
with the ability to index, add, edit, show and delete instances of
the model.
Many of the pages in the admin interface can be customized for each
model using a DSL. The following can be customized:
* `index` - Customize the index page
* `show` - Customize the show page
* `form` - Customize the new and edit pages
* `menu` - Customize the menu item
* `controller` - Customize the controller
## Custom Ecto Types
### Map Type
By default, ExAdmin used Poison.encode! to encode `Map` type. To change the
decoding, add override the protocol. For Example:
defimpl ExAdmin.Render, for: Map do
def to_string(map) do
{:ok, encoded} = Poison.encode map
encoded
end
end
As well as handling the encoding to display the data, you will need to handle
the params decoding for the `:create` and `:modify` actions. You have a couple
options for handling this.
* In your changeset, you can update the params field with the decoded value
* Add a controller `before_filter` in your admin resource file.
For example:
register_resource AdminIdIssue.UserSession do
controller do
before_filter :decode, only: [:update, :create]
def decode(conn, params) do
if get_in params, [:usersession, :data] do
params = update_in params, [:usersession, :data], &(Poison.decode!(&1))
end
{conn, params}
end
end
end
## Other Types
To support other Ecto Types, implement the ExAdmin.Render protocol for the
desired type. Here is an example from the ExAdmin code for the `Ecto.Date` type:
defimpl ExAdmin.Render, for: Ecto.Date do
def to_string(dt) do
Ecto.Date.to_string dt
end
end
## Adding Custom CSS or JS to the Layout Head
A configuration item is available to add your own CSS or JS files
to the `<head>` section of ExAdmin's layout file.
Add the following to your project's `config/config.exs` file:
config :ex_admin,
head_template: {ExAdminDemo.AdminView, "admin_layout.html"}
Where:
* `ExAdminDemo.AdminView` is a view in your project
* `admin_layout.html` is a template in `web/templates/admin` directory
For example:
in `web/templates/admin/admin_layout.html.eex`
```html
<link rel='stylesheet' href='<%= static_path(@conn, "/css/admin_custom.css") %>'>
<!--
since this is rendered into the head area, make sure to defer the loading
of your scripts with `async` to not block rendering.
-->
<script async src='<%= static_path(@conn, "/js/app.js") %>'></script>
```
in `priv/static/css/admin_custom.css`
```css
.foo {
color: green !important;
font-weight: 600;
}
```
## Changing the layout footer
Changing the content of the footer can be done through ExAdmin's configuration
options.
Add the following to your project's `config/config.exs` file:
config :ex_admin,
footer: "© Project Name"
## Adding SwitchUser Select Box
At times, you may want an easy way to switch between users while developing and
manually testing an project. ExAdmin supports this feature through configuration
and a plug.
When enabled, a select box is displayed on the top right of each page. When a
new user is selected, the existing user is logged out and the new user automatically
logged in without requiring a password.
Obviously, this is not a feature you will want on a production server. So, to
configure SwitchUser for `:dev` environment:
# web/router.ex
pipeline :protected do
plug :accepts, ["html"]
# ...
if Mix.env == :dev do
plug ExAdmin.Plug.SwitchUser
end
end
# config/dev.exs
config :ex_admin,
logout_user: {Coherence.ControllerHelpers, :logout_user},
login_user: {Coherence.ControllerHelpers, :login_user}
"""
require Logger
use Xain
alias ExAdmin.Utils
import ExAdmin.Utils, only: [titleize: 1, humanize: 1, admin_resource_path: 2]
require ExAdmin.Register
Code.ensure_compiled(ExAdmin.Register)
Module.register_attribute(__MODULE__, :registered, accumulate: true, persist: true)
@default_theme ExAdmin.Theme.AdminLte2
defmacro __using__(_) do
quote do
use ExAdmin.Index
import unquote(__MODULE__)
end
end
# check for old xain.after_callback format and issue a compile time
# exception if not configured correctly.
case Application.get_env(:xain, :after_callback) do
nil ->
nil
{_, _} ->
nil
_ ->
raise ExAdmin.CompileError,
message: "Invalid xain_callback in config. Use {Phoenix.HTML, :raw}"
end
@doc false
def registered, do: Application.get_env(:ex_admin, :modules, []) |> Enum.reverse()
@doc false
def put_data(key, value) do
Agent.update(__MODULE__, &Map.put(&1, key, value))
end
@doc false
def get_data(key) do
Agent.get(__MODULE__, &Map.get(&1, key))
end
@doc false
def get_all_registered do
for reg <- registered() do
case get_registered_resource(reg) do
%{resource_model: rm} = item ->
{rm, item}
%{type: :page} = item ->
{nil, item}
end
end
end
@doc false
def get_registered_resource(name) do
apply(name, :__struct__, [])
end
@doc false
def get_registered do
for reg <- registered() do
get_registered_resource(reg)
end
end
@doc false
def get_registered(resource_model) do
get_all_registered()
|> Keyword.get(resource_model)
end
def get_registered_by_association(resource, assoc_name) do
resource_model = resource.__struct__
assoc_model =
case resource_model.__schema__(:association, assoc_name) do
%{through: [link1, link2]} ->
resource |> Ecto.build_assoc(link1) |> Ecto.build_assoc(link2) |> Map.get(:__struct__)
%{queryable: assoc_model} ->
assoc_model
nil ->
raise ArgumentError.exception(
"Association #{assoc_name} is not found.\n#{inspect(resource_model)}.__schema__(:association, #{
inspect(assoc_name)
}) returns nil"
)
_ ->
raise ArgumentError.exception(
"Association type of #{assoc_name} is not supported. Please, fill an issue."
)
end
Enum.find(get_registered(), %{}, &(Map.get(&1, :resource_model) == assoc_model))
end
@doc false
def get_controller_path(%{} = resource) do
get_controller_path(Map.get(resource, :__struct__))
end
@doc false
def get_controller_path(resource_model) when is_atom(resource_model) do
get_all_registered()
|> Keyword.get(resource_model, %{})
|> Map.get(:controller_route)
end
@doc false
def get_title_actions(%Plug.Conn{private: _private, path_info: _path_info} = conn) do
defn = conn.assigns.defn
fun = defn |> Map.get(:title_actions)
fun.(conn, defn)
end
@doc false
def get_title_actions(name) do
case get_registered(name) do
nil ->
&__MODULE__.default_page_title_actions/2
%{title_actions: actions} ->
actions
end
end
@doc false
def get_name_from_controller(controller) when is_atom(controller) do
get_all_registered()
|> Enum.find_value(fn {name, %{controller: c_name}} ->
if c_name == controller, do: name
end)
end
@doc false
def default_resource_title_actions(
%Plug.Conn{params: params} = conn,
%{resource_model: resource_model} = defn
) do
singular = ExAdmin.Utils.displayable_name_singular(conn) |> titleize
actions = defn.actions
case Utils.action_name(conn) do
:show ->
id = Map.get(params, "id")
Enum.reduce([:edit, :new, :delete], [], fn action, acc ->
if Utils.authorized_action?(conn, action, resource_model) do
[{action, action_button(conn, defn, singular, :show, action, actions, id)} | acc]
else
acc
end
end)
|> add_custom_actions(:show, actions, id)
|> Enum.reverse()
action when action in [:index, :edit] ->
if Utils.authorized_action?(conn, action, resource_model) do
[{:new, action_button(conn, defn, singular, action, :new, actions)}]
else
[]
end
|> add_custom_actions(action, actions)
|> Enum.reverse()
_ ->
[]
end
end
@doc false
def default_page_title_actions(_conn, _) do
[]
end
@doc """
Get current theme name
"""
def theme do
Application.get_env(:ex_admin, :theme, @default_theme)
end
def theme_name(conn) do
conn.assigns.theme.name
end
def action_button(conn, defn, name, _page, action, actions, id \\ nil) do
if action in actions do
if ExAdmin.Utils.authorized_action?(conn, action, defn) do
action_name = defn.action_labels[action] || humanize(action)
[action_link(conn, "#{action_name} #{name}", action, id)]
else
[]
end
else
[]
end
end
defp add_custom_actions(acc, action, actions, id \\ nil)
defp add_custom_actions(acc, _action, [], _id), do: acc
defp add_custom_actions(acc, action, [{action, button} | actions], id) do
import ExAdmin.ViewHelpers
# remove the compiler warning
endpoint()
{fun, _} = Code.eval_quoted(button, [id: id], __ENV__)
cond do
is_function(fun, 1) -> [fun.(id) | acc]
is_function(fun, 0) -> [fun.() | acc]
true -> acc
end
|> add_custom_actions(action, actions, id)
end
defp add_custom_actions(acc, action, [_ | actions], id) do
add_custom_actions(acc, action, actions, id)
end
defp action_link(conn, name, :delete, _id) do
{name, [
href: admin_resource_path(conn, :destroy),
"data-confirm": Utils.confirm_message(),
"data-method": :delete,
rel: :nofollow
]}
end
defp action_link(conn, name, action, _id) do
{name, [href: admin_resource_path(conn, action)]}
end
@doc false
# An action is available only when the current user is authorized for it AND
# the resource definition enables it.
def has_action?(conn, defn, action) do
  if !ExAdmin.Utils.authorized_action?(conn, action, defn) do
    false
  else
    _has_action?(defn, action)
  end
end
defp _has_action?(defn, action) do
  only = Keyword.get(defn.actions, :only, [])

  # The original cond returned false both for `:except` members and for
  # anything unlisted, so membership in either allow-list (the actions list
  # itself or its `:only` entry) fully determines the result.
  action in defn.actions or action in only
end
end
|
lib/ex_admin.ex
| 0.929364 | 0.707493 |
ex_admin.ex
|
starcoder
|
defmodule PermastateOperator.Controller.V1.Action do
  @moduledoc """
  PermastateOperator: Action CRD.

  ## Kubernetes CRD Spec

  Eigr Action CRD

  ### Examples

  ```
  apiVersion: functions.eigr.io/v1
  kind: Action
  metadata:
    name: shopping-cart
  spec:
    containers:
      - image: my-docker-hub-username/shopping-cart:latest
  ```
  """
  require Logger
  use Bonny.Controller

  @version "v1"

  @rule {"", ["services", "pods", "configmaps"], ["*"]}
  @rule {"apps", ["deployments"], ["*"]}

  # It would be possible to call @group "permastate.eigr.io"
  # However, to maintain compatibility with the original protocol, we will call it cloudstate.io
  @group "functions.eigr.io"

  @scope :namespaced
  @names %{
    plural: "actions",
    singular: "action",
    kind: "Action",
    shortNames: ["ac", "act", "action", "actions"]
  }

  # @additional_printer_columns [
  #   %{
  #     name: "test",
  #     type: "string",
  #     description: "test",
  #     JSONPath: ".spec.test"
  #   }
  # ]

  @doc false
  def child_spec(_arg) do
    %{
      id: __MODULE__,
      start: {Bonny.Controller, :start_link, [handler: __MODULE__]}
    }
  end

  @doc """
  Called periodically for each existing CustomResource to allow for reconciliation.
  """
  @spec reconcile(map()) :: :ok | :error
  @impl Bonny.Controller
  def reconcile(payload) do
    track_event(:reconcile, payload)
    :ok
  end

  @doc """
  Creates the kubernetes `service`, `configmap` and `statefulset` that run an "Eigr" app.
  """
  @spec add(map()) :: :ok | {:error, any()}
  @impl Bonny.Controller
  def add(payload) do
    track_event(:add, payload)
    resources = parse(payload)

    # The service and configmap are created first: the stateful set's pods
    # read the configmap (envFrom) and join the headless service.
    with {:ok, _} <- K8s.Client.create(resources.service) |> run(),
         {:ok, _} <- K8s.Client.create(resources.configmap) |> run() do
      resource_res = K8s.Client.create(resources.statefulset) |> run()
      Logger.info("service result: #{inspect(resource_res)}")

      case resource_res do
        {:ok, _} ->
          :ok

        {:error, error} ->
          {:error, error}

        # Previously returned the malformed 1-tuple `{:error}`; keep the
        # declared {:error, reason} contract for anything unexpected.
        other ->
          {:error, {:unexpected_response, other}}
      end
    else
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  Updates the `service`, `configmap` and `statefulset` resources.
  """
  @spec modify(map()) :: :ok | {:error, any()}
  @impl Bonny.Controller
  def modify(payload) do
    resources = parse(payload)

    with {:ok, _} <- K8s.Client.patch(resources.service) |> run(),
         {:ok, _} <- K8s.Client.patch(resources.configmap) |> run(),
         {:ok, _} <- K8s.Client.patch(resources.statefulset) |> run() do
      :ok
    else
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  Deletes the `service`, `configmap` and `statefulset` resources.
  """
  @spec delete(map()) :: :ok | {:error, any()}
  @impl Bonny.Controller
  def delete(payload) do
    track_event(:delete, payload)
    resources = parse(payload)

    with {:ok, _} <- K8s.Client.delete(resources.service) |> run(),
         {:ok, _} <- K8s.Client.delete(resources.configmap) |> run(),
         {:ok, _} <- K8s.Client.delete(resources.statefulset) |> run() do
      :ok
    else
      {:error, error} -> {:error, error}
    end
  end

  # Builds the desired child resources for an Action CR.
  #
  # Bug fix: the stateful set was previously stored under the key
  # `:deployment`, while add/1, modify/1 and delete/1 all read
  # `resources.statefulset`, so every add/modify/delete crashed with a
  # KeyError before reaching the cluster.
  defp parse(%{
         "kind" => "Action",
         "apiVersion" => "functions.eigr.io/v1",
         "metadata" => %{"name" => name, "namespace" => ns},
         "spec" => %{"containers" => containers}
       }) do
    statefulset = gen_statefulset(ns, name, 1, containers)
    service = gen_service(ns, name)
    # NOTE(review): the configmap's PROXY_APP_NAME is hard-coded to "proxy"
    # rather than the CR name — confirm that is intentional.
    configmap = gen_configmap(ns, "proxy")

    %{
      configmap: configmap,
      statefulset: statefulset,
      service: service
    }
  end

  # Proxy configuration shared by every pod via envFrom.
  defp gen_configmap(ns, name) do
    %{
      "apiVersion" => "v1",
      "kind" => "ConfigMap",
      "metadata" => %{
        "namespace" => ns,
        "name" => "proxy-cm"
      },
      "data" => %{
        "PROXY_APP_NAME" => name,
        "PROXY_CLUSTER_POLLING" => "3000",
        "PROXY_CLUSTER_STRATEGY" => "kubernetes-dns",
        "PROXY_HEADLESS_SERVICE" => "proxy-headless-svc",
        "PROXY_HEARTBEAT_INTERVAL" => "240000",
        "PROXY_HTTP_PORT" => "9001",
        "PROXY_PORT" => "9000",
        "PROXY_ROOT_TEMPLATE_PATH" => "/home/app",
        "PROXY_UDS_ADDRESS" => "/var/run/eigr/functions.sock",
        "PROXY_UDS_MODE" => "false",
        "USER_FUNCTION_HOST" => "127.0.0.1",
        "USER_FUNCTION_PORT" => "8080"
      }
    }
  end

  # Headless service (clusterIP: None) used for DNS-based Erlang clustering.
  defp gen_service(ns, _name) do
    %{
      "apiVersion" => "v1",
      "kind" => "Service",
      "metadata" => %{
        "name" => "proxy-headless-svc",
        "namespace" => ns,
        "labels" => %{"svc-cluster-name" => "svc-proxy"}
      },
      "spec" => %{
        "clusterIP" => "None",
        "selector" => %{"cluster-name" => "proxy"},
        "ports" => [
          %{"port" => 4369, "name" => "epmd"},
          %{"port" => 9000, "name" => "proxy"},
          %{"port" => 9001, "name" => "http"}
        ]
      }
    }
  end

  # Renders the StatefulSet running the massa-proxy sidecar plus the user's
  # function container (first entry of the CR's `containers` list).
  defp gen_statefulset(ns, name, replicas, containers) do
    container = List.first(containers)
    image = container["image"]

    %{
      "apiVersion" => "apps/v1",
      "kind" => "StatefulSet",
      "metadata" => %{
        "name" => name,
        "namespace" => ns,
        "labels" => %{"app" => name, "cluster-name" => "proxy"}
      },
      "spec" => %{
        "selector" => %{
          "matchLabels" => %{"app" => name, "cluster-name" => "proxy"}
        },
        "serviceName" => "proxy-headless-svc",
        "replicas" => replicas,
        "template" => %{
          "metadata" => %{
            "annotations" => %{
              "prometheus.io/port" => "9001",
              "prometheus.io/scrape" => "true"
            },
            "labels" => %{"app" => name, "cluster-name" => "proxy"}
          },
          "spec" => %{
            "containers" => [
              %{
                "name" => "massa-proxy",
                "image" => "docker.io/eigr/massa-proxy:0.1.31",
                "env" => [
                  # NOTE(review): PROXY_POD_IP is declared twice — this first,
                  # static, secret-looking value is shadowed by the fieldRef
                  # entry below. It was probably meant to be a different
                  # variable (e.g. a cluster cookie); confirm and rename.
                  %{
                    "name" => "PROXY_POD_IP",
                    "value" => "6eycE1E/S341t4Bcto262ffyFWklCWHQIKloJDJYR7Y="
                  },
                  %{
                    "name" => "PROXY_POD_IP",
                    "valueFrom" => %{"fieldRef" => %{"fieldPath" => "status.podIP"}}
                  }
                ],
                "ports" => [
                  %{"containerPort" => 9000},
                  %{"containerPort" => 9001},
                  %{"containerPort" => 4369}
                ],
                "livenessProbe" => %{
                  "failureThreshold" => 10,
                  "httpGet" => %{
                    "path" => "/health",
                    "port" => 9001,
                    "scheme" => "HTTP"
                  },
                  "initialDelaySeconds" => 300,
                  "periodSeconds" => 3600,
                  "successThreshold" => 1,
                  "timeoutSeconds" => 1200
                },
                "resources" => %{
                  "limits" => %{
                    "memory" => "1024Mi"
                  },
                  "requests" => %{
                    "memory" => "70Mi"
                  }
                },
                "envFrom" => [
                  %{
                    "configMapRef" => %{"name" => "proxy-cm"}
                  }
                ]
              },
              %{
                "name" => "user-function",
                "image" => image,
                "ports" => [
                  %{"containerPort" => 8080}
                ]
              }
            ]
          }
        }
      }
    }
  end

  # Executes a prepared K8s operation against the configured cluster.
  defp run(%K8s.Operation{} = op),
    do: K8s.Client.run(op, Bonny.Config.cluster_name())

  defp track_event(type, resource),
    do: Logger.info("#{type}: #{inspect(resource)}")
end
|
lib/permastate_operator/controllers/v1/action.ex
| 0.732209 | 0.66236 |
action.ex
|
starcoder
|
defmodule Square.CustomerSegments do
  @moduledoc """
  Documentation for `Square.Segments`.
  """

  @doc """
  Retrieves the list of customer segments of a business.

  ## Parameters

    * `client` — a configured `Tesla.Client`.
    * `params` — optional query parameters. Supports `cursor`, a pagination
      cursor returned by a previous __ListCustomerSegments__ call, used to
      retrieve the next set of results. See the
      [Pagination guide](https://developer.squareup.com/docs/docs/working-with-apis/pagination).

  ## Example

  ```
  Square.client |> Square.CustomerSegments.list_customer_segments()
  ```
  """
  @spec list_customer_segments(Tesla.Client.t(), list) :: {:error, any} | {:ok, Tesla.Env.t()}
  def list_customer_segments(client, params \\ []) do
    Tesla.get(client, "customers/segments", query: params)
  end

  @doc """
  Retrieves a single customer segment identified by its Square-issued
  `segment_id`.

  ## Example

  ```
  Square.client |> Square.CustomerSegments.retrieve_customer_segment("segment_id4")
  ```
  """
  @spec retrieve_customer_segment(Tesla.Client.t(), binary) ::
          {:error, any} | {:ok, Tesla.Env.t()}
  def retrieve_customer_segment(client, segment_id) do
    Tesla.get(client, "customers/segments/#{segment_id}")
  end
end
|
lib/api/customer_segments_api.ex
| 0.906528 | 0.78083 |
customer_segments_api.ex
|
starcoder
|
defmodule Scidata.CIFAR10 do
  @moduledoc """
  Module for downloading the [CIFAR10 dataset](https://www.cs.toronto.edu/~kriz/cifar.html).
  """

  require Scidata.Utils
  alias Scidata.Utils

  @base_url "https://www.cs.toronto.edu/~kriz/"
  @dataset_file "cifar-10-binary.tar.gz"
  # Fixed dataset dimensions: 50k train / 10k test RGB images of 32x32 pixels.
  @train_images_shape {50000, 3, 32, 32}
  @train_labels_shape {50000}
  @test_images_shape {10000, 3, 32, 32}
  @test_labels_shape {10000}

  @doc """
  Downloads the CIFAR10 training dataset or fetches it locally.

  ## Options

    * `:transform_images` - A function that transforms images, defaults to
      `& &1`.

      It accepts a tuple like `{binary_data, tensor_type, data_shape}` which
      can be used for converting the `binary_data` to a tensor with a function
      like:

          fn {labels_binary, type, _shape} ->
            labels_binary
            |> Nx.from_binary(type)
            |> Nx.new_axis(-1)
            |> Nx.equal(Nx.tensor(Enum.to_list(0..9)))
            |> Nx.to_batched_list(32)
          end

    * `:transform_labels` - similar to `:transform_images` but applied to
      dataset labels
  """
  def download(opts \\ []) do
    download_dataset(:train, opts)
  end

  @doc """
  Downloads the CIFAR10 test dataset or fetches it locally.

  Accepts the same options as `download/1`.
  """
  def download_test(opts \\ []) do
    download_dataset(:test, opts)
  end

  # Splits one batch file into {images, labels} binaries. Each 3073-byte
  # record is 1 label byte followed by a 3072-byte (3 x 32 x 32) image;
  # the comprehension appends them to two flat accumulator binaries,
  # preserving record order.
  defp parse_images(content) do
    for <<example::size(3073)-binary <- content>>, reduce: {<<>>, <<>>} do
      {images, labels} ->
        <<label::size(8)-bitstring, image::size(3072)-binary>> = example
        {images <> image, labels <> label}
    end
  end

  # Downloads and untars the dataset (Utils.get!/1 returns {filename, content}
  # pairs), parses the train or test batch files concurrently, and applies the
  # user-supplied transforms to the concatenated binaries.
  defp download_dataset(dataset_type, opts) do
    transform_images = opts[:transform_images] || (& &1)
    transform_labels = opts[:transform_labels] || (& &1)
    files = Utils.get!(@base_url <> @dataset_file).body

    {imgs, labels} =
      files
      # Train batches are named data_batch_*, the test batch test_batch.
      |> Enum.filter(fn {fname, _} ->
        String.match?(
          List.to_string(fname),
          case dataset_type do
            :train -> ~r/data_batch/
            :test -> ~r/test_batch/
          end
        )
      end)
      # One parsing task per batch file; Enum.map preserves order, so awaiting
      # in the same order keeps the record order deterministic. :infinity
      # because parsing multi-MB batches can exceed the default timeout.
      |> Enum.map(fn {_, content} -> Task.async(fn -> parse_images(content) end) end)
      |> Enum.map(&Task.await(&1, :infinity))
      |> Enum.reduce({<<>>, <<>>}, fn {image, label}, {image_acc, label_acc} ->
        {image_acc <> image, label_acc <> label}
      end)

    {transform_images.(
       {imgs, {:u, 8},
        if(dataset_type == :test, do: @test_images_shape, else: @train_images_shape)}
     ),
     transform_labels.(
       {labels, {:u, 8},
        if(dataset_type == :test, do: @test_labels_shape, else: @train_labels_shape)}
     )}
  end
end
|
lib/scidata/cifar10.ex
| 0.848628 | 0.762026 |
cifar10.ex
|
starcoder
|
defmodule Day3 do
  @moduledoc """
  Advent of Code 2021, day 3: binary diagnostic.

  Part 1 derives the gamma/epsilon rates from per-position bit frequencies;
  part 2 repeatedly filters the reports down to the oxygen-generator and
  CO2-scrubber ratings.
  """

  @doc """
  Solves both parts using the puzzle input in `input.txt` and prints the
  answers.
  """
  def run() do
    read_data()
    |> solve()
  end

  @doc """
  Solves both parts using only the first `n` reports of the puzzle input,
  printing the truncated data set first. Intended for debugging.
  """
  def test(n \\ 5) do
    {number_of_bits, _number_of_diagnostics, reports} = read_data()

    {number_of_bits, n, Enum.take(reports, n)}
    |> IO.inspect(label: "test data")
    |> solve()
  end

  @doc """
  Solves both parts with the example data from the puzzle description.
  Expected answers: 198 and 230.
  """
  def example() do
    number_of_bits = 5
    number_of_reports = 12

    reports = [
      0b00100, 0b11110, 0b10110, 0b10111, 0b10101, 0b01111,
      0b00111, 0b11100, 0b10000, 0b11001, 0b00010, 0b01010
    ]

    {number_of_bits, number_of_reports, reports}
    |> solve()
  end

  @doc """
  Reads `input.txt` and returns `{bits_per_report, report_count, reports}`,
  where each report is the integer value of one binary line.
  """
  def read_data() do
    lines =
      File.read!("input.txt")
      |> String.trim()
      |> String.split("\n")

    number_of_bits = String.length(hd(lines))
    reports = Enum.map(lines, &String.to_integer(&1, 2))
    {number_of_bits, length(reports), reports}
  end

  @doc """
  Computes both answers for `{bits, count, reports}` and prints them.
  """
  def solve(inhalt) do
    ans1 = solve_problem_1(inhalt)
    ans2 = solve_problem_2(inhalt)
    IO.puts("The answers are #{ans1} and #{ans2}")
  end

  @doc """
  Part 1: gamma * epsilon. Each gamma bit is the most common bit in that
  position across all reports; epsilon uses the least common bit.
  """
  def solve_problem_1({w, h, d} = _inhalt), do: solve_problem_1(w, h, d, 0, "", "")

  # All bit positions consumed: the accumulated binary strings are complete.
  def solve_problem_1(width, _height, _list, stage, partial_gamma, partial_epsilon)
      when width == stage do
    String.to_integer(partial_gamma, 2) * String.to_integer(partial_epsilon, 2)
  end

  # Count ones in the current least-significant bit, prepend the winning digit
  # to each partial string (we walk LSB -> MSB, so prepending builds the
  # string MSB-first), then shift every report right by one bit.
  def solve_problem_1(width, height, list, stage, partial_gamma, partial_epsilon)
      when width > stage do
    count_1s = Enum.reduce(list, 0, fn diagnostic, count -> count + rem(diagnostic, 2) end)
    count_0s = height - count_1s
    new_list = Enum.map(list, fn diagnostic -> div(diagnostic, 2) end)

    # Ties are not specified for part 1; flag them loudly if they ever occur.
    if count_1s == count_0s do
      IO.inspect({count_1s, count_0s, stage}, label: "counts are the same")
    end

    new_digit_g = if count_1s > count_0s, do: "1", else: "0"
    new_digit_e = if count_1s > count_0s, do: "0", else: "1"

    solve_problem_1(
      width,
      height,
      new_list,
      stage + 1,
      new_digit_g <> partial_gamma,
      new_digit_e <> partial_epsilon
    )
  end

  @doc """
  Part 2: oxygen-generator rating times CO2-scrubber rating. The search
  starts at the most significant bit, whose mask is `2^(w - 1)`.
  """
  def solve_problem_2({w, _h, d} = _inhalt),
    do: generator_rating(d, pow(2, w - 1)) * scrubber_rating(d, pow(2, w - 1))

  @doc """
  Integer exponentiation for non-negative integer exponents: `pow(b, e)`
  returns `b` raised to `e` (with `pow(b, 0) == 1`).

  Replaces the previous reduce-over-`0..e`-then-divide formulation, which was
  hard to follow and produced nonsense for negative exponents; those now
  raise `FunctionClauseError` instead.
  """
  @spec pow(integer, non_neg_integer) :: integer
  def pow(_base, 0), do: 1
  def pow(base, exp) when is_integer(exp) and exp > 0, do: base * pow(base, exp - 1)

  @doc "Oxygen-generator rating: keep the most-common-bit group each round."
  def generator_rating(d, m), do: search(:more, d, m)

  @doc "CO2-scrubber rating: keep the least-common-bit group each round."
  def scrubber_rating(d, m), do: search(:less, d, m)

  # One report left: that is the rating.
  def search(_criterion, [winner], _m), do: winner

  # `m` is the mask of the bit currently examined; partition the reports on
  # that bit, keep the group the criterion selects, move to the next bit.
  def search(criterion, d, m) do
    list_of_1s = Enum.filter(d, fn datum -> rem(div(datum, m), 2) == 1 end)
    list_of_0s = Enum.filter(d, fn datum -> rem(div(datum, m), 2) == 0 end)
    new_list = select(criterion, list_of_1s, list_of_0s)
    search(criterion, new_list, div(m, 2))
  end

  # :more keeps the ones-group on a tie (oxygen-generator rule).
  def select(:more, l1s, l0s) do
    if length(l1s) >= length(l0s) do
      l1s
    else
      l0s
    end
  end

  # :less keeps the zeros-group on a tie (CO2-scrubber rule).
  def select(:less, l1s, l0s) do
    if length(l0s) <= length(l1s) do
      l0s
    else
      l1s
    end
  end
end
|
apps/day3/lib/day3.ex
| 0.812607 | 0.595845 |
day3.ex
|
starcoder
|
defmodule Wobserver.Util.Helper do
  @moduledoc ~S"""
  Helper functions and JSON encoders.
  """

  alias Poison.Encoder
  alias Encoder.BitString

  defimpl Encoder, for: PID do
    @doc ~S"""
    JSON encodes a `PID` by rendering it with `inspect/1` and delegating the
    resulting string (plus `options`) to `BitString.encode/2`.
    """
    @spec encode(pid :: pid, options :: any) :: String.t
    def encode(pid, options), do: BitString.encode(inspect(pid), options)
  end

  defimpl Encoder, for: Port do
    @doc ~S"""
    JSON encodes a `Port` by rendering it with `inspect/1` and delegating the
    resulting string (plus `options`) to `BitString.encode/2`.
    """
    @spec encode(port :: port, options :: any) :: String.t
    def encode(port, options), do: BitString.encode(inspect(port), options)
  end

  @doc ~S"""
  Converts a string to a module name or a plain atom.

  Strings whose first character is already capitalized are treated as dotted
  module paths; everything else becomes a bare atom.
  """
  @spec string_to_module(module :: String.t) :: atom
  def string_to_module(module) do
    first = String.first(module)

    if String.capitalize(first) == first do
      # Capitalized: interpret as a dotted module path.
      # NOTE(review): String.to_atom/1 creates atoms at runtime and atoms are
      # never garbage collected — confirm `module` never comes from untrusted
      # input before reusing this helper.
      module
      |> String.split(".")
      |> Enum.map(&String.to_atom/1)
      |> Module.concat()
    else
      String.to_atom(module)
    end
  end

  @doc ~S"""
  Formats function information as a readable string; a bare name is returned
  as-is (stringified), `nil` stays `nil`.

  Example:

  ```
  format_function({Logger, :log, 2})
  #=> "Logger.log/2"

  format_function(:format_function)
  #=> "format_function"
  ```
  """
  @spec format_function(nil | {atom, atom, integer} | atom) :: String.t | nil
  def format_function(nil), do: nil
  def format_function({mod, fun, arity}), do: "#{mod}.#{fun}/#{arity}"
  def format_function(name), do: "#{name}"

  @doc ~S"""
  Parallel map implemented with `Task`.

  Spawns one task per element with `Task.async/1`, then awaits the results in
  input order.
  """
  @spec parallel_map(enum :: list, function :: fun) :: list
  def parallel_map(enum, function) do
    enum
    |> Enum.map(fn element -> Task.async(fn -> function.(element) end) end)
    |> Enum.map(&Task.await/1)
  end
end
|
lib/wobserver/util/helper.ex
| 0.842086 | 0.793986 |
helper.ex
|
starcoder
|
defmodule Extracker do
  @moduledoc """
  A fast & scaleable BitTorrent tracker.
  """

  @doc """
  Set the duration that an announced peer will be kept in the system.
  """
  def set_interval(interval) do
    # Stored in Redis so every tracker node announces the same value.
    Redix.command!(:redix, ["SET", "interval", interval])
    :ok
  end

  @doc """
  Announce a peer to the tracker.
  """
  def announce(info_hash, peer_id, address, stats, opts \\ [])

  # Happy path: IPv4 address tuple plus {uploaded, downloaded, left} stats.
  def announce(
        hash,
        id,
        {{a, b, c, d}, port},
        {ul, dl, left},
        opts
      ) do
    validate_info_hash!(hash)
    validate_peer_id!(id)
    validate_ip_address!({{a, b, c, d}, port})
    validate_byte_count!(ul)
    validate_byte_count!(dl)
    validate_byte_count!(left)

    # :interval is the regular re-announce; :numwant caps the returned peer
    # sample (default 50).
    event = Keyword.get(opts, :event, :interval)
    numwant = Keyword.get(opts, :numwant, 50)

    # Redis keys use the lowercase-hex form of the 20-byte ids.
    peer_id = Base.encode16(id, case: :lower)
    info_hash = Base.encode16(hash, case: :lower)
    now_iso8601 = DateTime.utc_now() |> DateTime.to_iso8601()

    config_queries = [
      ["GET", "interval"]
    ]

    # Record the torrent and refresh this peer's address/last-seen data.
    peer_data_queries = [
      ["SADD", "torrents", info_hash],
      ["SET", "peer:#{peer_id}:address", "#{:inet.ntoa({a, b, c, d})}:#{port}"],
      ["SET", "peer:#{peer_id}:last_contacted", now_iso8601]
    ]

    # Event-specific set maintenance. Every non-interval branch rebuilds the
    # combined "torrent:<hash>:peers" set from the complete/incomplete sets
    # via SUNIONSTORE so the sample below stays consistent.
    peer_state_queries =
      case event do
        :interval ->
          []

        :completed ->
          [
            ["INCR", "torrent:#{info_hash}:downloaded"],
            ["SADD", "torrent:#{info_hash}:complete-peers", peer_id],
            ["SREM", "torrent:#{info_hash}:incomplete-peers", peer_id],
            [
              "SUNIONSTORE",
              "torrent:#{info_hash}:peers",
              "torrent:#{info_hash}:incomplete-peers",
              "torrent:#{info_hash}:complete-peers"
            ]
          ]

        :started ->
          [
            ["SADD", "torrent:#{info_hash}:incomplete-peers", peer_id],
            ["SREM", "torrent:#{info_hash}:complete-peers", peer_id],
            [
              "SUNIONSTORE",
              "torrent:#{info_hash}:peers",
              "torrent:#{info_hash}:incomplete-peers",
              "torrent:#{info_hash}:complete-peers"
            ]
          ]

        :stopped ->
          [
            ["SREM", "torrent:#{info_hash}:complete-peers", peer_id],
            ["SREM", "torrent:#{info_hash}:incomplete-peers", peer_id],
            [
              "SUNIONSTORE",
              "torrent:#{info_hash}:peers",
              "torrent:#{info_hash}:incomplete-peers",
              "torrent:#{info_hash}:complete-peers"
            ]
          ]
      end

    # Swarm counts plus a random sample of peer ids to hand back.
    peer_list_queries = [
      ["SCARD", "torrent:#{info_hash}:complete-peers"],
      ["SCARD", "torrent:#{info_hash}:incomplete-peers"],
      ["SRANDMEMBER", "torrent:#{info_hash}:peers", numwant]
    ]

    redis_results =
      Redix.pipeline!(
        :redix,
        config_queries ++ peer_data_queries ++ peer_state_queries ++ peer_list_queries
      )

    # The pipeline result is positional: first entry is the stored interval,
    # the last three correspond to peer_list_queries.
    ids = List.last(redis_results)

    address_requests =
      Enum.map(ids, fn id_i ->
        ["GET", "peer:#{id_i}:address"]
      end)

    # Redix.pipeline!/2 raises on an empty command list, hence the guard.
    addresses =
      if Enum.empty?(address_requests) do
        []
      else
        Redix.pipeline!(:redix, address_requests)
      end

    # Decode each "ip:port" string back into the wire format expected by
    # announce responses.
    peers =
      Enum.zip(ids, addresses)
      |> Enum.map(fn {id, address} ->
        [host_str, port_str] = String.split(address, ":", limit: 2)
        {:ok, ip} = :inet.parse_address(String.to_charlist(host_str))
        port = String.to_integer(port_str)

        %{
          peer_id: Base.decode16!(id, case: :lower),
          ip: ip,
          port: port
        }
      end)

    # NOTE(review): if set_interval/1 was never called, GET returns nil and
    # String.to_integer/1 raises here — confirm the intended bootstrapping.
    interval = List.first(redis_results) |> String.to_integer()
    complete_count = Enum.at(redis_results, -3)
    incomplete_count = Enum.at(redis_results, -2)

    {:ok,
     %{complete: complete_count, incomplete: incomplete_count, interval: interval, peers: peers}}
  end

  # Anything that doesn't match the IPv4 + stats shapes is rejected.
  def announce(_, _, _, _, _) do
    {:error, "invalid request"}
  end

  # Info hashes are raw 20-byte binaries (BEP 3).
  defp validate_info_hash!(info_hash) do
    unless is_binary(info_hash) and byte_size(info_hash) == 20 do
      raise "invalid info hash"
    end
  end

  # Peer ids are raw 20-byte binaries (BEP 3).
  defp validate_peer_id!(peer_id) do
    unless is_binary(peer_id) and byte_size(peer_id) == 20 do
      raise "invalid peer ID"
    end
  end

  # IPv4 only: each octet 0..255, port 0..65535.
  defp validate_ip_address!({{a, b, c, d}, port}) do
    unless a in 0..255 and b in 0..255 and c in 0..255 and d in 0..255 and port in 0..65_535 do
      raise "invalid IP address"
    end
  end

  defp validate_byte_count!(count) do
    unless is_number(count) and count >= 0 do
      raise "invalid byte count"
    end
  end

  @doc """
  Get complete, incomplete, and all-time-downloaded counts for a torrent.
  """
  def scrape(info_hash) do
    validate_info_hash!(info_hash)
    info_hash = Base.encode16(info_hash, case: :lower)

    results =
      Redix.pipeline!(:redix, [
        ["SCARD", "torrent:#{info_hash}:complete-peers"],
        ["SCARD", "torrent:#{info_hash}:incomplete-peers"],
        ["GET", "torrent:#{info_hash}:downloaded"]
      ])

    # The downloaded counter is absent until the first :completed announce.
    downloaded =
      if dl = Enum.at(results, 2) do
        String.to_integer(dl)
      else
        0
      end

    {:ok,
     %{
       complete: Enum.at(results, 0),
       incomplete: Enum.at(results, 1),
       downloaded: downloaded
     }}
  end

  @doc """
  Delete all information relevant to a torrent.
  """
  def drop(info_hash) do
    validate_info_hash!(info_hash)
    info_hash = Base.encode16(info_hash, case: :lower)

    # One pipeline both reads the member list (first reply) and deletes the
    # torrent-level keys; the member list is then expanded into per-peer
    # deletions executed in a second pipeline.
    delete_commands =
      Redix.pipeline!(:redix, [
        ["SMEMBERS", "torrent:#{info_hash}:peers"],
        ["SREM", "torrents", info_hash],
        ["DEL", "torrent:#{info_hash}:downloaded"],
        ["DEL", "torrent:#{info_hash}:complete-peers"],
        ["DEL", "torrent:#{info_hash}:incomplete-peers"],
        ["DEL", "torrent:#{info_hash}:peers"]
      ])
      |> List.first()
      |> Enum.flat_map(fn peer_id ->
        [
          ["DEL", "peer:#{peer_id}:address"],
          ["DEL", "peer:#{peer_id}:last_contacted"]
        ]
      end)

    if Enum.any?(delete_commands) do
      Redix.pipeline!(:redix, delete_commands)
    end

    :ok
  end

  @doc """
  Remove all expired peers from the server.
  """
  def clean(ttl) do
    info_hashes = Redix.command!(:redix, ["SMEMBERS", "torrents"])

    # One SMEMBERS reply (list of peer ids) per known torrent, same order as
    # info_hashes.
    peer_ids_for_hash =
      if Enum.any?(info_hashes) do
        Redix.pipeline!(:redix, Enum.map(info_hashes, &["SMEMBERS", "torrent:#{&1}:peers"]))
      else
        []
      end

    peer_ids = Enum.concat(peer_ids_for_hash)

    # Note: the |> after `end` pipes the whole if-expression, so the mapping
    # below applies to whichever branch produced the list.
    peer_last_contacted_dates =
      if Enum.any?(peer_ids) do
        Redix.pipeline!(:redix, Enum.map(peer_ids, &["GET", "peer:#{&1}:last_contacted"]))
      else
        []
      end
      |> Enum.map(fn last_contacted ->
        # A nil timestamp (key expired/missing) marks the peer as unknown age;
        # such peers are treated as expired below.
        if last_contacted do
          {:ok, timestamp, _offset} = DateTime.from_iso8601(last_contacted)
          timestamp
        else
          nil
        end
      end)

    timestamps_for_peers = Enum.zip(peer_ids, peer_last_contacted_dates) |> Map.new()

    # %{info_hash => %{peer_id => timestamp | nil}}
    report =
      Enum.zip(info_hashes, peer_ids_for_hash)
      |> Map.new(fn {info_hash, peer_ids} ->
        peer_timestamps = Map.take(timestamps_for_peers, peer_ids)
        {info_hash, peer_timestamps}
      end)

    now = DateTime.utc_now()

    # Keep only torrents that actually have expired peers to drop.
    expired_peers_by_hash =
      Map.new(report, fn {info_hash, peer_timestamps} ->
        expired_peers_for_hash =
          Enum.filter(peer_timestamps, fn {_peer_id, timestamp} ->
            is_nil(timestamp) or not active?(timestamp, now, ttl)
          end)
          |> Map.new()
          |> Map.keys()

        {info_hash, expired_peers_for_hash}
      end)
      |> Enum.filter(fn {_info_hash, expired_peers} ->
        Enum.any?(expired_peers)
      end)

    # NOTE(review): expired ids are removed from the complete/incomplete sets
    # and their peer keys deleted, but not from "torrent:<hash>:peers" — that
    # union set keeps stale ids until the next non-interval announce rebuilds
    # it. Confirm this is acceptable.
    drops =
      Enum.flat_map(expired_peers_by_hash, fn {info_hash, peers} ->
        set_drops = [
          ["SREM", "torrent:#{info_hash}:complete-peers"] ++ peers,
          ["SREM", "torrent:#{info_hash}:incomplete-peers"] ++ peers
        ]

        peer_drops =
          Enum.flat_map(peers, fn peer_id ->
            [
              ["DEL", "peer:#{peer_id}:address"],
              ["DEL", "peer:#{peer_id}:last_contacted"]
            ]
          end)

        set_drops ++ peer_drops
      end)

    if Enum.any?(drops) do
      Redix.pipeline!(:redix, drops)
    end

    :ok
  end

  # A peer is active while now <= last_contacted + ttl seconds.
  defp active?(timestamp, now, ttl) do
    expiration = DateTime.add(timestamp, ttl, :second)
    DateTime.compare(now, expiration) in [:lt, :eq]
  end

  @doc """
  Get the number of torrents the server knows about.
  """
  def count_torrents do
    Redix.command!(:redix, ["SCARD", "torrents"])
  end

  @doc """
  Get the number of peers the server knows about.
  """
  def count_peers do
    count_commands =
      Redix.command!(:redix, ["SMEMBERS", "torrents"])
      |> Enum.map(&["SCARD", "torrent:#{&1}:peers"])

    # Redix.pipeline!/2 raises on an empty command list, hence the guard.
    if Enum.any?(count_commands) do
      Redix.pipeline!(:redix, count_commands)
      |> Enum.sum()
    else
      0
    end
  end
end
|
lib/extracker.ex
| 0.674587 | 0.451327 |
extracker.ex
|
starcoder
|
defmodule ExJenga.KYC do
  @moduledoc """
  KYC enables querying the various registrars of persons in the countries of
  East Africa.

  Visit https://developer.jengaapi.io/reference#identity-verification to see
  more details.
  """

  import ExJenga.JengaBase
  alias ExJenga.Signature

  @doc """
  Makes a request to retrieve KYC details for a given customer.

  The body must contain an `:identity` map with `:documentType`,
  `:firstName`, `:lastName`, `:dateOfBirth`, `:documentNumber` and
  `:countryCode`; any other shape returns an error tuple.
  """
  @spec request(%{
          identity: %{
            documentType: String.t(),
            firstName: String.t(),
            lastName: String.t(),
            dateOfBirth: String.t(),
            documentNumber: String.t(),
            countryCode: String.t()
          }
        }) :: {:error, any()} | {:ok, any()}
  def request(
        %{
          identity: %{
            documentType: _document_type,
            firstName: _first_name,
            lastName: _last_name,
            dateOfBirth: _date_of_birth,
            documentNumber: document_number,
            countryCode: country_code
          }
        } = request_body
      ) do
    # The request signature covers merchant code + document number + country.
    signed_payload = "#{merchant_code()}#{document_number}#{country_code}"

    make_request("/customer/v2/identity/verify", request_body, [
      {"signature", Signature.sign(signed_payload)}
    ])
  end

  def request(_), do: {:error, "Required Parameters missing, check your request body"}

  # The merchant code is configured as the Jenga API username.
  defp merchant_code, do: Application.get_env(:ex_jenga, :jenga)[:username]
end
|
lib/ex_jenga/kyc/kyc.ex
| 0.645455 | 0.429489 |
kyc.ex
|
starcoder
|
defmodule Ash.Engine do
@moduledoc """
The Ash engine handles the parallelization/running of requests to Ash.
Much of the complexity of this doesn't come into play for simple requests.
The way it works is that it accepts a list of `Ash.Engine.Request` structs.
Some of values on those structs will be instances of `Ash.Engine.Request.UnresolvedField`.
These unresolved fields can express a dependence on the field values from other requests.
This allows the engine to wait on executing some code until it has its required inputs,
or if all of its dependencies are met, it can execute it immediately. The engine's job is
to resolve its unresolved fields in the proper order, potentially in parallel.
It also has knowledge baked in about certain special fields, like `data` which is the
field we are ultimately trying to resolve, and `query` which is the field that drives authorization
for read requests. Authorization is done on a *per engine request* basis.
As the complexity of a system grows, it becomes very difficult to write code that
is both imperative and performant. This is especially true of a framework that is
designed to be configurable. What exactly is done, as well as the order it is done in,
and wether or not is can be parallelized, varies wildly based on factors like how
the resources are configured and what capabilities the datalayer has. By implementing
a generic "parallel engine", we can let the engine solve for the optimization. We simply
have to express the various operations that must happen, and what other pieces of data
they need in order to happen, and the engine handles the rest.
Eventually, this module may (potentially) be used more explicitly, as a way to construct
"sagas" or "multis" which represent a series of resource actions with linked up inputs.
If each of those resource actions can be broken into its component requests, and the full
set of requests can be processed, we can compose large series' of resource actions without
having to figure out the most optimal way to do it. They will be done as fast as possible.
But we have a long way to go before we get there.
Check out the docs for `Ash.Engine.Request` for some more information. This is a private
interface at the moment, though, so this documentation is just here to explain how it works
it is not intended to give you enough information to use the engine directly.
"""
use GenServer
defstruct [
:api,
:requests,
:verbose?,
:actor,
:authorize?,
:changeset,
:runner_pid,
:local_requests?,
:runner_ref,
request_handlers: %{},
active_requests: [],
completed_requests: [],
errored_requests: [],
data: %{},
errors: []
]
alias Ash.Engine.{Request, Runner}
require Logger
# Entry point: runs a list of Ash.Engine.Request structs, optionally inside
# nested data-layer transactions, and delivers resource notifications only
# after a fully successful run.
def run(request, api, opts \\ [])
# Running zero requests is a caller error, not an empty success.
def run([], _api, _opts), do: {:error, :no_requests_provided}
def run(requests, api, opts) do
  authorize? = opts[:authorize?]
  actor = opts[:actor]

  # Propagate $callers so processes spawned by the engine can be traced back
  # to this caller chain (used by tooling).
  opts = Keyword.put(opts, :callers, [self() | Process.get(:"$callers", [])])

  # If the requests are invalid, this is a framework level error
  Request.validate_requests!(requests)

  # Stamp each request with the run-wide actor/verbosity and combine its own
  # authorize? flag with the run-level one.
  # NOTE(review): when opts[:authorize?] is nil, `and` yields nil here; that
  # reads as falsy downstream, but confirm nothing requires a strict boolean.
  requests =
    Enum.map(requests, fn request ->
      request = %{
        request
        | authorize?: request.authorize? and authorize?,
          actor: actor,
          verbose?: opts[:verbose?]
      }

      Request.add_initial_authorizer_state(request)
    end)

  # Unique ref ties the runner and engine messages of this run together.
  runner_ref = make_ref()

  transaction_result =
    maybe_transact(opts, requests, fn innermost_resource ->
      # Requests that must run in this process vs. ones that may run async.
      {local_requests, async_requests} = split_local_async_requests(requests)

      opts =
        opts
        |> Keyword.put(:runner_ref, runner_ref)
        |> Keyword.put(:requests, async_requests)
        |> Keyword.put(:local_requests?, !Enum.empty?(local_requests))
        |> Keyword.put(:runner_pid, self())
        |> Keyword.put(:api, api)

      run_requests(async_requests, local_requests, opts, innermost_resource)
    end)

  case transaction_result do
    {:ok, %{errors: [], resource_notifications: resource_notifications} = result} ->
      # Notify only after the transaction committed; keep any notifications
      # that could not be delivered so the caller can handle them.
      unsent = Ash.Notifier.notify(resource_notifications)
      {:ok, %{result | resource_notifications: unsent}}

    {:error, runner} ->
      {:error, runner}
  end
end
# Runs the request set: purely local runs happen in-process; otherwise an
# engine GenServer is started to drive the async requests and this process
# waits for its handler-pid map before running the local ones.
defp run_requests(async_requests, local_requests, opts, innermost_resource) do
  if async_requests == [] do
    run_and_return_or_rollback(local_requests, opts, innermost_resource)
  else
    # Trap exits so the engine's termination reaches us as a message instead
    # of crashing this (possibly transaction-holding) process.
    Process.flag(:trap_exit, true)
    {:ok, pid} = GenServer.start(__MODULE__, opts)
    _ = Process.monitor(pid)

    # The engine sends the {path => handler pid} map once all request
    # handlers are spawned (see handle_continue/2).
    receive do
      {:pid_info, pid_info} ->
        run_and_return_or_rollback(
          local_requests,
          opts,
          innermost_resource,
          pid,
          pid_info
        )
    end
  end
end
# Runs the local requests via the Runner; a run with any errors triggers a
# rollback of the innermost open transaction (when there is one).
defp run_and_return_or_rollback(
       local_requests,
       opts,
       innermost_resource,
       pid \\ nil,
       pid_info \\ %{}
     ) do
  case Runner.run(local_requests, opts[:verbose?], opts[:runner_ref], pid, pid_info) do
    %{errors: errors} = runner when errors == [] ->
      {:ok, runner}

    runner ->
      rollback_or_return(innermost_resource, runner)
  end
end
# Failure path: roll the innermost transaction back when one is open,
# otherwise just surface the failed runner to the caller.
defp rollback_or_return(innermost_resource, runner) do
  if innermost_resource,
    do: Ash.DataLayer.rollback(innermost_resource, runner),
    else: {:error, runner}
end
# When opts[:transaction?] is set, opens nested transactions on every distinct
# resource that supports them and runs `func` inside; otherwise `func` runs
# directly with no innermost resource.
defp maybe_transact(opts, requests, func) do
  if opts[:transaction?] do
    transactable_resources =
      for resource <- Enum.uniq(Enum.map(requests, & &1.resource)),
          Ash.DataLayer.data_layer_can?(resource, :transact),
          do: resource

    do_in_transaction(transactable_resources, func)
  else
    func.(nil)
  end
end
# Recursively nests one transaction per resource; `func` ultimately runs
# inside all of them and receives the innermost resource (or nil when the
# list was empty) so failures can be rolled back at the right level.
defp do_in_transaction(resources, func, innnermost \\ nil)
defp do_in_transaction([], func, innermost_resource) do
  func.(innermost_resource)
end
defp do_in_transaction([resource | rest], func, _innermost) do
  Ash.DataLayer.transaction(resource, fn ->
    case do_in_transaction(rest, func, resource) do
      {:ok, value} ->
        value

      {:error, error} ->
        # Rolling back here aborts this and, by propagation, every enclosing
        # transaction opened above.
        Ash.DataLayer.rollback(resource, error)
    end
  end)
end
# GenServer init for the engine process itself.
def init(opts) do
  # Re-install the caller chain inside the engine process for tracing.
  Process.put(:"$callers", opts[:callers])

  state =
    %__MODULE__{
      requests: opts[:requests],
      active_requests: Enum.map(opts[:requests], & &1.path),
      runner_pid: opts[:runner_pid],
      local_requests?: opts[:local_requests?],
      verbose?: opts[:verbose?] || false,
      api: opts[:api],
      actor: opts[:actor],
      runner_ref: opts[:runner_ref],
      authorize?: opts[:authorize?] || false
    }
    |> log_engine_init()

  # Spawning the per-request handler processes is deferred to
  # handle_continue so GenServer.start/3 returns promptly.
  {:ok, state, {:continue, :spawn_requests}}
end
# Continuation after init: start one RequestHandler process per request,
# then broadcast the resulting path->pid map to the runner (when it has
# local requests) and to every handler so they can message each other.
def handle_continue(:spawn_requests, state) do
  log(state, fn -> "Spawning request processes" end, :debug)

  new_state =
    Enum.reduce(state.requests, state, fn request, state ->
      {:ok, pid} =
        GenServer.start(Ash.Engine.RequestHandler,
          # BUG FIX: the caller chain is stored under the *atom* :"$callers"
          # (see `Process.put(:"$callers", ...)` in `init/1` and the
          # Task/OTP convention); reading the string "$callers" always
          # returned the default [] and broke caller attribution.
          callers: [self() | Process.get(:"$callers", [])],
          request: request,
          verbose?: state.verbose?,
          # NOTE(review): the actor is passed under the key `actor?` although
          # `init/1` reads `opts[:actor]` — looks like a typo, but the
          # RequestHandler's option handling is not visible here; confirm
          # against Ash.Engine.RequestHandler before renaming.
          actor?: state.actor,
          runner_ref: state.runner_ref,
          authorize?: state.authorize?,
          engine_pid: self(),
          runner_pid: state.runner_pid
        )

      Process.monitor(pid)

      %{
        state
        | request_handlers: Map.put(state.request_handlers, pid, request.path)
      }
    end)

  # Invert pid => path into path => pid for dependency routing.
  pid_info =
    Enum.into(new_state.request_handlers, %{}, fn {pid, path} ->
      {path, pid}
    end)

  if new_state.local_requests? do
    send(new_state.runner_pid, {:pid_info, pid_info})
  end

  Enum.each(new_state.request_handlers, fn {pid, _} ->
    send(pid, {:pid_info, pid_info})
  end)

  {:noreply, new_state}
end
# A request handler asks for `dependency` (a request path with the
# depended-on field appended) on behalf of `receiver_path`. If the source
# request already errored we can answer immediately: either the field will
# never arrive, or we already hold its value.
def handle_cast(
      {:register_dependency, receiver_path, request_handler_pid, dependency},
      state
    ) do
  path = :lists.droplast(dependency)
  field = List.last(dependency)

  case get_request(state, path) do
    {:error, _pid, request} ->
      case Map.get(request, field) do
        %Request.UnresolvedField{} ->
          # BUG FIX: `log/2` expects a zero-arity fun (the verbose clause
          # invokes `message.()`); passing a bare string crashed whenever
          # verbose logging was enabled.
          log(state, fn -> "#{receiver_path} won't receive #{inspect(request.path)} #{field}" end)

          send_or_cast(
            request_handler_pid,
            state.runner_pid,
            state.runner_ref,
            {:wont_receive, receiver_path, request.path, field}
          )

        value ->
          log(
            state,
            fn -> "Already have #{receiver_path} #{inspect(request.path)} #{field}, sending value" end
          )

          send_or_cast(
            request_handler_pid,
            state.runner_pid,
            state.runner_ref,
            {:field_value, receiver_path, request.path, field, value}
          )
      end

      {:noreply, state}

    _other ->
      # Request still active (its handler will answer) or unknown: nothing
      # to do here.
      {:noreply, state}
  end
end
# Fans a stuck-report request out to every request handler (debugging aid).
def handle_cast(:log_stuck_report, state) do
  state.request_handlers
  |> Map.keys()
  |> Enum.each(&GenServer.cast(&1, :log_stuck_report))

  {:noreply, state}
end

# The runner's local requests failed: stop the engine; the accumulated state
# travels in the shutdown reason for the monitoring process.
def handle_cast({:local_requests_failed, _error}, state) do
  {:stop, {:shutdown, state}, state}
end

# A request handler finished its request successfully.
def handle_cast({:complete, path}, state) do
  state
  |> move_to_complete(path)
  |> maybe_shutdown()
end

# The runner finished all local requests; only async handlers remain, so the
# `local_requests?` shutdown criterion is now satisfied.
def handle_cast(:local_requests_complete, state) do
  %{state | local_requests?: false}
  |> maybe_shutdown()
end

# A request handler reported an error without exiting.
def handle_cast({:error, error, request_handler_state}, state) do
  state
  |> log(fn -> "Error received from request_handler #{inspect(error)}" end)
  |> move_to_error(request_handler_state.request.path)
  |> add_error(request_handler_state.request.path, error)
  |> maybe_shutdown()
end
# A request handler exited with a structured error (we trap exits).
def handle_info({:EXIT, _pid, {:shutdown, {:error, error, request_handler_state}}}, state) do
  state
  |> log(fn -> "Error received from request_handler #{inspect(error)}" end)
  |> move_to_error(request_handler_state.request.path)
  |> add_error(request_handler_state.request.path, error)
  |> maybe_shutdown()
end

# A monitored handler went down with an error that carries its request.
def handle_info({:DOWN, _, _, _pid, {:error, error, %Request{} = request}}, state) do
  state
  |> log(fn -> "Request exited in failure #{request.name}: #{inspect(error)}" end)
  |> move_to_error(request.path)
  |> add_error(request.path, error)
  |> maybe_shutdown()
end

# Catch-all for any other handler DOWN: resolve the request by pid.
# NOTE(review): `get_request/2` returns nil for unknown pids, which would
# raise a MatchError here — presumably every monitored pid is registered in
# `state.request_handlers` before it can go down; confirm.
def handle_info({:DOWN, _, _, pid, reason}, state) do
  {_state, _pid, request} = get_request(state, pid)

  state
  |> log(fn -> "Request exited in failure #{request.name}: #{inspect(reason)}" end)
  |> move_to_error(request.path)
  |> add_error(request.path, reason)
  |> maybe_shutdown()
end
# Delivers a message to a request handler. The runner is a plain process
# (not a GenServer) and receives a raw `send` tagged with the runner ref;
# real request handlers receive a cast.
defp send_or_cast(request_handler_pid, runner_pid, runner_ref, message) do
  case request_handler_pid do
    ^runner_pid -> send(runner_pid, {runner_ref, message})
    handler_pid -> GenServer.cast(handler_pid, message)
  end
end
# Looks up a request either by handler pid or by request path.
#
# BUG FIX: the previous version combined an explicit `get_request/2` clause
# with a default argument on `get_request/3` (`pid \\ nil`). The default
# generates a `get_request/2` that conflicts with the explicit clause —
# an Elixir compile error ("def get_request/2 conflicts with defaults").
# The default is now an explicit delegating clause instead.
defp get_request(state, pid) when is_pid(pid) do
  path = Map.get(state.request_handlers, pid)
  get_request(state, path, pid)
end

defp get_request(state, path), do: get_request(state, path, nil)

# Returns `{status, handler_pid, request}` or nil when the path is unknown.
defp get_request(state, path, pid) do
  case get_status(state, path) do
    nil ->
      nil

    status ->
      # Prefer a pid the caller already resolved; otherwise scan the
      # handlers, falling back to the runner pid for local requests.
      pid = pid || get_pid(state, path)
      {status, pid, Enum.find(state.requests, &(&1.path == path))}
  end
end
# Returns the lifecycle status (:active | :complete | :error) of the request
# at `path`, or nil when the path is not tracked at all.
defp get_status(state, path) do
  [
    active: state.active_requests,
    complete: state.completed_requests,
    error: state.errored_requests
  ]
  |> Enum.find_value(fn {status, paths} -> if path in paths, do: status end)
end
# Finds the handler pid for a request path, falling back to the runner pid
# for requests that run locally (and therefore have no handler process).
defp get_pid(state, path) do
  case Enum.find(state.request_handlers, fn {_pid, handled_path} -> handled_path == path end) do
    {handler_pid, _path} -> handler_pid
    nil -> state.runner_pid
  end
end
# Puts `value` into a (possibly nested) map, creating intermediate maps for
# each key in the path as needed. A non-list key is treated as a plain put.
def put_nested_key(state, [key], value) do
  Map.put(state, key, value)
end

def put_nested_key(state, [key | rest], value) do
  case Map.fetch(state, key) do
    {:ok, nested_state} when is_map(nested_state) ->
      Map.put(state, key, put_nested_key(nested_state, rest, value))

    # BUG FIX: a key holding a non-map value fell through the old
    # `{:ok, nested} when is_map(nested)` / `:error` case and raised a
    # CaseClauseError. Now any non-map (or missing) value is replaced with
    # a freshly-built nested map, matching the function's intent.
    _missing_or_not_a_map ->
      Map.put(state, key, put_nested_key(%{}, rest, value))
  end
end

def put_nested_key(state, key, value) do
  Map.put(state, key, value)
end
# Fetches a value from a (possibly nested) map along a key path, returning
# `{:ok, value}` or `:error`. Unresolved fields never expose a value.
def fetch_nested_value(state, [key]) when is_map(state), do: Map.fetch(state, key)

def fetch_nested_value(%Request.UnresolvedField{}, _), do: :error

def fetch_nested_value(state, [key | remaining]) when is_map(state) do
  with {:ok, inner} <- Map.fetch(state, key) do
    fetch_nested_value(inner, remaining)
  end
end

def fetch_nested_value(state, key) when is_map(state), do: Map.fetch(state, key)
# Splits requests into `{local, async}`. If any involved resource is already
# inside a data-layer transaction, everything must run locally in this
# process (spawned handlers would not share the transaction). Otherwise,
# when no request is forced local, one async-capable request is promoted to
# run locally so the runner always has work to drive.
defp split_local_async_requests(requests) do
  if Enum.any?(requests, fn request ->
       Ash.DataLayer.data_layer_can?(request.resource, :transact) &&
         Ash.DataLayer.in_transaction?(request.resource)
     end) do
    {requests, []}
  else
    {local, async} = Enum.split_with(requests, &must_be_local?/1)

    case {local, async} do
      {[], [first_async | rest]} ->
        {[first_async], rest}

      {local, async} ->
        {local, async}
    end
  end
end
# A request must run in the calling process unless it both opted into async
# execution and its data layer supports the async engine.
defp must_be_local?(request) do
  can_run_async? =
    request.async? && Ash.DataLayer.data_layer_can?(request.resource, :async_engine)

  !can_run_async?
end
# Stops the engine once every request has left the active set AND the runner
# has finished its local requests; otherwise keeps running.
defp maybe_shutdown(%{active_requests: [], local_requests?: false} = state) do
  log(state, fn -> "shutting down, completion criteria reached" end)
  {:stop, {:shutdown, state}, state}
end

defp maybe_shutdown(state) do
  {:noreply, state}
end
# Moves `path` from the active set to the completed set.
defp move_to_complete(state, path) do
  still_active = state.active_requests -- [path]

  %{
    state
    | completed_requests: [path | state.completed_requests],
      active_requests: still_active
  }
end
# Moves `path` from the active set to the errored set.
#
# BUG FIX: the errored list was previously built by prepending to
# `state.completed_requests`, which discarded any previously errored paths
# and seeded the errored list with completed ones.
defp move_to_error(state, path) do
  %{
    state
    | errored_requests: [path | state.errored_requests],
      active_requests: state.active_requests -- [path]
  }
end
# Logs the engine start-up banner (verbose mode only) and returns the state
# unchanged so it can sit inside a pipeline.
defp log_engine_init(state) do
  request_count = Enum.count(state.requests)
  log(state, fn -> "Initializing Engine with #{request_count} requests." end)
end
# Verbose-mode logging. `message` must be a zero-arity fun so the string is
# only built when `verbose?` is set; always returns `state` for pipelining.
defp log(state, message, level \\ :info)

defp log(%{verbose?: true} = state, message, level) do
  Logger.log(level, fn -> ["Engine: ", message.()] end)

  state
end

defp log(state, _, _) do
  state
end
# Records one or more errors against a request path. Errors are normalized
# to Ash errors and annotated with the path they originated from.
defp add_error(state, path, errors) when is_list(errors) do
  Enum.reduce(errors, state, fn error, acc -> add_error(acc, path, error) end)
end

defp add_error(state, path, error) do
  annotated =
    error
    |> Ash.Error.to_ash_error()
    |> Map.put(:path, List.wrap(path))

  %{state | errors: [annotated | state.errors]}
end
end
|
lib/ash/engine/engine.ex
| 0.819821 | 0.520253 |
engine.ex
|
starcoder
|
defmodule Earmark.Transform do
  import Earmark.Helpers, only: [replace: 3]

  alias Earmark.Options

  # Tags rendered inline, without indentation or newlines around content.
  @compact_tags ~w[a code em strong del]

  # https://www.w3.org/TR/2011/WD-html-markup-20110113/syntax.html#void-element
  @void_elements ~W(area base br col command embed hr img input keygen link meta param source track wbr)

  @moduledoc """
  # Transformations

  ## Structure Conserving Transformers

  For the convenience of processing the output of `EarmarkParser.as_ast` we expose two structure conserving
  mappers.

  ### `map_ast`

  takes a function that will be called for each node of the AST, where a leaf node is either a quadruple
  like `{"code", [{"class", "inline"}], ["some code"], %{}}` or a text leaf like `"some code"`

  The result of the function call must be

  - for nodes — a quadruple of which the third element will be ignored -- that might change in future,
    and will therefore classically be `nil`. The other elements replace the node

  - for strings — strings

  A third parameter `ignore_strings` which defaults to `false` can be used to avoid invocation of the mapper
  function for text nodes

  As an example let us transform an ast to have symbol keys

        iex(0)> input = [
        ...(0)> {"h1", [], ["Hello"], %{title: true}},
        ...(0)> {"ul", [], [{"li", [], ["alpha"], %{}}, {"li", [], ["beta"], %{}}], %{}}]
        ...(0)> map_ast(input, fn {t, a, _, m} -> {String.to_atom(t), a, nil, m} end, true)
        [ {:h1, [], ["Hello"], %{title: true}},
          {:ul, [], [{:li, [], ["alpha"], %{}}, {:li, [], ["beta"], %{}}], %{}} ]

  **N.B.** If this returning convention is not respected `map_ast` might not complain, but the resulting
  transformation might not be suitable for `Earmark.Transform.transform` anymore. From this follows that
  any function passed in as value of the `postprocessor:` option must obey to these conventions.

  ### `map_ast_with`

  this is like `map_ast` but like a reducer an accumulator can also be passed through.

  For that reason the function is called with two arguments, the first element being the same value
  as in `map_ast` and the second the accumulator. The return values need to be equally augmented
  tuples.

  A simple example, annotating traversal order in the meta map's `:count` key, as we are not
  interested in text nodes we use the fourth parameter `ignore_strings` which defaults to `false`

        iex(0)> input = [
        ...(0)> {"ul", [], [{"li", [], ["one"], %{}}, {"li", [], ["two"], %{}}], %{}},
        ...(0)> {"p", [], ["hello"], %{}}]
        ...(0)> counter = fn {t, a, _, m}, c -> {{t, a, nil, Map.put(m, :count, c)}, c+1} end
        ...(0)> map_ast_with(input, 0, counter, true)
        {[ {"ul", [], [{"li", [], ["one"], %{count: 1}}, {"li", [], ["two"], %{count: 2}}], %{count: 0}},
           {"p", [], ["hello"], %{count: 3}}], 4}

  ## Structure Modifying Transformers

  For structure modifications a tree traversal is needed and no clear pattern of how to assist this task with
  tools has emerged yet.
  """

  @doc false
  def transform(ast, options \\ %{initial_indent: 0, indent: 2})

  def transform(ast, options) when is_list(options) do
    transform(ast, options |> Enum.into(%{initial_indent: 0, indent: 2}))
  end

  def transform(ast, options) when is_map(options) do
    options1 = options
      |> Map.put_new(:indent, 2)

    to_html(ast, options1)
  end

  @doc """
  Maps `fun` over every node of `ast`, conserving the tree's structure.

  `fun` receives each element quadruple and — unless `ignore_strings` is
  `true` — each text leaf. For quadruples it must return a quadruple (whose
  third element is ignored and replaced by the mapped children); for strings
  it must return a string.
  """
  def map_ast(ast, fun, ignore_strings \\ false) do
    _walk_ast(ast, fun, ignore_strings, [])
  end

  @doc """
  Like `map_ast/3` but threads an accumulator through the traversal.

  `fun` is called as `fun.(node, acc)` and must return `{mapped_node, acc}`;
  the result is `{mapped_ast, final_acc}`.
  """
  def map_ast_with(ast, value, fun, ignore_strings \\ false) do
    _walk_ast_with(ast, value, fun, ignore_strings, [])
  end

  defp maybe_add_newline(options)
  defp maybe_add_newline(%Options{compact_output: true}), do: []
  defp maybe_add_newline(_), do: ?\n

  defp to_html(ast, options) do
    _to_html(ast, options, Map.get(options, :initial_indent, 0)) |> IO.iodata_to_binary
  end

  defp _to_html(ast, options, level, verbatim \\ false)

  defp _to_html({:comment, _, content, _}, options, _level, _verbatim) do
    ["<!--", Enum.intersperse(content, ?\n), "-->", maybe_add_newline(options)]
  end

  defp _to_html({"code", atts, children, meta}, options, level, _verbatim) do
    verbatim = meta |> Map.get(:verbatim, false)

    # Never apply smartypants replacements inside code.
    [ open_tag("code", atts),
      _to_html(children, Map.put(options, :smartypants, false), level, verbatim),
      "</code>"]
  end

  defp _to_html({tag, atts, children, _}, options, level, verbatim) when tag in @compact_tags do
    [open_tag(tag, atts),
       children
       |> Enum.map(&_to_html(&1, options, level, verbatim)),
       "</", tag, ?>]
  end

  defp _to_html({tag, atts, _, _}, options, level, _verbatim) when tag in @void_elements do
    [ make_indent(options, level), open_tag(tag, atts), maybe_add_newline(options) ]
  end

  defp _to_html(elements, options, level, verbatim) when is_list(elements) do
    elements
    |> Enum.map(&_to_html(&1, options, level, verbatim))
  end

  defp _to_html(element, options, _level, false) when is_binary(element) do
    escape(element, options)
  end

  defp _to_html(element, options, level, true) when is_binary(element) do
    [make_indent(options, level), element]
  end

  defp _to_html({"pre", atts, children, meta}, options, level, _verbatim) do
    verbatim = meta |> Map.get(:verbatim, false)

    # Preformatted content must not be smartypants'ed either.
    [ make_indent(options, level),
      open_tag("pre", atts),
      _to_html(children, Map.put(options, :smartypants, false), level, verbatim),
      "</pre>", maybe_add_newline(options)]
  end

  defp _to_html({tag, atts, children, meta}, options, level, _verbatim) do
    verbatim = meta |> Map.get(:verbatim, false)

    [ make_indent(options, level),
      open_tag(tag, atts),
      maybe_add_newline(options),
      _to_html(children, options, level + 1, verbatim),
      close_tag(tag, options, level)]
  end

  defp close_tag(tag, options, level) do
    [make_indent(options, level), "</", tag, ?>, maybe_add_newline(options)]
  end

  defp escape(element, options)
  defp escape("", _options) do
    []
  end

  # BUG FIX (data corruption): the characters in these regexes and in the
  # smartypants replacements below had been mangled by a broken encoding
  # round-trip; restored to the intended dash/quote characters.
  @dbl1_rgx ~r{(^|[-–—/\(\[\{"”“\s])'}
  @dbl2_rgx ~r{(^|[-–—/\(\[\{‘\s])\"}
  defp escape(element, %{smartypants: true} = options) do
    # Unfortunately these regexes still have to be left.
    # It doesn't seem possible to make escape_to_iodata
    # transform, for example, "--'" to "–’" without
    # significantly complicating the code to the point
    # it outweighs the performance benefit.
    element =
      element
      |> replace(@dbl1_rgx, "\\1‘")
      |> replace(@dbl2_rgx, "\\1“")

    escape = Map.get(options, :escape, true)
    escape_to_iodata(element, 0, element, [], true, escape, 0)
  end

  defp escape(element, %{escape: escape}) do
    escape_to_iodata(element, 0, element, [], false, escape, 0)
  end

  defp escape(element, _options) do
    escape_to_iodata(element, 0, element, [], false, true, 0)
  end

  defp make_att(name_value_pair, tag)
  defp make_att({name, value}, _) do
    [" ", name, "=\"", value, "\""]
  end

  defp make_indent(options, level)
  defp make_indent(%Options{compact_output: true}, _level) do
    ""
  end
  defp make_indent(%{indent: indent}, level) do
    Stream.cycle([" "])
    |> Enum.take(level * indent)
  end

  defp open_tag(tag, atts)
  defp open_tag(tag, atts) when tag in @void_elements do
    [?<, tag, Enum.map(atts, &make_att(&1, tag)), " />"]
  end
  defp open_tag(tag, atts) do
    [?<, tag, Enum.map(atts, &make_att(&1, tag)), ?>]
  end

  # Optimized HTML escaping + smartypants, inspired by Plug.HTML
  # https://github.com/elixir-plug/plug/blob/v1.11.0/lib/plug/html.ex

  # Do not escape HTML entities (anything already written as "&#x..."):
  # extend the current unescaped run over the three-byte prefix.
  defp escape_to_iodata("&#x" <> rest, skip, original, acc, smartypants, escape, len) do
    escape_to_iodata(rest, skip, original, acc, smartypants, escape, len + 3)
  end

  # BUG FIX (data corruption): this table had been run through an HTML-entity
  # decoder, mapping e.g. ?< to "<" — i.e. escaping was a no-op. Restored
  # the actual entity replacements.
  escapes = [
    {?<, "&lt;"},
    {?>, "&gt;"},
    {?&, "&amp;"},
    {?", "&quot;"},
    {?', "&#39;"}
  ]

  # Can't use character codes for multibyte unicode characters
  smartypants_escapes = [
    {"---", "—"},
    {"--", "–"},
    {?', "’"},
    {?", "”"},
    {"...", "…"}
  ]

  # These match only if `smartypants` is true
  for {match, insert} <- smartypants_escapes do
    # Unlike HTML escape matches, smartypants matches may contain more than one character
    match_length = if is_binary(match), do: byte_size(match), else: 1

    defp escape_to_iodata(<<unquote(match), rest::bits>>, skip, original, acc, true, escape, 0) do
      escape_to_iodata(rest, skip + unquote(match_length), original, [acc | unquote(insert)], true, escape, 0)
    end

    defp escape_to_iodata(<<unquote(match), rest::bits>>, skip, original, acc, true, escape, len) do
      part = binary_part(original, skip, len)
      escape_to_iodata(rest, skip + len + unquote(match_length), original, [acc, part | unquote(insert)], true, escape, 0)
    end
  end

  for {match, insert} <- escapes do
    defp escape_to_iodata(<<unquote(match), rest::bits>>, skip, original, acc, smartypants, true, 0) do
      escape_to_iodata(rest, skip + 1, original, [acc | unquote(insert)], smartypants, true, 0)
    end

    defp escape_to_iodata(<<unquote(match), rest::bits>>, skip, original, acc, smartypants, true, len) do
      part = binary_part(original, skip, len)
      escape_to_iodata(rest, skip + len + 1, original, [acc, part | unquote(insert)], smartypants, true, 0)
    end
  end

  defp escape_to_iodata(<<_char, rest::bits>>, skip, original, acc, smartypants, escape, len) do
    escape_to_iodata(rest, skip, original, acc, smartypants, escape, len + 1)
  end

  defp escape_to_iodata(<<>>, 0, original, _acc, _smartypants, _escape, _len) do
    original
  end

  defp escape_to_iodata(<<>>, skip, original, acc, _smartypants, _escape, len) do
    [acc | binary_part(original, skip, len)]
  end

  # Sentinel marking where a node's children end during iterative traversal.
  @pop {:__end__}

  defp _pop_to_pop(result, intermediate \\ [])
  defp _pop_to_pop([@pop, {tag, atts, _, meta} | rest], intermediate) do
    [{tag, atts, intermediate, meta} | rest]
  end
  defp _pop_to_pop([continue | rest], intermediate) do
    _pop_to_pop(rest, [continue | intermediate])
  end

  defp _walk_ast(ast, fun, ignore_strings, result)
  defp _walk_ast([], _fun, _ignore_strings, result), do: Enum.reverse(result)
  defp _walk_ast([[] | rest], fun, ignore_strings, result) do
    _walk_ast(rest, fun, ignore_strings, _pop_to_pop(result))
  end
  defp _walk_ast([string | rest], fun, ignore_strings, result) when is_binary(string) do
    new = if ignore_strings, do: string, else: fun.(string)
    _walk_ast(rest, fun, ignore_strings, [new | result])
  end
  defp _walk_ast([{_, _, content, _} = tuple | rest], fun, ignore_strings, result) do
    {new_tag, new_atts, _, new_meta} = fun.(tuple)
    _walk_ast([content | rest], fun, ignore_strings, [@pop, {new_tag, new_atts, [], new_meta} | result])
  end
  defp _walk_ast([[h | t] | rest], fun, ignore_strings, result) do
    _walk_ast([h, t | rest], fun, ignore_strings, result)
  end

  defp _walk_ast_with(ast, value, fun, ignore_strings, result)
  defp _walk_ast_with([], value, _fun, _ignore_strings, result), do: {Enum.reverse(result), value}
  defp _walk_ast_with([[] | rest], value, fun, ignore_strings, result) do
    _walk_ast_with(rest, value, fun, ignore_strings, _pop_to_pop(result))
  end
  defp _walk_ast_with([string | rest], value, fun, ignore_strings, result) when is_binary(string) do
    if ignore_strings do
      _walk_ast_with(rest, value, fun, ignore_strings, [string | result])
    else
      {news, newv} = fun.(string, value)
      _walk_ast_with(rest, newv, fun, ignore_strings, [news | result])
    end
  end
  defp _walk_ast_with([{_, _, content, _} = tuple | rest], value, fun, ignore_strings, result) do
    {{new_tag, new_atts, _, new_meta}, new_value} = fun.(tuple, value)
    _walk_ast_with([content | rest], new_value, fun, ignore_strings, [@pop, {new_tag, new_atts, [], new_meta} | result])
  end
  defp _walk_ast_with([[h | t] | rest], value, fun, ignore_strings, result) do
    _walk_ast_with([h, t | rest], value, fun, ignore_strings, result)
  end
end
|
lib/earmark/transform.ex
| 0.768386 | 0.610163 |
transform.ex
|
starcoder
|
defmodule Edeliver.Relup.InsertInstruction do
@moduledoc """
Provides functions to insert relup instructions at a given position
which can be used in `Edeliver.Relup.Instruction` behaviour implementations
in the relup file.
"""
alias Edeliver.Relup.Instructions
@doc """
Inserts instruction(s) before the point of no return.
All instructions running before that point of no return which fail will cause the
upgrade to fail, while failing instructions running after that point will cause the
node to restart the release.
"""
@spec insert_before_point_of_no_return(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions) :: updated_instructions::Instructions.t|Instructions.instructions
def insert_before_point_of_no_return(instructions = %Instructions{}, new_instructions) do
  # Apply to both directions: the down instructions contain their own
  # :point_of_no_return marker for the downgrade path.
  %{instructions|
    up_instructions: insert_before_point_of_no_return(instructions.up_instructions, new_instructions),
    down_instructions: insert_before_point_of_no_return(instructions.down_instructions, new_instructions)
  }
end

# Plain instruction list: anchor on the :point_of_no_return marker.
def insert_before_point_of_no_return(existing_instructions, new_instructions) do
  insert_before_instruction(existing_instructions, new_instructions, :point_of_no_return)
end
@doc """
Inserts instruction(s) right after the point of no return.
This means that it is the first instruction which should not fail, because the release
handler will restart the release if any instruction fails after the point
of no return.
"""
@spec insert_after_point_of_no_return(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions) :: updated_instructions::Instructions.t|Instructions.instructions
def insert_after_point_of_no_return(instructions = %Instructions{}, new_instructions) do
  # Apply symmetrically to the upgrade and downgrade instruction lists.
  %{instructions|
    up_instructions: insert_after_point_of_no_return(instructions.up_instructions, new_instructions),
    down_instructions: insert_after_point_of_no_return(instructions.down_instructions, new_instructions)
  }
end

# Plain instruction list: anchor on the :point_of_no_return marker.
def insert_after_point_of_no_return(existing_instructions, new_instructions) do
  insert_after_instruction(existing_instructions, new_instructions, :point_of_no_return)
end
@doc """
Inserts instruction(s) right after the last `load_object_code` instruction
which is usually before the "point of no return" and one of the first instructions.
This means that it is the first custom instruction which is executed. It is executed twice,
once when checking whether the upgrade can be installed and once when the upgrade is installed.
"""
@spec insert_after_load_object_code(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions) :: updated_instructions::Instructions.t|Instructions.instructions
def insert_after_load_object_code(instructions = %Instructions{}, new_instructions) do
  %{instructions|
    up_instructions: insert_after_load_object_code(instructions.up_instructions, new_instructions),
    down_instructions: insert_after_load_object_code(instructions.down_instructions, new_instructions)
  }
end

def insert_after_load_object_code(existing_instructions, new_instructions) do
  # Reversing first makes keyfind return the *last* load_object_code entry.
  last_load_object_code_instruction = existing_instructions |> Enum.reverse |> List.keyfind(:load_object_code, 0)

  if last_load_object_code_instruction do
    insert_after_instruction(existing_instructions, new_instructions, last_load_object_code_instruction)
  else
    # No code is loaded at all in this relup: just append at the end.
    append(existing_instructions, new_instructions)
  end
end
@doc """
Appends instruction(s) to the instruction after the "point of no return" but before any instruction
which:

- loads or unloads new code, which means before any
  `load_module`, `load`, `add_module`, `delete_module`,
  `remove`, `purge` instruction and

- before any instruction which updates, starts or stops
  any running processes, which means before any
  `code_change`, `update`, `start`, `stop` instruction and

- before any instruction which (re-)starts or stops
  any application or the emulator, which means before any
  `add_application`, `remove_application`, `restart_application`,
  `restart_emulator` and `restart_new_emulator` instruction.

It does not consider load-instructions for `Edeliver.Relup.RunnableInstruction`s
as code loading instructions for the release. They are inserted by the
`RunnableInstruction` itself to ensure that the code of the runnable instruction
is loaded before the instruction is executed. See `Edeliver.Relup.ShiftInstruction.ensure_module_loaded_before_instruction/3`.
"""
@spec append_after_point_of_no_return(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions) :: updated_instructions::Instructions.t|Instructions.instructions
def append_after_point_of_no_return(instructions = %Instructions{}, new_instructions) do
  %{instructions|
    up_instructions: append_after_point_of_no_return(instructions.up_instructions, new_instructions),
    down_instructions: append_after_point_of_no_return(instructions.down_instructions, new_instructions)
  }
end

# Normalize a single instruction into a list.
def append_after_point_of_no_return(existing_instructions, new_instruction) when is_list(existing_instructions) and not is_list(new_instruction) do
  append_after_point_of_no_return(existing_instructions, [new_instruction])
end

def append_after_point_of_no_return(existing_instructions, new_instructions) when is_list(existing_instructions) do
  # Walk with an accumulator: flag = "passed the point of no return yet?",
  # list = already-visited instructions in reverse order.
  append_after_point_of_no_return(existing_instructions, new_instructions, false, [])
end

# Found the marker: everything from here on is after the point of no return.
defp append_after_point_of_no_return(_existing_instructions = [:point_of_no_return|rest], new_instructions, _after_point_of_no_return = false, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, true, [:point_of_no_return|instructions_before_instruction])
end

defp append_after_point_of_no_return(_existing_instructions = [instruction|rest], new_instructions, after_point_of_no_return = false, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [instruction|instructions_before_instruction])
end

# skip instructions which loads code and are inserted before a runnable instruction. see `Edeliver.Relup.RunnableInstruction`
# and `Edeliver.Relup.Instruction.ensure_module_loaded_before_instruction/3`. That load instructions are inserted by the
# `RunnableInstruction` itself and are not considered to be a 'real' code loading instruction for the running application.
defp append_after_point_of_no_return(_existing_instructions = [load_runnable_instruction = {:load_module, module}, runnable_instruction = {:apply, {module, :run, _args}}|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [runnable_instruction, load_runnable_instruction|instructions_before_instruction])
end

defp append_after_point_of_no_return(_existing_instructions = [load_runnable_instruction = {:load_module, module, _dep_mods}, runnable_instruction = {:apply, {module, :run, _args}}|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [runnable_instruction, load_runnable_instruction|instructions_before_instruction])
end

defp append_after_point_of_no_return(_existing_instructions = [load_runnable_instruction = {:load_module, module, _pre_purge, _post_purge, _dep_mods}, runnable_instruction = {:apply, {module, :run, _args}}|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [runnable_instruction, load_runnable_instruction|instructions_before_instruction])
end

defp append_after_point_of_no_return(_existing_instructions = [load_runnable_instruction = {:add_module, module}, runnable_instruction = {:apply, {module, :run, _args}}|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [runnable_instruction, load_runnable_instruction|instructions_before_instruction])
end

defp append_after_point_of_no_return(_existing_instructions = [load_runnable_instruction = {:add_module, module, _dep_mods}, runnable_instruction = {:apply, {module, :run, _args}}|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [runnable_instruction, load_runnable_instruction|instructions_before_instruction])
end

defp append_after_point_of_no_return(_existing_instructions = [load_runnable_instruction = {:load, {module, _pre_purge, _post_purge}}, runnable_instruction = {:apply, {module, :run, _args}}|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [runnable_instruction, load_runnable_instruction|instructions_before_instruction])
end

# check whether the instruction is an instruction modifying code, processes or applications
defp append_after_point_of_no_return(existing_instructions = [instruction|rest], new_instructions, after_point_of_no_return = true, instructions_before_instruction) do
  if modifies_code?(instruction) or modifies_processes?(instruction) or modifies_applications?(instruction) do
    Enum.reverse(instructions_before_instruction) ++ new_instructions ++ existing_instructions
  else
    append_after_point_of_no_return(rest, new_instructions, after_point_of_no_return, [instruction|instructions_before_instruction])
  end
end

# End of list reached without meeting a modifying instruction: append at end.
defp append_after_point_of_no_return(_existing_instructions = [], new_instructions, _after_point_of_no_return, instructions_before_instruction) do
  Enum.reverse(instructions_before_instruction) ++ new_instructions
end
@doc """
Appends instruction(s) to the list of other instructions.
"""
@spec append(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions) :: updated_instructions::Instructions.t|Instructions.instructions
def append(instructions = %Instructions{}, new_instructions) do
  # Append to both the upgrade and the downgrade instruction lists.
  %{instructions|
    up_instructions: append(instructions.up_instructions, new_instructions),
    down_instructions: append(instructions.down_instructions, new_instructions)
  }
end

def append(existing_instructions, new_instructions) when is_list(existing_instructions) do
  case new_instructions do
    additions when is_list(additions) -> existing_instructions ++ additions
    single_instruction -> existing_instructions ++ [single_instruction]
  end
end
@doc """
Inserts instruction(s) before the given instruction.
"""
@spec insert_before_instruction(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions, before_instruction::Instructions.instruction) :: updated_instructions::Instructions.t|Instructions.instructions
def insert_before_instruction(instructions = %Instructions{}, new_instructions, before_instruction) do
  # Note the symmetry: "before" in the upgrade direction corresponds to
  # "after" in the downgrade direction.
  %{instructions|
    up_instructions: insert_before_instruction(instructions.up_instructions, new_instructions, before_instruction),
    down_instructions: insert_after_instruction(instructions.down_instructions, new_instructions, before_instruction)
  }
end

# Normalize a single instruction into a list.
def insert_before_instruction(existing_instructions, new_instruction, before_instruction) when is_list(existing_instructions) and not is_list(new_instruction) do
  insert_before_instruction(existing_instructions, [new_instruction], before_instruction)
end

def insert_before_instruction(existing_instructions, new_instructions, before_instruction) when is_list(existing_instructions) do
  insert_before_instruction(existing_instructions, new_instructions, before_instruction, [])
end

# Anchor found: splice the new instructions in front of it.
defp insert_before_instruction(existing_instructions = [before_instruction|_], new_instructions, before_instruction, instructions_before_instruction) do
  Enum.reverse(instructions_before_instruction) ++ new_instructions ++ existing_instructions
end

defp insert_before_instruction(_existing_instructions = [no_point_of_no_return_instruction|rest], new_instructions, before_instruction, instructions_before_instruction) do
  insert_before_instruction(rest, new_instructions, before_instruction, [no_point_of_no_return_instruction|instructions_before_instruction])
end

# Anchor not present: append the new instructions at the end.
defp insert_before_instruction(_existing_instructions = [], new_instructions, _before_instruction, instructions_before_instruction) do
  Enum.reverse(instructions_before_instruction) ++ new_instructions
end
@doc """
Inserts instruction(s) after the given instruction.
"""
@spec insert_after_instruction(Instructions.t|Instructions.instructions, new_instructions::Instructions.instruction|Instructions.instructions, after_instruction::Instructions.instruction) :: updated_instructions::Instructions.t|Instructions.instructions
def insert_after_instruction(instructions = %Instructions{}, new_instructions, after_instruction) do
  # Symmetric to insert_before_instruction/3: "after" on the way up is
  # "before" on the way down.
  %{instructions|
    up_instructions: insert_after_instruction(instructions.up_instructions, new_instructions, after_instruction),
    down_instructions: insert_before_instruction(instructions.down_instructions, new_instructions, after_instruction)
  }
end

# Normalize a single instruction into a list.
def insert_after_instruction(existing_instructions, new_instruction, after_instruction) when is_list(existing_instructions) and not is_list(new_instruction) do
  insert_after_instruction(existing_instructions, [new_instruction], after_instruction)
end

def insert_after_instruction(existing_instructions, new_instructions, after_instruction) when is_list(existing_instructions) do
  insert_after_instruction(existing_instructions, new_instructions, after_instruction, [])
end

# Anchor found: splice the new instructions right behind it.
defp insert_after_instruction(_existing_instructions = [after_instruction|rest], new_instructions, after_instruction, instructions_before_instruction) do
  Enum.reverse(instructions_before_instruction) ++ [after_instruction|new_instructions] ++ rest
end

defp insert_after_instruction(_existing_instructions = [no_point_of_no_return_instruction|rest], new_instructions, after_instruction, instructions_before_instruction) do
  insert_after_instruction(rest, new_instructions, after_instruction, [no_point_of_no_return_instruction|instructions_before_instruction])
end

# Anchor not present: append the new instructions at the end.
defp insert_after_instruction(_existing_instructions = [], new_instructions, _after_instruction, instructions_before_instruction) do
  Enum.reverse(instructions_before_instruction) ++ new_instructions
end
@doc """
Returns true if the given instruction is an instruction which modifies an application
by either (re-)starting or stopping it or by restarting the emulator. It returns
`true` for the `add_application`, `remove_application`, `restart_application`,
`restart_new_emulator` and the `restart_emulator` relup instructions.
"""
@spec modifies_applications?(Instructions.instruction) :: boolean
def modifies_applications?({:add_application, _application}), do: true
def modifies_applications?({:add_application, _application, _type}), do: true
def modifies_applications?({:remove_application, _application}), do: true
def modifies_applications?({:restart_application, _application}), do: true
def modifies_applications?(:restart_new_emulator), do: true
def modifies_applications?(:restart_emulator), do: true
# Any other instruction does not modify applications.
def modifies_applications?(_), do: false
@doc """
Checks whether the given relup instruction loads, unloads or purges code.
Returns `true` for the `load_module`, `add_module`, `delete_module`,
`load`, `remove` and `purge` instructions and `false` for anything else.
"""
@spec modifies_code?(Instructions.instruction) :: boolean
def modifies_code?(instruction) do
  case instruction do
    {:load_module, _module} -> true
    {:load_module, _module, _dep_mods} -> true
    {:load_module, _module, _pre_purge, _post_purge, _dep_mods} -> true
    {:add_module, _module} -> true
    {:add_module, _module, _dep_mods} -> true
    {:load, {_module, _pre_purge, _post_purge}} -> true
    {:purge, [_module]} -> true
    {:remove, {_module, _pre_purge, _post_purge}} -> true
    {:delete_module, _module} -> true
    {:delete_module, _module, _dep_mods} -> true
    _other -> false
  end
end
@doc """
Returns true if the given instruction is an instruction which modifies any process
by either sending the `code_change` sys event or by starting or stopping any
process. It returns `true` for the `code_change`, `start`, `stop` and `update`
relup instructions.
"""
@spec modifies_processes?(Instructions.instruction) :: boolean
def modifies_processes?({:update, _mod}), do: true
def modifies_processes?({:update, _mod, :supervisor}), do: true
def modifies_processes?({:update, _mod, _change_or_dep_mods}), do: true
def modifies_processes?({:update, _mod, _change, _dep_mods}), do: true
def modifies_processes?({:update, _mod, _change, _pre_purge, _post_purge, _dep_mods}), do: true
# BUG FIX: these two clauses previously pattern-matched the *aliases* `Timeout`
# and `ModType` (i.e. the atoms :"Elixir.Timeout" / :"Elixir.ModType") instead
# of binding variables, so 7- and 8-tuple `update` instructions were never
# recognized and fell through to the `false` clause. Wildcards now match any
# timeout / modification type, per the OTP appup `update` instruction shape.
def modifies_processes?({:update, _mod, _timeout, _change, _pre_purge, _post_purge, _dep_mods}), do: true
def modifies_processes?({:update, _mod, _mod_type, _timeout, _change, _pre_purge, _post_purge, _dep_mods}), do: true
def modifies_processes?({:code_change, [{_mod, _extra}]}), do: true
def modifies_processes?({:code_change, _mode, [{_mod, _extra}]}), do: true
def modifies_processes?({:start, [_mod]}), do: true
def modifies_processes?({:stop, [_mod]}), do: true
# Any other instruction does not touch processes.
def modifies_processes?(_), do: false
end
|
lib/edeliver/relup/insert_instruction.ex
| 0.826747 | 0.468061 |
insert_instruction.ex
|
starcoder
|
defmodule KingAlbertEx.Game do
  @moduledoc """
  This module defines a type representing the total game state at a given point in time, along with functions
  relating to that type. A "game state" here includes everything about the current game: card positions, messages
  to the user, etc.
  """

  alias KingAlbertEx.Config
  alias KingAlbertEx.Deck
  alias KingAlbertEx.Game
  alias KingAlbertEx.Board
  alias KingAlbertEx.Move

  # ANSI escape sequence: clear the screen and move the cursor to the top-left
  # corner, so each turn is rendered on a fresh screen.
  @clear_screen "\x1b[2J\x1b[1;1H"

  # Reserved user commands; any other input is interpreted as a move.
  @quit_command "quit"
  @help_command "help"
  @rules_command "rules"

  @help_text """
  Enter two letters to describe your move, indicating the "from" position and the "to" position.
  For full rules, enter "#{@rules_command}". To quit, enter "#{@quit_command}".
  """

  @rules_text """
  The game board consists of four Foundations (labelled a to d), nine Columns (e to m), and a Reserve (n to t).
  The aim of the game is to complete the Foundations in ascending order, from Ace through to King in each suit.
  In each Column, only the last card (nearer the bottom of the screen) is available for play.
  All cards in the Reserve are available to play; however cards cannot be moved back into the Reserve once played.
  Any empty Foundation can be filled with the Ace of its suit; and thereafter each Foundation must then be built up
  in ascending sequence from 2 through to King.
  Cards can be played to Columns only one at a time, in descending order in alternating colours. For example,
  if the last card in a Column is a 4\u2661, then either the 3\u2663 or the 3\u2660 could be added.
  You have won when the top card of each Foundation is a King.
  """

  # board: current card positions; messages: transient lines shown under the
  # board on the next render; over: whether the game loop should stop.
  @opaque t() :: %__MODULE__{
          board: Board.t(),
          messages: [String.t()],
          over: boolean
        }
  defstruct board: nil, messages: [], over: false

  # Creates a fresh game from an (already shuffled) deck.
  @spec new(Deck.t()) :: t()
  def new(shuffled_deck) do
    board = Board.new(shuffled_deck)
    %Game{board: board}
  end

  # Renders the full screen: clear-screen escape, the board, then any messages.
  @spec display(t()) :: String.t()
  def display(%Game{board: board, messages: messages}) do
    Enum.join([@clear_screen, Board.display(board)] ++ messages, "\n")
  end

  # Applies one line of user input: built-in commands are dispatched first,
  # anything else is parsed as a move.
  @spec apply(t(), String.t()) :: t()
  def apply(game, command) do
    # Echo of the prompt plus the typed command, kept visible in the output.
    prompted_message = Config.prompt() <> command

    case command do
      @quit_command ->
        handle_quit(game, prompted_message)

      @help_command ->
        handle_help(game, prompted_message)

      @rules_command ->
        handle_rules(game, prompted_message)

      command ->
        # Move.from_string/1 returns nil when the input is not a valid
        # two-position move description.
        case Move.from_string(command) do
          nil -> handle_ill_formed_move(game, prompted_message)
          move -> handle_well_formed_move(game, prompted_message, move)
        end
    end
  end

  @spec over?(t()) :: boolean
  def over?(%Game{over: over}), do: over

  # Marks the game as finished; over?/1 then returns true.
  @spec finalize(t()) :: t()
  defp finalize(game), do: %Game{game | over: true}

  @spec update_board(t(), Board.t()) :: t()
  defp update_board(game, board), do: %Game{game | board: board}

  # Appends messages to the ones already queued for the next render.
  @spec add_messages(t(), [String.t()]) :: t()
  defp add_messages(%Game{messages: messages} = game, extra_messages) do
    %Game{game | messages: messages ++ extra_messages}
  end

  # Replaces (rather than appends to) the queued messages.
  @spec reset_messages(t(), [String.t()]) :: t()
  defp reset_messages(game, messages) do
    %Game{game | messages: messages}
  end

  @spec handle_quit(t(), String.t()) :: t()
  defp handle_quit(game, prompted_message) do
    game |> finalize() |> add_messages([prompted_message, "Bye!"])
  end

  @spec handle_help(t(), String.t()) :: t()
  defp handle_help(game, prompted_message) do
    add_messages(game, [prompted_message, @help_text])
  end

  @spec handle_rules(t(), String.t()) :: t()
  defp handle_rules(game, prompted_message) do
    add_messages(game, [prompted_message, @rules_text])
  end

  @spec handle_ill_formed_move(t(), String.t()) :: t()
  defp handle_ill_formed_move(game, prompted_message) do
    add_messages(game, [prompted_message, "Invalid move.", @help_text])
  end

  # Applies a syntactically valid move to the board, then checks whether the
  # game has been won or lost as a result.
  @spec handle_well_formed_move(t(), String.t(), Move.t()) :: t()
  defp handle_well_formed_move(%Game{board: board} = game, prompted_message, move) do
    case Board.apply(board, move) do
      # Board.apply/2 yields nil when the move is not legal in this position.
      nil ->
        add_messages(game, [prompted_message, "Invalid move. Try again."])

      new_board ->
        game = update_board(game, new_board)

        case Board.victory_state(new_board) do
          :won ->
            game |> finalize() |> reset_messages(["You won! Congratulations."])

          :lost ->
            game |> finalize() |> reset_messages(["No legal moves are available. You lost."])

          :ongoing ->
            # Clear transient messages so the next render shows only the board.
            reset_messages(game, [])
        end
    end
  end
end
|
lib/king_albert_ex/game.ex
| 0.787482 | 0.497925 |
game.ex
|
starcoder
|
defmodule Tabula do
  @moduledoc """
  Tabula can transform a list of maps (structs too, e.g. Ecto schemas)
  or Keywords into an ASCII/GitHub Markdown table.
  """

  import Enum, only: [
    concat: 2,
    intersperse: 2,
    map: 2,
    max: 1,
    with_index: 1,
    zip: 2
  ]

  # Special column name: when present in the column list it is rendered as a
  # 1-based row-index column (see values/2 and formatters/1).
  @index "#"

  # A charlist on purpose: rows are assembled as iodata and flattened once
  # with :erlang.list_to_binary/1 in render_table/2.
  @newline '\n'

  # Separator/border character sets for the supported table styles.
  @sheets \
  org_mode: [
    heading: " | ",
    heading_border: "-+-",
    heading_left_outer_border: "|-",
    heading_right_outer_border: "-|",
    left_outer_border: "| ",
    right_outer_border: " |",
    row: " | ",
    spacer: "-"
  ],
  github_md: [
    heading: " | ",
    heading_border: " | ",
    heading_left_outer_border: "| ",
    heading_right_outer_border: " |",
    left_outer_border: "| ",
    right_outer_border: " |",
    row: " | ",
    spacer: "-"
  ]

  @default_sheet :org_mode

  # `use Tabula, style: ...` injects wrappers that bake the given options in
  # as defaults; per-call options are merged over them.
  defmacro __using__(opts) do
    quote do
      def print_table(rows) do
        unquote(__MODULE__).print_table(rows, unquote(opts))
      end
      def print_table(rows, override_opts) do
        unquote(__MODULE__).print_table(
          rows, Keyword.merge(unquote(opts), override_opts))
      end
      def render_table(rows) do
        unquote(__MODULE__).render_table(rows, unquote(opts))
      end
      def render_table(rows, override_opts) do
        unquote(__MODULE__).render_table(
          rows, Keyword.merge(unquote(opts), override_opts))
      end
    end
  end

  # Uniform cell access over maps, keyword lists and (via Any) structs.
  defprotocol Row do
    @fallback_to_any true
    def get(row, col, default \\ nil)
    def keys(row)
  end

  defimpl Row, for: Map do
    def get(row, col, default \\ nil), do: Map.get(row, col, default)
    def keys(row), do: Map.keys(row)
  end

  defimpl Row, for: List do
    def get(row, col, default \\ nil), do: Keyword.get(row, col, default)
    def keys(row), do: Keyword.keys(row)
  end

  defimpl Row, for: Any do
    def get(%{__struct__: _} = row, col, default \\ nil), do: Map.get(row, col, default)
    def keys(%{__struct__: _} = row) do
      row
      |> Map.from_struct
      |> Map.keys
    end
  end

  # Renders the table and writes it to stdout.
  def print_table(rows, opts \\ []) do
    rows
    |> render_table(opts)
    |> IO.puts()
  end

  # Renders the rows to a binary. Columns come from opts[:only] or from the
  # keys of the first row.
  # NOTE(review): an empty `rows` list raises a FunctionClauseError in
  # extract_cols/2 (`[first | _]` does not match `[]`) — confirm whether
  # callers guarantee non-empty input.
  def render_table(rows, opts \\ []) do
    rows
    |> extract_cols(opts)
    |> render_table(rows, opts)
    |> :erlang.list_to_binary()
  end

  # Builds the table as iodata: heading row, border row, then one row per
  # input element (with its 1-based index available for the "#" column).
  defp render_table([_ | _] = cols, rows, opts) do
    widths = max_widths(cols, rows)
    formatters = formatters(widths, opts)
    spacers = spacers(widths, opts)
    [
      render_row(cols, :heading, formatters, opts),
      render_row(spacers, :heading_border, formatters, opts),
      rows
      |> with_index()
      |> map(fn indexed_row ->
        cols
        |> values(indexed_row)
        |> render_row(:row, formatters, opts)
      end)
    ]
  end

  # For each column, the width needed to fit the widest of: the column name,
  # the widest cell value, and the largest row index.
  def max_widths(cols, rows) do
    max_index =
      rows
      |> length()
      |> strlen()
    map(cols, fn k ->
      max([
        strlen(k),
        max_index
        | map(rows, &strlen(Row.get(&1, k)))
      ])
    end)
  end

  defp extract_cols([first | _], opts) do
    case opts[:only] do
      cols when is_list(cols) -> cols
      nil -> Row.keys(first)
    end
  end

  # Formats one row of cells as iodata, applying the per-column padding
  # formatter and the style's separators/borders, terminated by a newline.
  defp render_row(cells, style_element, formatters, opts) do
    separator = style(style_element, opts)
    {left_outer_border, right_outer_border} =
      outer_border_style(style_element, opts)
    row =
      cells
      |> zip(formatters)
      |> map(fn {k, f} -> f.(k) end)
      |> intersperse(separator)
    concat([left_outer_border, row, right_outer_border], @newline)
  end

  # Converts a single cell value to a string; structs use String.Chars when
  # available, everything else falls back to inspect/1.
  defp render_cell(v) when is_binary(v), do: v
  defp render_cell(v) when is_number(v), do: inspect(v)
  defp render_cell(%{__struct__: _} = v) do
    if String.Chars.impl_for(v) do
      to_string(v)
    else
      inspect(v)
    end
  end
  defp render_cell(v), do: inspect(v)

  # One padding function per column: strings pad right, numbers pad left,
  # and the "#" heading pads left.
  defp formatters(widths, _opts) do
    map(widths, fn w ->
      fn @index = cell ->
           # need to pad_leading '#' orelse github fails to render
           String.pad_leading(cell, w)
         cell when is_binary(cell) ->
           String.pad_trailing(cell, w)
         cell when is_number(cell) ->
           cell
           |> render_cell()
           |> String.pad_leading(w)
         cell ->
           cell
           |> render_cell()
           |> String.pad_trailing(w)
      end
    end)
  end

  # One run of spacer characters per column, used for the heading border row.
  defp spacers(widths, opts) do
    spacer = style(:spacer, opts)
    map(widths, &String.duplicate(spacer, &1))
  end

  # Rendered display width of a value (grapheme count of its string form).
  defp strlen(x) do
    x
    |> render_cell
    |> String.length()
  end

  # Cell values for one (row, index) pair; the "#" column yields the 1-based
  # row number.
  defp values(cols, {row, index}) do
    map(cols, fn (@index) -> index + 1
      (col) -> Row.get(row, col)
    end)
  end

  # The heading border uses its own outer-border characters; heading and data
  # rows share the regular ones.
  defp outer_border_style(:heading_border, opts) do
    {style(:heading_left_outer_border, opts),
     style(:heading_right_outer_border, opts)}
  end
  defp outer_border_style(style, opts) when style in [:heading, :row] do
    {style(:left_outer_border, opts),
     style(:right_outer_border, opts)}
  end

  # Looks up a style element in the selected sheet (opts[:style] or default).
  defp style(style, opts) do
    sheet = Keyword.get(opts, :style, @default_sheet)
    @sheets[sheet][style]
  end
end
|
lib/tabula.ex
| 0.641647 | 0.477615 |
tabula.ex
|
starcoder
|
defmodule Number.Currency do
  @moduledoc """
  Provides functions for converting numbers into formatted currency strings.
  """

  import Number.Delimit, only: [number_to_delimited: 2]

  @doc """
  Converts a number to a formatted currency string.

  ## Parameters

  * `number` - A float or integer to convert.
  * `options` - A keyword list of options. See the documentation of all
    available options below for more information.

  ## Options

  * `:unit` - The currency symbol to use. Default: "$"
  * `:precision` - The number of decimal places to include. Default: 2
  * `:delimiter` - The character to use to delimit the number by thousands.
    Default: ","
  * `:separator` - The character to use to separate the number from the decimal
    places. Default: "."
  * `:format` - The format of the number. This can be used to put the currency
    symbol in a different place. See the examples for usage. There are two
    supported format string placeholders:
      * `%u` - Represents the currency symbol, or unit.
      * `%n` - Represents the number.
  * `:negative_format` - The format of the number when it is negative. Uses the
    same formatting placeholders as the `:format` option.

  Default config for these options can be specified in the `Number`
  application configuration.

      config :number, currency: [
        unit: "Β£",
        precision: 2,
        delimiter: ",",
        separator: ".",
        format: "%u%n", # "Β£30.00"
        negative_format: "(%u%n)" # "(Β£30.00)"
      ]

  ## Examples

      iex> Number.Currency.number_to_currency(nil)
      nil

      iex> Number.Currency.number_to_currency(1000)
      "$1,000.00"

      iex> Number.Currency.number_to_currency(1000, unit: "Β£")
      "Β£1,000.00"

      iex> Number.Currency.number_to_currency(-1000)
      "-$1,000.00"

      iex> Number.Currency.number_to_currency(-234234.23)
      "-$234,234.23"

      iex> Number.Currency.number_to_currency(1234567890.50)
      "$1,234,567,890.50"

      iex> Number.Currency.number_to_currency(1234567890.506)
      "$1,234,567,890.51"

      iex> Number.Currency.number_to_currency(1234567890.506, precision: 3)
      "$1,234,567,890.506"

      iex> Number.Currency.number_to_currency(-1234567890.50, negative_format: "(%u%n)")
      "($1,234,567,890.50)"

      iex> Number.Currency.number_to_currency(1234567890.50, unit: "R$", separator: ",", delimiter: "")
      "R$1234567890,50"

      iex> Number.Currency.number_to_currency(1234567890.50, unit: "R$", separator: ",", delimiter: "", format: "%n %u")
      "1234567890,50 R$"

      iex> Number.Currency.number_to_currency(Decimal.new(50.0))
      "$50.00"

      iex> Number.Currency.number_to_currency(Decimal.new(-100.01))
      "-$100.01"

      iex> Number.Currency.number_to_currency(Decimal.new(-100.01), unit: "$", separator: ",", delimiter: ".", negative_format: "- %u %n")
      "- $ 100,01"
  """
  # Spec fixed: the nil clause returns nil, not a String.t().
  @spec number_to_currency(Number.t(), list) :: String.t() | nil
  def number_to_currency(number, options \\ [])
  def number_to_currency(nil, _options), do: nil
  def number_to_currency(number, options) do
    options = Keyword.merge(config(), options)
    {number, format} = get_format(number, options)
    number = number_to_delimited(number, options)
    # Binary patterns instead of the previous ~r/%u/ and ~r/%n/ regexes:
    # with a regex, String.replace/3 expands capture escapes (e.g. "\\0",
    # "\\g{1}") occurring in the replacement, so a currency unit containing
    # a backslash could mangle the output. Binary patterns insert the
    # replacement literally and skip regex compilation entirely. The "%u"
    # placeholder is still substituted before "%n", matching the original
    # replacement order.
    format
    |> String.replace("%u", options[:unit])
    |> String.replace("%n", number)
  end

  # Normalizes the number to a Decimal and picks the format string: negative
  # numbers use :negative_format (or "-" prefixed :format) with the absolute
  # value, everything else uses :format unchanged.
  defp get_format(number, options) do
    number = Decimal.new(number)
    # NOTE(review): Decimal.cmp/2 is deprecated in recent Decimal releases in
    # favour of Decimal.compare/2 — presumably fine on the pinned version;
    # confirm before upgrading the dependency.
    case Decimal.cmp(number, Decimal.new(0)) do
      :lt -> {Decimal.abs(number), options[:negative_format] || "-#{options[:format]}"}
      _ -> {number, options[:format]}
    end
  end

  # Built-in defaults, overridden by the :number, :currency app environment.
  defp config do
    defaults = [
      delimiter: ",",
      separator: ".",
      precision: 2,
      unit: "$",
      format: "%u%n"
    ]
    Keyword.merge(defaults, Application.get_env(:number, :currency, []))
  end
end
|
lib/number/currency.ex
| 0.925289 | 0.730506 |
currency.ex
|
starcoder
|
defmodule Roger.Job do
  @moduledoc """
  Base module for implementing Roger jobs.

  To start, `use Roger.Job` in your module. The only required callback
  to implement is the `perform/1` function.

      defmodule TestJob do
        use Roger.Job

        def perform(_args) do
          # perform some work here...
        end
      end

  Other functions that can be implemented in a job module are the
  following:

  `queue_key/1` - Enforces job uniqueness. When returning a string
  from this function, Roger enforces that only one job per queue key
  can be put in the queue at the same time. Only when the job has left
  the queue (after it has been executed), it will be possible to
  enqueue a job with the same queue key again.

  `execution_key/1` - Enforces job execution serialization. When
  returning a string from this function, Roger enforces that not more
  than one job with the same job is executed concurrently. However, it
  is still possible to have multiple jobs with the same execution key
  enqueued, but jobs that have the same execution key will be put in a
  waiting queue and processed serially.

  `queue_type/1` - Specifies which partition queue the job will run
  on. By default, this function returns `:default`, the default queue
  type.

  `retryable?/0` - Specifies whether the job should be retried using
  an exponential backoff scheme. The default implementation returns
  false, meaning that jobs will not be retried.
  """

  @type t :: %__MODULE__{}

  @derive {Poison.Encoder, only: ~w(id module args queue_key execution_key retry_count started_at queued_at)a}
  # module/args identify the work; queue_key/execution_key implement the
  # uniqueness/serialization contracts documented above; the remaining fields
  # are bookkeeping set by Roger itself.
  defstruct id: nil,
            module: nil,
            args: nil,
            queue_key: nil,
            execution_key: nil,
            retry_count: 0,
            started_at: 0,
            queued_at: 0,
            max_execution_time: :infinity

  alias Roger.{Queue, Partition.Global, Job}

  require Logger

  # AMQP content-type tag for payloads encoded with :erlang.term_to_binary/1.
  # NOTE(review): "partition/x-erlang-binary" is an unusual MIME type —
  # possibly meant "application/x-erlang-binary". Confirm before changing;
  # consumers may match on the exact string.
  @content_type "partition/x-erlang-binary"

  @doc """
  Enqueues a job in the given partition.
  """
  def enqueue(%__MODULE__{} = job, partition_id, override_queue \\ nil) do
    queue = Queue.make_name(partition_id, override_queue || queue_type(job))
    # Check the queue key; when there is a queue key and it is not
    # queued, immediately add it to the queue key set to prevent
    # races.
    if job.queue_key != nil and Global.queued?(partition_id, job.queue_key, :add) do
      {:error, :duplicate}
    else
      # Stamp the enqueue time just before publishing to AMQP.
      job = %Job{job | queued_at: Roger.now()}
      channel_name = Application.get_env(:roger, :channel_name)
      {:ok, channel} = AMQP.Application.get_channel(channel_name)
      AMQP.Basic.publish(channel, "", queue, encode(job), Job.publish_opts(job, partition_id))
    end
  end

  @doc """
  Constructs the AMQP options for publishing the job
  """
  def publish_opts(%__MODULE__{} = job, partition_id) do
    [content_type: @content_type, persistent: true, message_id: job.id, app_id: partition_id]
  end

  @doc """
  To implement a job module, `use Roger.Job` in your module, and
  implement its required `perform/1` function.
  """
  defmacro __using__(_) do
    quote location: :keep do
      @behaviour Roger.Job
      # Default queue type; job modules may override it.
      @doc false
      def queue_type(_args), do: :default
      defoverridable queue_type: 1
    end
  end

  @callback queue_key(any) :: String.t()
  @callback execution_key(any) :: String.t()
  @callback queue_type(any) :: atom
  @callback perform(any) :: any
  @callback retryable?() :: true | false

  @doc """
  Ability to set max execution time of the job in seconds.
  The default is :infinity which will allow the job unlimited time to finish.
  Minimum value is 1 seconds.
  """
  @callback max_execution_time() :: integer() | :infinity

  @optional_callbacks queue_key: 1, execution_key: 1, retryable?: 0, max_execution_time: 0

  @doc """
  Creates a new job based on a job module.

  The given `module` must exist as an Elixir module and must be
  implementing the Job behaviour (`use Roger.Job`). Arguments can be
  passed by the `args` parameter as a list. Job `id` is a random hex assigned to the job.

  The function returns the Job struct, which can be sent off to the
  queues using `Job.enqueue/2`.

  ## Examples

      iex> {:ok, job} = Roger.Job.create(Roger.JobTest.SquareJob, [4])
      iex> job.__struct__
      Roger.Job
  """
  def create(module, args \\ [], id \\ generate_job_id()) when is_atom(module) do
    # Collect the optional queue_key/execution_key values from the job module,
    # defaulting each to nil when the callback is not exported.
    keys =
      ~w(queue_key execution_key)a
      |> Enum.map(fn prop ->
        if function_exported?(module, prop, 1) do
          {prop, Kernel.apply(module, prop, [args])}
        else
          {prop, nil}
        end
      end)
      |> Enum.into(%{})
    # Optional max_execution_time callback; a returned 0 is coerced to the
    # documented 1-second minimum.
    max_execution_time =
      if function_exported?(module, :max_execution_time, 0) do
        execution_time =
          case apply(module, :max_execution_time, []) do
            0 -> 1
            execution_time -> execution_time
          end
        %{max_execution_time: execution_time}
      else
        %{}
      end
    %__MODULE__{module: module, args: args, id: id}
    |> Map.merge(keys)
    |> Map.merge(max_execution_time)
    |> validate
  end

  # 20 random bytes rendered as a lowercase hex-base32 string.
  defp generate_job_id do
    :crypto.strong_rand_bytes(20) |> Base.hex_encode32(case: :lower)
  end

  @doc """
  Executes the given job.

  This function is called from within a `Job.Worker` process, there's
  no need to call it yourself.
  """
  def execute(%__MODULE__{} = job) do
    Kernel.apply(job.module, :perform, [job.args])
  end

  @doc """
  Decode a binary payload into a Job struct, and validates it.
  """
  @spec decode(data :: binary) :: {:ok, Roger.Job.t()} | {:error, msg :: String.t()}
  def decode(payload) do
    # NOTE(review): :erlang.binary_to_term/1 on an untrusted payload can
    # create arbitrary atoms and large terms (resource-exhaustion footgun).
    # If payloads can originate outside trusted publishers, consider
    # :erlang.binary_to_term(payload, [:safe]) — confirm whether the broker
    # is considered a trusted boundary here.
    try do
      {:ok, :erlang.binary_to_term(payload)}
    rescue
      ArgumentError ->
        {:error, "Job decoding error"}
    end
    |> validate
  end

  @doc """
  Encodes a job into a binary payload.
  """
  @spec encode(job :: Job.t()) :: binary
  def encode(job) do
    job |> :erlang.term_to_binary()
  end

  # Validation pipeline: unwraps {:ok, job}, passes errors through, then
  # checks id, module atom, module loadability and a perform/1 export.
  defp validate({:ok, job}), do: validate(job)
  defp validate({:error, _} = e), do: e
  defp validate(%__MODULE__{id: id}) when not is_binary(id) do
    {:error, "Job id must be set"}
  end
  defp validate(%__MODULE__{module: module}) when not is_atom(module) do
    {:error, "Job module must be an atom"}
  end
  defp validate(%__MODULE__{module: module} = job) do
    case Code.ensure_loaded(module) do
      {:error, :nofile} ->
        {:error, "Unknown job module: #{module}"}
      {:error, :embedded} ->
        Logger.error("Module not available: #{module}")
        {:error, "Module not loaded correctly: #{module}"}
      {:module, ^module} ->
        functions = module.__info__(:functions)
        case Enum.member?(functions, {:perform, 1}) do
          false ->
            {:error, "Invalid job module: #{module} does not implement Roger.Job"}
          true ->
            {:ok, job}
        end
    end
  end

  @doc """
  Given a job, return its queue type
  """
  def queue_type(%__MODULE__{} = job) do
    job.module.queue_type(job.args)
  end

  @doc """
  Given a job, return whether it's retryable or not.
  """
  def retryable?(%__MODULE__{} = job) do
    if function_exported?(job.module, :retryable?, 0) do
      job.module.retryable?()
    else
      false
    end
  end
end
|
lib/roger/job.ex
| 0.890818 | 0.45048 |
job.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.