defmodule BSV.Contract.Helpers do
@moduledoc """
Base helper module containing helper functions for use in `BSV.Contract`
modules.
Using `BSV.Contract.Helpers` imports this module and all related helper modules
into your context.
use BSV.Contract.Helpers
Alternatively, helper modules can be imported individually.
import BSV.Contract.Helpers
import BSV.Contract.OpCodeHelpers
import BSV.Contract.VarIntHelpers
"""
alias BSV.{Contract, PrivKey, Sig, UTXO}
import BSV.Contract.OpCodeHelpers
defmacro __using__(_) do
quote do
import BSV.Contract.Helpers
import BSV.Contract.OpCodeHelpers
import BSV.Contract.VarIntHelpers
end
end
@doc """
Assuming the top stack element is an unsigned integer, casts it to a
`BSV.ScriptNum.t()` encoded number.
"""
@spec decode_uint(Contract.t(), atom()) :: Contract.t()
def decode_uint(contract, endianess \\ :little)
def decode_uint(%Contract{} = contract, endianess)
when endianess in [:le, :little]
do
contract
|> push(<<0>>)
|> op_cat()
|> op_bin2num()
end
# TODO: implement big-endian decoding
def decode_uint(%Contract{} = _contract, endianess)
when endianess in [:be, :big],
do: raise "Big endian decoding not implemented yet"
@doc """
Iterates over the given enumerable, invoking the `handle_each` function on
each.
## Example
contract
|> each(["foo", "bar", "baz"], fn el, c ->
c
|> push(el)
|> op_cat()
end)
"""
@spec each(
Contract.t(),
Enum.t(),
(Enum.element(), Contract.t() -> Contract.t())
) :: Contract.t()
def each(%Contract{} = contract, enum, handle_each)
when is_function(handle_each),
do: Enum.reduce(enum, contract, handle_each)
@doc """
Pushes the given data onto the script. If a list of data elements is given,
each will be pushed to the script as separate pushdata elements.
"""
@spec push(
Contract.t(),
atom() | binary() | integer() |
list(atom() | binary() | integer())
) :: Contract.t()
def push(%Contract{} = contract, data) when is_list(data),
do: each(contract, data, &push(&2, &1))
def push(%Contract{} = contract, data),
do: Contract.script_push(contract, data)
@doc """
Iterates the given number of times, invoking the `handle_each` function on
each iteration.
## Example
contract
|> repeat(5, fn _i, c ->
c
|> op_5()
|> op_add()
end)
"""
@spec repeat(
Contract.t(),
non_neg_integer(),
(non_neg_integer(), Contract.t() -> Contract.t())
) :: Contract.t()
def repeat(%Contract{} = contract, loops, handle_each)
when is_integer(loops) and loops > 0
and is_function(handle_each),
do: Enum.reduce(0..loops-1, contract, handle_each)
@doc """
Reverses the top item on the stack.
This helper function pushes op codes on to the script that will reverse a
binary of the given length.
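## Example
# reverses a 4-byte top stack item
contract
|> reverse(4)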
"""
@spec reverse(Contract.t(), integer()) :: Contract.t()
def reverse(%Contract{} = contract, length)
when is_integer(length) and length > 1
do
contract
|> repeat(length-1, fn _i, contract ->
contract
|> op_1()
|> op_split()
end)
|> repeat(length-1, fn _i, contract ->
contract
|> op_swap()
|> op_cat()
end)
end
@doc """
Signs the transaction [`context`](`t:BSV.Contract.ctx/0`) and pushes the
signature onto the script.
A list of private keys can be given, in which case each is used to sign and
multiple signatures are added.
If no context is available in the [`contract`](`t:BSV.Contract.t/0`), then
71 bytes of zeros are pushed onto the script for each private key.
"""
@spec sig(Contract.t(), PrivKey.t() | list(PrivKey.t())) :: Contract.t()
def sig(%Contract{} = contract, privkey) when is_list(privkey),
do: each(contract, privkey, &sig(&2, &1))
def sig(
%Contract{ctx: {tx, index}, opts: opts, subject: %UTXO{txout: txout}} = contract,
%PrivKey{} = privkey
) do
signature = Sig.sign(tx, index, txout, privkey, opts)
Contract.script_push(contract, signature)
end
def sig(%Contract{ctx: nil} = contract, %PrivKey{} = _privkey),
do: Contract.script_push(contract, <<0::568>>)
@doc """
Extracts the bytes from top item on the stack, starting on the given `start`
index for `length` bytes. The stack item is replaced with the sliced value.
Binaries are zero indexed. If `start` is a negative integer, then the start
index is counted from the end.
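## Example
# replaces the top stack item with the 2 bytes starting at index 4
contract
|> slice(4, 2)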
"""
@spec slice(Contract.t(), integer(), non_neg_integer()) :: Contract.t()
def slice(%Contract{} = contract, start, length) when start < 0 do
contract
|> op_size()
|> push(start * -1)
|> op_sub()
|> op_split()
|> op_nip()
|> slice(0, length)
end
def slice(%Contract{} = contract, start, length) when start > 0 do
contract
|> trim(start)
|> slice(0, length)
end
def slice(%Contract{} = contract, 0, length) do
contract
|> push(length)
|> op_split()
|> op_drop()
end
@doc """
Trims the given number of leading or trailing bytes from the top item on the
stack. The stack item is replaced with the trimmed value.
When the given `length` is a positive integer, leading bytes are trimmed. When
a negative integer is given, trailing bytes are trimmed.
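## Example
# trims 4 leading bytes; `trim(-4)` would trim 4 trailing bytes
contract
|> trim(4)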
"""
@spec trim(Contract.t(), integer()) :: Contract.t()
def trim(%Contract{} = contract, length) when length > 0 do
contract
|> push(length)
|> op_split()
|> op_nip()
end
def trim(%Contract{} = contract, length) when length < 0 do
contract
|> op_size()
|> push(length * -1)
|> op_sub()
|> op_split()
|> op_drop()
end
def trim(%Contract{} = contract, 0), do: contract
end
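# A minimal sketch (not from the library docs) composing the helpers above;
# it assumes `contract` is a `BSV.Contract.t()` already in scope, as in the
# @doc examples:
#
#     contract
#     |> push(<<1, 2, 3, 4>>)   # push 4 bytes onto the script
#     |> reverse(4)             # reverse the 4-byte top item
#     |> trim(1)                # drop the leading byte
#     |> slice(0, 2)            # keep the first 2 bytes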
# lib/bsv/contract/helpers.ex
defmodule Timber.Exceptions.Translator do
alias Timber.Events.ErrorEvent
@moduledoc """
This module implements a Logger translator to take advantage of
the richer metadata available from Logger in OTP 21 and Elixir 1.7+.
Including the translator allows for crash reasons and stacktraces to be
included as structured metadata within Timber.
The translator depends on using Elixir's internal Logger.Translator, and
is not compatible with other translators as a Logger event can only be
translated once.
To install, add the translator in your application's start function:
```
# ...
:ok = Logger.add_translator({Timber.Exceptions.Translator, :translate})
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
```
"""
def translate(min_level, level, kind, message) do
case Logger.Translator.translate(min_level, level, kind, message) do
{:ok, char, metadata} ->
new_metadata = transform_metadata(metadata)
{:ok, char, new_metadata}
{:ok, char} ->
{:ok, char}
:skip ->
:skip
:none ->
:none
end
end
def transform_metadata(nil), do: []
def transform_metadata(metadata) do
with {:ok, crash_reason} <- Keyword.fetch(metadata, :crash_reason),
{:ok, event} <- get_error_event(crash_reason) do
Timber.Event.to_metadata(event)
|> Keyword.merge(metadata)
else
_ ->
metadata
end
end
defp get_error_event({{%{__exception__: true} = error, stacktrace}, _stack})
when is_list(stacktrace) do
{:ok, build_error_event(error, stacktrace, :error)}
end
defp get_error_event({%{__exception__: true} = error, stacktrace}) when is_list(stacktrace) do
{:ok, build_error_event(error, stacktrace, :error)}
end
defp get_error_event({{type, reason}, stacktrace}) when is_list(stacktrace) do
{:ok, build_error_event(reason, stacktrace, type)}
end
defp get_error_event({error, stacktrace}) when is_list(stacktrace) do
{:ok, build_error_event(error, stacktrace, :error)}
end
defp get_error_event(_) do
{:error, :no_info}
end
defp build_error_event(%{__exception__: true} = error, stacktrace, _type) do
ErrorEvent.from_exception(error)
|> ErrorEvent.add_backtrace(stacktrace)
end
defp build_error_event(error, stacktrace, _type) do
ErlangError.normalize(error, stacktrace)
|> ErrorEvent.from_exception()
|> ErrorEvent.add_backtrace(stacktrace)
end
end
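# As an alternative to calling `Logger.add_translator/1` at startup (shown in
# the moduledoc), the translator can be configured statically. A sketch,
# assuming the standard `:logger` configuration in `config/config.exs`:
#
#     config :logger,
#       translators: [
#         {Timber.Exceptions.Translator, :translate},
#         {Logger.Translator, :translate}
#       ]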
# lib/timber_exceptions/translator.ex
defmodule ExlasticSearch.Model do
@moduledoc """
Base macro for generating elasticsearch modules. Is intended to be used in conjunction with a
Ecto model (although that is not strictly necessary).
It includes three primary macros:
* `indexes/2`
* `settings/1`
* `mapping/2`
The usage is something like this
```
indexes :my_type do
settings Application.get_env(:some, :settings)
mapping :column
mapping :other_column, type: :keyword
end
```
This will set up settings and mappings for index my_types with type my_type (specify the singularized
type in the macro, so pluralization works naturally).
The following functions will also be created:
* `__es_mappings__/0` - map of all fully specified mappings for the given type
* `__mappings__/0` - columns with mappings for the given type
* `__es_index__/0` - the elasticsearch index for this model
* `__es_index__/1` - the elasticsearch index for reads/writes when performing zero-downtime updates
(pass either `:read` or `:index` respectively)
* `__doc_type__/0` - the default document type for searches in __es_index__()
* `__es_settings__/0` - the settings for the index of this model
"""
@type_inference Application.get_env(:exlasticsearch, :type_inference)
defmacro __using__(_) do
quote do
import ExlasticSearch.Model
import Ecto.Query, only: [from: 2]
@es_query %ExlasticSearch.Query{
queryable: __MODULE__,
index_type: Keyword.get(Application.get_env(:exlasticsearch, __MODULE__, []), :index_type, :read)
}
@mapping_options %{}
def es_type(column), do: __schema__(:type, column) |> ecto_to_es()
def search_query(), do: @es_query
def indexing_query(query \\ __MODULE__) do
Ecto.Query.from(r in query, order_by: [asc: :id])
end
defoverridable [indexing_query: 0, indexing_query: 1]
end
end
@doc """
Opens up index definition for the current model. Will name the index and generate metadata
attributes for the index based on subsequent calls to `settings/1` and `mapping/2`.
Accepts
* `type` - the index's type (and the index name will be `type <> "s"`)
* `block` - the definition of the index
"""
defmacro indexes(type, block) do
quote do
Module.register_attribute(__MODULE__, :es_mappings, accumulate: true)
@read_version :ignore
@index_version :ignore
def __doc_type__(), do: unquote(type)
unquote(block)
def __es_index__(type \\ :read)
def __es_index__(:read), do: index_version(unquote(type), @read_version)
def __es_index__(:index), do: index_version(unquote(type), @index_version)
def __es_index__(:delete), do: __es_index__(:read)
def __es_index__(_), do: __es_index__(:read)
def __es_mappings__() do
@mapping_options
|> Map.put(:properties, @es_mappings
|> Enum.into(%{}, fn {key, value} ->
{key, value |> Enum.into(%{type: es_type(key)})}
end))
end
@es_mapped_cols @es_mappings |> Enum.map(&elem(&1, 0))
@es_decode_template @es_mappings
|> Enum.map(fn {k, v} -> {k, Map.new(v)} end)
|> Enum.map(&ExlasticSearch.Model.mapping_template/1)
def __mappings__(), do: @es_mapped_cols
def __mapping_options__(), do: @mapping_options
def __es_decode_template__(), do: @es_decode_template
def es_decode(map) when is_map(map), do: struct(__MODULE__.SearchResult, es_decode(map, __MODULE__))
def es_decode(_), do: nil
@after_compile ExlasticSearch.Model
end
end
defmacro __after_compile__(_, _) do
quote do
use ExlasticSearch.Model.SearchResult
end
end
defmodule SearchResult do
@moduledoc """
Wrapper for a models search result. Used for response parsing
"""
defmacro __using__(_) do
columns = __CALLER__.module.__mappings__()
quote do
defmodule SearchResult do
defstruct unquote(columns)
end
end
end
end
@doc """
Adds a new mapping to the ES schema. The type of the mapping will be inferred automatically, unless explicitly set
in props.
Accepts:
* `name` - the name of the mapping
* `props` - is a map/kw list of ES mapping configuration (e.g. `search_analyzer: "my_search_analyzer", type: "text"`)
"""
defmacro mapping(name, props \\ []) do
quote do
ExlasticSearch.Model.__mapping__(__MODULE__, unquote(name), unquote(props))
end
end
@doc """
A map of index settings. Structure is the same as specified by ES.
"""
defmacro settings(settings) do
quote do
def __es_settings__(), do: %{settings: unquote(settings)}
end
end
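@doc """
Sets additional top-level mapping options, which are merged into the result of
`__es_mappings__/0`.
"""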
defmacro options(options) do
quote do
@mapping_options unquote(options)
end
end
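@doc """
Sets the index versions used for zero-downtime updates (see `__es_index__/1`).
Accepts either a `{index, read}` tuple or a single version applied to both.
"""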
defmacro versions({index, read}) do
quote do
@read_version unquote(read)
@index_version unquote(index)
end
end
defmacro versions(index) do
quote do
@read_version unquote(index)
@index_version unquote(index)
end
end
def __mapping__(mod, name, properties) do
Module.put_attribute(mod, :es_mappings, {name, properties})
end
@doc """
Converts a search result to `model`'s search result type
"""
def es_decode(source, model) do
model.__es_decode_template__()
|> do_decode(source)
end
def index_version(type), do: "#{type}s"
def index_version(type, :ignore), do: index_version(type)
def index_version(type, version), do: "#{type}s#{version}"
def mapping_template({name, %{properties: properties}}), do: {Atom.to_string(name), name, Enum.map(properties, &mapping_template/1)}
def mapping_template({name, _}), do: {Atom.to_string(name), name, :preserve}
def ecto_to_es(type), do: @type_inference.infer(type)
defp do_decode(template, source) when is_map(source) do
template
|> Enum.map(fn
{key, atom_key, :preserve} -> {atom_key, Map.get(source, key)}
{key, atom_key, template} -> {atom_key, do_decode(template, Map.get(source, key))}
end)
|> Enum.into(%{})
end
defp do_decode(template, source) when is_list(source) do
Enum.map(source, &do_decode(template, &1))
end
defp do_decode(_, _), do: nil
end
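# A minimal sketch (not from the library docs) of a model module using the
# macros above; the module and column names are hypothetical, and the columns
# are assumed to exist on the corresponding Ecto schema:
#
#     defmodule MyApp.User do
#       use ExlasticSearch.Model
#
#       indexes :user do
#         settings %{number_of_shards: 1}
#         mapping :name
#         mapping :email, type: :keyword
#       end
#     end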
# lib/exlasticsearch/model.ex
defmodule Ecto.Adapters.MySQL do
@moduledoc """
Adapter module for MySQL.
It handles and pools the connections to the MySQL
database using `mariaex` and a connection pool,
such as `poolboy`.
## Options
MySQL options split in different categories described
below. All options should be given via the repository
configuration. These options are also passed to the module
specified in the `:pool` option, so check that module's
documentation for more options.
### Compile time options
Those options should be set in the config file and require
recompilation in order to take effect.
* `:adapter` - The adapter name, in this case, `Ecto.Adapters.MySQL`
* `:pool` - The connection pool module, defaults to `DBConnection.Poolboy`
* `:pool_timeout` - The default timeout to use on pool calls, defaults to `5000`
* `:timeout` - The default timeout to use on queries, defaults to `15000`
### Connection options
* `:hostname` - Server hostname
* `:port` - Server port (default: 3306)
* `:username` - Username
* `:password` - User password
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
* `:parameters` - Keyword list of connection parameters
* `:connect_timeout` - The timeout for establishing new connections (default: 5000)
* `:socket_options` - Specifies socket configuration
The `:socket_options` are particularly useful when configuring the size
of both send and receive buffers. For example, when Ecto starts with a
pool of 20 connections, the memory usage may quickly grow from 20MB to
50MB based on the operating system default values for TCP buffers. It is
advised to stick with the operating system defaults but they can be
tweaked if desired:
socket_options: [recbuf: 8192, sndbuf: 8192]
We also recommend developers to consult the
[Mariaex documentation](https://hexdocs.pm/mariaex/Mariaex.html#start_link/1)
for a complete listing of all supported options.
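A typical repository configuration combining these options might look
like this (values are illustrative):
    config :my_app, MyApp.Repo,
      adapter: Ecto.Adapters.MySQL,
      hostname: "localhost",
      port: 3306,
      username: "root",
      password: "secret",
      database: "my_app_dev"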
### Storage options
* `:charset` - the database encoding (default: "utf8")
* `:collation` - the collation order
* `:dump_path` - where to place dumped structures
## Limitations
There are some limitations when using Ecto with MySQL that one
needs to be aware of.
### Engine
Since Ecto uses transactions, MySQL users running old versions
(5.1 and before) must ensure their tables use the InnoDB engine
as the default (MyISAM) does not support transactions.
Tables created by Ecto are guaranteed to use InnoDB, regardless
of the MySQL version.
### UUIDs
MySQL does not support UUID types. Ecto emulates them by using
`binary(16)`.
### Read after writes
Because MySQL does not support RETURNING clauses in INSERT and
UPDATE, it does not support the `:read_after_writes` option of
`Ecto.Schema.field/3`.
### DDL Transaction
MySQL does not support migrations inside transactions as it
automatically commits after some commands like CREATE TABLE.
Therefore MySQL migrations do not run inside transactions.
### usec in datetime
Old MySQL versions did not support usec in datetime while
more recent versions would round or truncate the usec value.
Therefore, in case the user decides to use microseconds in
datetimes and timestamps with MySQL, be aware of such
differences and consult the documentation for your MySQL
version.
"""
# Inherit all behaviour from Ecto.Adapters.SQL
use Ecto.Adapters.SQL, :mariaex
# And provide a custom storage implementation
@behaviour Ecto.Adapter.Storage
@behaviour Ecto.Adapter.Structure
## Custom MySQL types
@doc false
def loaders(:map, type), do: [&json_decode/1, type]
def loaders({:map, type}, _), do: [&json_decode/1, type]
def loaders(:boolean, type), do: [&bool_decode/1, type]
def loaders(:binary_id, type), do: [Ecto.UUID, type]
def loaders({:embed, _} = type, _), do: [&json_decode/1, &Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(_, type), do: [type]
defp bool_decode(<<0>>), do: {:ok, false}
defp bool_decode(<<1>>), do: {:ok, true}
defp bool_decode(0), do: {:ok, false}
defp bool_decode(1), do: {:ok, true}
defp bool_decode(x), do: {:ok, x}
defp json_decode(x) when is_binary(x),
do: {:ok, Application.get_env(:ecto, :json_library).decode!(x)}
defp json_decode(x),
do: {:ok, x}
## Storage API
@doc false
def storage_up(opts) do
database = Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
charset = opts[:charset] || "utf8"
command =
~s(CREATE DATABASE `#{database}` DEFAULT CHARACTER SET = #{charset})
|> concat_if(opts[:collation], &"DEFAULT COLLATE = #{&1}")
{output, status} = run_with_mysql command, opts
cond do
status == 0 -> :ok
String.contains?(output, "database exists") -> {:error, :already_up}
true -> {:error, output}
end
end
defp concat_if(content, nil, _fun), do: content
defp concat_if(content, value, fun), do: content <> " " <> fun.(value)
@doc false
def storage_down(opts) do
database = Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
{output, status} = run_with_mysql("DROP DATABASE `#{database}`", opts)
cond do
status == 0 -> :ok
String.contains?(output, "doesn't exist") -> {:error, :already_down}
true -> {:error, output}
end
end
defp run_with_mysql(sql_command, opts) do
args = ["--silent", "--execute", sql_command]
run_with_cmd("mysql", opts, args)
end
@doc false
def supports_ddl_transaction? do
true
end
@doc false
def insert(repo, %{source: {prefix, source}, autogenerate_id: {key, :id}}, params, [key], opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [fields], [])
case Ecto.Adapters.SQL.query(repo, sql, values, opts) do
{:ok, %{num_rows: 1, last_insert_id: last_insert_id}} ->
{:ok, [{key, last_insert_id}]}
{:error, err} ->
case @conn.to_constraints(err) do
[] -> raise err
constraints -> {:invalid, constraints}
end
end
end
def insert(repo, schema_meta, params, [], opts) do
super(repo, schema_meta, params, [], opts)
end
def insert(_repo, %{schema: schema}, _params, returning, _opts) do
raise ArgumentError, "MySQL does not support :read_after_writes in schemas for non-primary keys. " <>
"The following fields in #{inspect schema} are tagged as such: #{inspect returning}"
end
@doc false
def structure_dump(default, config) do
table = config[:migration_source] || "schema_migrations"
path = config[:dump_path] || Path.join(default, "structure.sql")
with {:ok, versions} <- select_versions(table, config),
{:ok, contents} <- mysql_dump(config),
{:ok, contents} <- append_versions(table, versions, contents) do
File.mkdir_p!(Path.dirname(path))
File.write!(path, contents)
{:ok, path}
end
end
defp select_versions(table, config) do
case run_query(~s[SELECT version FROM `#{table}` ORDER BY version], config) do
{:ok, %{rows: rows}} -> {:ok, Enum.map(rows, &hd/1)}
{:error, %{mariadb: %{code: 1146}}} -> {:ok, []}
{:error, _} = error -> error
end
end
defp mysql_dump(config) do
case run_with_cmd("mysqldump", config, ["--no-data", "--routines", config[:database]]) do
{output, 0} -> {:ok, output}
{output, _} -> {:error, output}
end
end
defp append_versions(_table, [], contents) do
{:ok, contents}
end
defp append_versions(table, versions, contents) do
{:ok,
contents <>
~s[INSERT INTO `#{table}` (version) VALUES ] <>
Enum.map_join(versions, ", ", &"(#{&1})") <>
~s[;\n\n]}
end
@doc false
def structure_load(default, config) do
path = config[:dump_path] || Path.join(default, "structure.sql")
args = [
"--execute", "SET FOREIGN_KEY_CHECKS = 0; SOURCE #{path}; SET FOREIGN_KEY_CHECKS = 1",
"--database", config[:database]
]
case run_with_cmd("mysql", config, args) do
{_output, 0} -> {:ok, path}
{output, _} -> {:error, output}
end
end
defp run_query(sql, opts) do
{:ok, _} = Application.ensure_all_started(:mariaex)
opts =
opts
|> Keyword.delete(:name)
|> Keyword.put(:pool, DBConnection.Connection)
|> Keyword.put(:backoff_type, :stop)
{:ok, pid} = Task.Supervisor.start_link
task = Task.Supervisor.async_nolink(pid, fn ->
{:ok, conn} = Mariaex.start_link(opts)
value = Ecto.Adapters.MySQL.Connection.execute(conn, sql, [], opts)
GenServer.stop(conn)
value
end)
timeout = Keyword.get(opts, :timeout, 15_000)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, {:ok, result}} ->
{:ok, result}
{:ok, {:error, error}} ->
{:error, error}
{:exit, {%{__struct__: struct} = error, _}}
when struct in [Mariaex.Error, DBConnection.Error] ->
{:error, error}
{:exit, reason} ->
{:error, RuntimeError.exception(Exception.format_exit(reason))}
nil ->
{:error, RuntimeError.exception("command timed out")}
end
end
defp run_with_cmd(cmd, opts, opt_args) do
unless System.find_executable(cmd) do
raise "could not find executable `#{cmd}` in path, " <>
"please guarantee it is available before running ecto commands"
end
env =
if password = opts[:password] do
[{"MYSQL_PWD", password}]
else
[]
end
host = opts[:hostname] || System.get_env("MYSQL_HOST") || "localhost"
port = opts[:port] || System.get_env("MYSQL_TCP_PORT") || "3306"
args = ["--user", opts[:username], "--host", host, "--port", to_string(port)] ++ opt_args
System.cmd(cmd, args, env: env, stderr_to_stdout: true)
end
end
# deps/ecto/lib/ecto/adapters/mysql.ex
defmodule Phoenix.Token do
@moduledoc """
Tokens provide a way to generate and verify bearer
tokens for use in Channels or API authentication.
The data stored in the token is signed to prevent tampering
but not encrypted. This means it is safe to store identification
information (such as user IDs) but should not be used to store
confidential information (such as credit card numbers).
## Example
When generating a unique token for use in an API or Channel
it is advised to use a unique identifier for the user, typically
the id from a database. For example:
iex> user_id = 1
iex> token = Phoenix.Token.sign(MyApp.Endpoint, "user salt", user_id)
iex> Phoenix.Token.verify(MyApp.Endpoint, "user salt", token, max_age: 86400)
{:ok, 1}
In that example we have a user's id, we generate a token and
verify it using the secret key base configured in the given
`endpoint`. We guarantee the token will only be valid for one day
by setting a max age (recommended).
The first argument to both `sign/4` and `verify/4` can be one of:
* the module name of a Phoenix endpoint (shown above) - where
the secret key base is extracted from the endpoint
* `Plug.Conn` - where the secret key base is extracted from the
endpoint stored in the connection
* `Phoenix.Socket` - where the secret key base is extracted from
the endpoint stored in the socket
* a string, representing the secret key base itself. A key base
with at least 20 randomly generated characters should be used
to provide adequate entropy.
The second argument is a [cryptographic salt](https://en.wikipedia.org/wiki/Salt_(cryptography))
which must be the same in both calls to `sign/4` and `verify/4`.
For instance, it may be called "user auth" when generating a token
that will be used to authenticate users on channels or on your APIs.
The third argument can be any term (string, int, list, etc.)
that you wish to codify into the token. Upon valid verification,
this same term will be extracted from the token.
## Usage
Once a token is signed, we can send it to the client in multiple ways.
One is via the meta tag:
<%= tag :meta, name: "channel_token",
content: Phoenix.Token.sign(@conn, "user salt", @current_user.id) %>
Or an endpoint that returns it:
def create(conn, params) do
user = User.create(params)
render(conn, "user.json",
%{token: Phoenix.Token.sign(conn, "user salt", user.id), user: user})
end
Once the token is sent, the client may now send it back to the server
as an authentication mechanism. For example, we can use it to authenticate
a user on a Phoenix channel:
defmodule MyApp.UserSocket do
use Phoenix.Socket
def connect(%{"token" => token}, socket) do
case Phoenix.Token.verify(socket, "user salt", token, max_age: 86400) do
{:ok, user_id} ->
socket = assign(socket, :user, Repo.get!(User, user_id))
{:ok, socket}
{:error, _} ->
:error
end
end
end
In this example, the phoenix.js client will send the token in the
`connect` command which is then validated by the server.
`Phoenix.Token` can also be used for validating APIs, handling
password resets, e-mail confirmation and more.
"""
require Logger
alias Plug.Crypto.KeyGenerator
alias Plug.Crypto.MessageVerifier
@doc """
Encodes data and signs it resulting in a token you can send to clients.
## Options
* `:key_iterations` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 1000
* `:key_length` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 32
* `:key_digest` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to `:sha256`
* `:signed_at` - set the timestamp of the token in seconds.
Defaults to `System.system_time(:second)`
"""
def sign(context, salt, data, opts \\ []) when is_binary(salt) do
{signed_at_seconds, key_opts} = Keyword.pop(opts, :signed_at)
signed_at_ms = if signed_at_seconds, do: trunc(signed_at_seconds * 1000), else: now_ms()
secret = get_key_base(context) |> get_secret(salt, key_opts)
%{data: data, signed: signed_at_ms}
|> :erlang.term_to_binary()
|> MessageVerifier.sign(secret)
end
@doc """
Decodes the original data from the token and verifies its integrity.
## Examples
In this scenario we will create a token, sign it, then provide it to a client
application. The client will then use this token to authenticate requests for
resources from the server. (See `Phoenix.Token` summary for more info about
creating tokens.)
iex> user_id = 99
iex> secret = "some-secret-key-base-at-least-20-chars-long"
iex> user_salt = "user salt"
iex> token = Phoenix.Token.sign(secret, user_salt, user_id)
The mechanism for passing the token to the client is typically through a
cookie, a JSON response body, or HTTP header. For now, assume the client has
received a token it can use to validate requests for protected resources.
When the server receives a request, it can use `verify/4` to determine if it
should provide the requested resources to the client:
iex> Phoenix.Token.verify(secret, user_salt, token, max_age: 86400)
{:ok, 99}
In this example, we know the client sent a valid token because `verify/4`
returned a tuple of type `{:ok, user_id}`. The server can now proceed with
the request.
However, if the client had sent an expired or otherwise invalid token
`verify/4` would have returned an error instead:
iex> Phoenix.Token.verify(secret, user_salt, expired, max_age: 86400)
{:error, :expired}
iex> Phoenix.Token.verify(secret, user_salt, invalid, max_age: 86400)
{:error, :invalid}
## Options
* `:max_age` - verifies the token only if it has been generated
"max age" ago in seconds. A reasonable value is 1 day (`86400`
seconds)
* `:key_iterations` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 1000
* `:key_length` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to 32
* `:key_digest` - option passed to `Plug.Crypto.KeyGenerator`
when generating the encryption and signing keys. Defaults to `:sha256`
"""
def verify(context, salt, token, opts \\ [])
def verify(context, salt, token, opts) when is_binary(salt) and is_binary(token) do
secret = context |> get_key_base() |> get_secret(salt, opts)
case MessageVerifier.verify(token, secret) do
{:ok, message} ->
%{data: data, signed: signed} = Plug.Crypto.safe_binary_to_term(message)
if expired?(signed, opts[:max_age]) do
{:error, :expired}
else
{:ok, data}
end
:error ->
{:error, :invalid}
end
end
def verify(_context, salt, nil, _opts) when is_binary(salt) do
{:error, :missing}
end
defp get_key_base(%Plug.Conn{} = conn),
do: conn |> Phoenix.Controller.endpoint_module() |> get_endpoint_key_base()
defp get_key_base(%Phoenix.Socket{} = socket),
do: get_endpoint_key_base(socket.endpoint)
defp get_key_base(endpoint) when is_atom(endpoint),
do: get_endpoint_key_base(endpoint)
defp get_key_base(string) when is_binary(string) and byte_size(string) >= 20,
do: string
defp get_endpoint_key_base(endpoint) do
endpoint.config(:secret_key_base) || raise """
no :secret_key_base configuration found in #{inspect endpoint}.
Ensure your environment has the necessary mix configuration. For example:
config :my_app, MyApp.Endpoint,
secret_key_base: ...
"""
end
# Gathers configuration and generates the key secrets and signing secrets.
defp get_secret(secret_key_base, salt, opts) do
iterations = Keyword.get(opts, :key_iterations, 1000)
length = Keyword.get(opts, :key_length, 32)
digest = Keyword.get(opts, :key_digest, :sha256)
key_opts = [iterations: iterations,
length: length,
digest: digest,
cache: Plug.Keys]
KeyGenerator.generate(secret_key_base, salt, key_opts)
end
defp expired?(_signed, :infinity), do: false
defp expired?(_signed, nil) do
# TODO v2: Default to 86400 on future releases.
Logger.warn ":max_age was not set on Phoenix.Token.verify/4. " <>
"A max_age is recommended otherwise tokens are forever valid. " <>
"Please set it to the amount of seconds the token is valid, " <>
"such as 86400 (1 day), or :infinity if you really want this token to be valid forever"
false
end
defp expired?(signed, max_age_secs), do: (signed + trunc(max_age_secs * 1000)) < now_ms()
defp now_ms, do: System.system_time(:millisecond)
end
# lib/phoenix/token.ex
defmodule Membrane.AAC.Filler do
@moduledoc """
Element that fills gaps in AAC stream with silent frames.
"""
use Membrane.Filter
import Membrane.Caps.Matcher, only: [one_of: 1]
alias Membrane.{Buffer, Time}
# Silence frame per channel configuration
@silent_frames %{
1 => <<222, 2, 0, 76, 97, 118, 99, 53, 56, 46, 53, 52, 46, 49, 48, 48, 0, 2, 48, 64, 14>>,
2 =>
<<255, 241, 80, 128, 3, 223, 252, 222, 2, 0, 76, 97, 118, 99, 53, 56, 46, 57, 49, 46, 49,
48, 48, 0, 66, 32, 8, 193, 24, 56>>
}
@caps {Membrane.AAC, profile: :LC, channels: one_of([1, 2])}
def_input_pad :input, demand_unit: :buffers, caps: @caps
def_output_pad :output, caps: @caps
defmodule State do
@moduledoc false
# Membrane normalizes timestamps and the stream always starts with timestamp 0.
@initial_timestamp 0
@default_channels 1
@type t :: %__MODULE__{
frame_duration: Membrane.Time.t(),
channels: non_neg_integer(),
expected_timestamp: non_neg_integer()
}
@enforce_keys [:frame_duration]
defstruct [expected_timestamp: @initial_timestamp, channels: @default_channels] ++
@enforce_keys
end
@doc """
Returns a silent AAC frame that this element uses to fill gaps in the stream.
"""
@spec silent_frame(integer()) :: binary()
def silent_frame(channels), do: Map.fetch!(@silent_frames, channels)
@impl true
def handle_init(_opts) do
{:ok, %State{frame_duration: nil}}
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
@impl true
def handle_caps(:input, caps, _ctx, state) do
new_duration = caps.samples_per_frame / caps.sample_rate * Time.second()
state = %State{state | frame_duration: new_duration, channels: caps.channels}
{{:ok, forward: caps}, state}
end
@impl true
def handle_process(:input, buffer, _ctx, state) do
use Ratio, comparison: true
%{timestamp: current_timestamp} = buffer.metadata
%{expected_timestamp: expected_timestamp, frame_duration: frame_duration} = state
expected_timestamp = expected_timestamp || current_timestamp
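# Generate a timestamp for each silent frame needed to fill the gap between
# the expected timestamp and this buffer's actual timestamp.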
silent_frames_timestamps =
Stream.iterate(expected_timestamp, &(&1 + frame_duration))
|> Enum.take_while(&silent_frame_needed?(&1, current_timestamp, frame_duration))
silent_frame_payload = silent_frame(state.channels)
buffers =
Enum.map(silent_frames_timestamps, fn timestamp ->
%Buffer{buffer | payload: silent_frame_payload}
|> Bunch.Struct.put_in([:metadata, :timestamp], timestamp)
end) ++ [buffer]
expected_timestamp = expected_timestamp + length(buffers) * frame_duration
{{:ok, buffer: {:output, buffers}}, %{state | expected_timestamp: expected_timestamp}}
end
defp silent_frame_needed?(expected_timestamp, current_timestamp, frame_duration) do
use Ratio, comparison: true
current_timestamp - expected_timestamp > frame_duration / 2
end
end
# lib/membrane/aac/filler.ex
defmodule Octopod do
@moduledoc """
This module is the lower-level API for the library. To use the higher-level
API, see pyctopod.ex.
The way this library works is this:
1. Your elixir application starts a python instance that runs whatever python code you need
2. That python code will want to send files to other running python instances
3. The python code calls into this library (perhaps through a wrapper lib) with 'send(fpath, topic)'
4. This library finds the file specified and sends it (perhaps with compression) to anyone listening to 'topic'
5. The listening servers receive the file, deserialize it, and hand it over to the python process that they are running
"""
use GenServer
alias Octopod.Export
# Client API
@doc """
Starts the python process as a GenServer and returns the pid. Pass
this pid into the other functions in this module to use it.
## Examples
iex> {:ok, pid} = Octopod.start()
iex> is_pid(pid)
true
"""
def start(pyargs \\ []) do
start_link(pyargs)
end
@doc """
Starts the python process as a GenServer and returns the pid. Just
like start/1, but also registers any handler with the process.
Convenience function for:
```elixir
{:ok, pid} = Octopod.start(pyargs)
answer = Octopod.call(pid, mod, :register_handler, [self()])
{answer, pid}
```
This means that `mod` must have a `register_handler()` function.
## Examples
iex> privpath = [:code.priv_dir(:octopod), "test"] |> Path.join() |> to_charlist()
iex> {:ok, pid} = Octopod.start_cast(:test, [{:cd, privpath}])
iex> is_pid(pid)
true
iex> privpath = [:code.priv_dir(:octopod), "pyctopod"] |> Path.join() |> to_charlist()
iex> {:ok, pid} = Octopod.start_cast(:pyctopod, [{:cd, privpath}])
iex> is_pid(pid)
true
"""
def start_cast(mod, pyargs \\ [], msgbox_pid \\ nil) do
msgbox_pid = if (msgbox_pid == nil), do: self(), else: msgbox_pid
{:ok, pid} = start_link(pyargs)
:undefined = Octopod.call(pid, mod, :register_handler, [msgbox_pid])
{:ok, pid}
end
@doc """
Call this to terminate a running python process.
## Examples
iex> {:ok, pid} = Octopod.start()
iex> Octopod.stop(pid)
:ok
"""
def stop(pypid) do
GenServer.stop(pypid, :normal)
end
@doc """
Executes `mod.func(args)` synchronously in the python context.
## Examples
iex> {:ok, pypid} = Octopod.start()
iex> Octopod.call(pypid, :operator, :add, [2, 3])
5
"""
def call(pypid, mod, func, args) do
GenServer.call(pypid, {mod, func, args}, :infinity)
end
@doc """
Passes `msg` to the module registered with `start_cast`. You must use
`start_cast/2` to get `pypid` and the module registered with `start_cast/2`
must have a message handler that can handle the type of message being passed.
## Examples
iex> privpath = [:code.priv_dir(:octopod), "test"] |> Path.join() |> to_charlist()
iex> {:ok, pid} = Octopod.start_cast(:test, [{:cd, privpath}])
iex> :ok = Octopod.cast(pid, "hello")
iex> receive do
...> {:ok, "hello FROM PYTHON!"} -> :ok
...> _ -> :err
...> after
...> 3_000 -> :err_timeout
...> end
:ok
"""
def cast(pypid, msg) do
GenServer.cast(pypid, msg)
end
# Helper Functions
defp start_link(pyargs) do
GenServer.start_link(__MODULE__, pyargs)
end
# Server Callbacks
def init(pyargs) do
session = Export.start(pyargs)
{:ok, session}
end
def handle_call({mod, func, args}, _from, session) do
result = Export.call(session, mod, func, args)
{:reply, result, session}
end
def handle_cast(msg, session) do
Export.cast(session, msg)
{:noreply, session}
end
def handle_info({:python, message}, session) do
IO.puts("Received message from python: #{inspect message}")
{:stop, :normal, session}
end
def terminate(_reason, session) do
Export.stop(session)
:ok
end
end
# legacy/artie/apps/octopod/lib/octopod.ex
defmodule Discord.SortedSet.Types do
@moduledoc """
This module provides common types that can be used in any part of the SortedSet library.
"""
@typedoc """
SortedSets are stored in the NIF's memory space; constructing and operating on a SortedSet is
done through a reference that uniquely identifies the SortedSet in NIF space
"""
@type sorted_set :: reference()
@typedoc """
There are common errors that can be returned from any SortedSet operation; the common_errors
type enumerates them.
`{:error, :bad_reference}` is returned any time a reference is passed to the NIF but that
reference does not identify a SortedSet.
`{:error, :lock_fail}` is returned when the NIF can not guarantee concurrency safety. NIFs are
not bound by the same guarantees as Erlang / Elixir code executing in the BEAM VM; to safeguard
against multiple threads of execution mutating the same SortedSet concurrently a Mutex is used
internally to lock the data structure during all operations.
`{:error, :unsupported_type}` is returned any time an item is passed to the SortedSet that is
either in whole or in part an unsupported type. The following types are not supported in
SortedSet: Reference, Function, Port, and Pid. Unsupported types poison other types, so a list
containing a single element (regardless of nesting) of an unsupported type is unsupported, same
for tuples.
"""
@type common_errors ::
{:error, :bad_reference} | {:error, :lock_fail} | {:error, :unsupported_type}
@typedoc """
Success responses returned from the NIF when adding an element to the set.
`{:ok, :added, index :: integer()}` is returned by the NIF to indicate that the add was executed
successfully and a new element was inserted into the SortedSet at the specified index.
`{:ok, :duplicate, index :: integer()}` is returned by the NIF to indicate that the add was
executed successfully but the element already existed within the SortedSet, the index of the
existing element is returned.
The NIF provides more detailed but less conventional return values, these are coerced in the
`SortedSet` module to more conventional responses. Due to how the NIF is implemented there is
no distinction in NIF space between `add` and `index_add`, these more detailed response values
allow the Elixir wrapper to implement both with the same underlying mechanism
"""
@type nif_add_result ::
{:ok, :added, index :: integer()} | {:ok, :duplicate, index :: integer()}
@typedoc """
Response returned from the NIF when appending a bucket.
`:ok` is returned by the NIF to indicate that the bucket was appended.
`{:error, :max_bucket_size_exceeded}` is returned by the NIF to indicate that the list of terms
passed in meets or exceeds the max_bucket_size of the set.
"""
@type nif_append_bucket_result :: :ok | {:error, :max_bucket_size_exceeded}
@typedoc """
Response returned from the NIF when selecting an element at a given index
`{:ok, element :: any()}` is returned by the NIF to indicate that the index was in bounds and an
element was found at the given index
`{:error, :index_out_of_bounds}` is returned by the NIF to indicate that the index was not
within the bounds of the SortedSet.
The NIF provides more detailed but less conventional return values, these are coerced in the
`SortedSet` module to more conventional responses. Specifically in the case of `at/3` it is a
common pattern to allow the caller to define a default value for when the element is not found,
there is no need to pay the penalty of copying this default value into and back out of NIF
space.
"""
@type nif_at_result :: {:ok, element :: any()} | {:error, :index_out_of_bounds}
@typedoc """
Responses returned from the NIF when finding an element in the set
`{:ok, index :: integer()}` is returned by the NIF to indicate that the element was found at the
specified index
`{:error, :not_found}` is returned by the NIF to indicate that the element was not found
"""
@type nif_find_result :: {:ok, index :: integer()} | {:error, :not_found}
@typedoc """
Responses returned from the NIF when removing an element in the set
`{:ok, :removed, index :: integer()}` is returned by the NIF to indicate that the remove was
executed successfully and the element has been removed from the set. In addition it returns the
index at which the element was found prior to removal.
`{:error, :not_found}` is returned by the NIF to indicate that the remove was executed
successfully, but the specified element was not present in the SortedSet.
The NIF provides more detailed but less conventional return values, these are coerced in the
`SortedSet` module to more conventional responses. Due to how the NIF is implemented there is
no distinction in NIF space between `remove` and `index_remove`, these more detailed response
values allow the Elixir wrapper to implement both with the same underlying mechanism
"""
@type nif_remove_result :: {:ok, :removed, index :: integer()} | {:error, :not_found}
@typedoc """
Only a subset of Elixir types are supported by the nif, the semantic type `supported_term` can
be used as a shorthand for terms of these supported types.
"""
@type supported_term :: integer() | atom() | tuple() | list() | String.t()
end
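# A hypothetical sketch of collapsing the detailed NIF results above into the
# more conventional responses mentioned in the typedocs (the real coercion
# lives in the `SortedSet` module, not shown here):
#
#     case nif_add_result do
#       {:ok, :added, index} -> {:ok, index}
#       {:ok, :duplicate, index} -> {:ok, index}
#       {:error, _reason} = error -> error
#     end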
# lib/sorted_set/types.ex
defmodule Tonic do
@moduledoc """
A DSL for conveniently loading binary data/files.
The DSL is designed to closely represent the structure of the actual binary data
layout. So it aims to be easy to read, and easy to change.
The DSL defines functionality to represent types, endianness, groups, chunks,
repeated data, branches, optional segments. Where the majority of these functions
can be further extended to customize the behaviour and result.
The default behaviour of these operations is to remove the data that was read from the
current binary data, and append the value to the current block. Values by default are
in the form of a tagged value if a name is supplied `{ :name, value }`, otherwise are
simply `value` if no name is supplied. The default return value behaviour can be
overridden by passing in a function.
The most common types are defined in `Tonic.Types` for convenience. These are
common integer and floating point types, and strings. The behaviour of types can
further be customized when used otherwise new types can be defined using the `type/2`
function.
To use the DSL, call `use Tonic` in your module and include any additional type modules
you may require. Then you are free to write the DSL directly inside the module. Certain
options may be passed to the library on `use`, to indicate additional behaviours. The
currently supported options are:
`optimize:` which can be passed `true` to enable all optimizations, or a keyword list
enabling the specific optimizations. Enabling optimizations may make debugging issues
trickier, so best practice is to enable after it's been tested. Current specific
optimizations include:
:reduce #Enables the code reduction optimization, so the generated code is reduced as much as possible.
Example
-------
defmodule PNG do
use Tonic, optimize: true
endian :big
repeat :magic, 8, :uint8
repeat :chunks do
uint32 :length
string :type, length: 4
chunk get(:length) do
on get(:type) do
"IHDR" ->
uint32 :width
uint32 :height
uint8 :bit_depth
uint8 :colour_type
uint8 :compression_type
uint8 :filter_method
uint8 :interlace_method
"gAMA" ->
uint32 :gamma, fn { name, value } -> { name, value / 100000 } end
"cHRM" ->
group :white_point do
uint32 :x, fn { name, value } -> { name, value / 100000 } end
uint32 :y, fn { name, value } -> { name, value / 100000 } end
end
group :red do
uint32 :x, fn { name, value } -> { name, value / 100000 } end
uint32 :y, fn { name, value } -> { name, value / 100000 } end
end
group :green do
uint32 :x, fn { name, value } -> { name, value / 100000 } end
uint32 :y, fn { name, value } -> { name, value / 100000 } end
end
group :blue do
uint32 :x, fn { name, value } -> { name, value / 100000 } end
uint32 :y, fn { name, value } -> { name, value / 100000 } end
end
"iTXt" ->
string :keyword, ?\\0
string :text
_ -> repeat :uint8
end
end
uint32 :crc
end
end
\#Example load result:
\#{{:magic, [137, 80, 78, 71, 13, 10, 26, 10]},
\# {:chunks,
\# [{{:length, 13}, {:type, "IHDR"}, {:width, 48}, {:height, 40},
\# {:bit_depth, 8}, {:colour_type, 6}, {:compression_type, 0},
\# {:filter_method, 0}, {:interlace_method, 0}, {:crc, 3095886193}},
\# {{:length, 4}, {:type, "gAMA"}, {:gamma, 0.45455}, {:crc, 201089285}},
\# {{:length, 32}, {:type, "cHRM"}, {:white_point, {:x, 0.3127}, {:y, 0.329}},
\# {:red, {:x, 0.64}, {:y, 0.33}}, {:green, {:x, 0.3}, {:y, 0.6}},
\# {:blue, {:x, 0.15}, {:y, 0.06}}, {:crc, 2629456188}},
\# {{:length, 345}, {:type, "iTXt"}, {:keyword, "XML:com.adobe.xmp"},
\# {:text,
\# <<0, 0, 0, 0, 60, 120, 58, 120, 109, 112, 109, 101, 116, 97, 32, 120, 109, 108, 110, 115, 58, 120, 61, 34, 97, 100, 111, 98, 101, 58, 110, 115, 58, 109, 101, 116, 97, 47, 34, ...>>},
\# {:crc, 1287792473}},
\# {{:length, 1638}, {:type, "IDAT"},
\# [88, 9, 237, 216, 73, 143, 85, 69, 24, 198, 241, 11, 125, 26, 68, 148, 25,
\# 109, 4, 154, 102, 114, 192, 149, 70, 137, 137, 209, 152, 152, 24, 19, 190,
\# 131, 75, 22, 234, 55, 224, 59, ...], {:crc, 2269121590}},
\# {{:length, 0}, {:type, "IEND"}, [], {:crc, 2923585666}}]}}
"""
@type block(body) :: [do: body]
@type callback :: ({ any, any } -> any)
@type ast :: Macro.t
@type endianness :: :little | :big | :native
@type signedness :: :signed | :unsigned
@type length :: non_neg_integer | (list -> boolean)
@doc false
defp optimize_flags(flag, options) when is_list(options), do: options[flag] == true
defp optimize_flags(_flag, options), do: options == true
defmacro __using__(options) do
quote do
import Tonic
import Tonic.Types
@before_compile unquote(__MODULE__)
@tonic_current_scheme :load
@tonic_previous_scheme []
@tonic_data_scheme Map.put(%{}, @tonic_current_scheme, [])
@tonic_unique_function_id 0
@tonic_enable_optimization [
reduce: unquote(optimize_flags(:reduce, options[:optimize]))
]
end
end
@doc false
defp op_name({ _, name }), do: name
defp op_name({ _, name, _ }), do: name
defp op_name({ :repeat, _, name, _ }), do: name
defp op_name({ _, name, _, _ }), do: name
defp op_name({ :repeat, _, name, _, _ }), do: name
defp op_name(_), do: nil
@doc false
def var_entry(name, v = { name, _ }), do: v
def var_entry(name, value), do: { name, value }
@doc false
def fixup_value({ :get, value }), do: quote do: get_value([scope|currently_loaded], unquote(value))
def fixup_value({ :get, _, [value] }), do: fixup_value({ :get, value })
def fixup_value({ :get, _, [value, fun] }), do: fixup_value({ :get, { value, fun } })
def fixup_value(value), do: quote do: unquote(value)
@doc false
def callback({ value, data }, fun), do: { fun.(value), data }
@doc false
defp create_call({ function }), do: quote do: callback(unquote(function)([scope|currently_loaded], data, nil, endian), fn { _, value } -> value end)
defp create_call({ :spec, module }), do: quote do: unquote(module).load([], data, nil, endian)
defp create_call({ function, name }), do: quote do: unquote(function)([scope|currently_loaded], data, unquote(fixup_value(name)), endian)
defp create_call({ function, name, fun }) when is_function(fun) or is_tuple(fun), do: quote do: callback(unquote(function)([scope|currently_loaded], data, unquote(fixup_value(name)), endian), unquote(fun))
defp create_call({ function, name, endianness }), do: quote do: unquote(function)([scope|currently_loaded], data, unquote(fixup_value(name)), unquote(fixup_value(endianness)))
defp create_call({ :repeat, function, name, length }), do: quote do: repeater(unquote({ :&, [], [{ :/, [], [{ function, [], __MODULE__ }, 4] }] }), unquote(fixup_value(length)), [scope|currently_loaded], data, unquote(fixup_value(name)), endian)
defp create_call({ function, name, endianness, fun }), do: quote do: callback(unquote(function)([scope|currently_loaded], data, unquote(fixup_value(name)), unquote(fixup_value(endianness))), unquote(fun))
defp create_call({ :repeat, function, name, length, fun }), do: quote do: repeater(unquote({ :&, [], [{ :/, [], [{ function, [], __MODULE__ }, 4] }] }), unquote(fixup_value(length)), [scope|currently_loaded], data, unquote(fixup_value(name)), endian, unquote(fun))
@doc false
defp expand_data_scheme([], init_value), do: [quote([do: { unquote(init_value), data }])]
defp expand_data_scheme(scheme, init_value) do
[quote([do: loaded = unquote(init_value)]), quote([do: scope = []])|expand_operation(scheme, [quote([do: { loaded, data }])])]
end
@doc false
defp expand_operation([], ops), do: ops
defp expand_operation([{ :endian, endianness }|scheme], ops), do: expand_operation(scheme, [quote([do: endian = unquote(fixup_value(endianness))])|ops])
defp expand_operation([{ :optional, :end }|scheme], ops) do
{ optional, [{ :optional }|scheme] } = Enum.split_while(scheme, fn
{ :optional } -> false
_ -> true
end)
expand_operation(scheme, [quote do
{ loaded, scope, data } = try do
unquote_splicing(expand_operation(optional, []))
{ loaded, scope, data }
rescue
_ in MatchError -> { loaded, scope, data }
_ in CaseClauseError -> { loaded, scope, data }
end
end|ops])
end
defp expand_operation([{ :chunk, id, :end }|scheme], ops) do
{ match, _ } = Code.eval_quoted(quote([do: fn
{ :chunk, _, :end } -> true
{ :chunk, unquote(id), _ } -> false
_ -> true
end]))
{ chunk, [{ :chunk, _, length }|scheme] } = Enum.split_while(scheme, match)
expand_operation(scheme, [quote do
size = unquote(fixup_value(length))
unquote({ String.to_atom("next_data" <> to_string(id)), [], __MODULE__ }) = binary_part(data, size, byte_size(data) - size)
data = binary_part(data, 0, size)
end|[quote do
unquote_splicing(expand_operation(chunk, []))
end|[quote([do: data = unquote({ String.to_atom("next_data" <> to_string(id)), [], __MODULE__ })])|ops]]])
end
defp expand_operation([{ :on, _, :match, match }|scheme], ops) do
[clause, { :__block__, [], body }] = expand_operation(scheme, [[match], { :__block__, [], ops }])
[clause, { :__block__, [], body ++ [quote(do: { loaded, scope, data, endian })] }]
end
defp expand_operation([{ :on, id, :end }|scheme], ops) do
{ fun, _ } = Code.eval_quoted(quote([do: fn
{ :on, unquote(id), _ } -> false
_ -> true
end]))
{ matches, [{ :on, _, condition }|scheme] } = Enum.split_while(scheme, fun)
{ fun, _ } = Code.eval_quoted(quote([do: fn
{ :on, unquote(id), :match, _ } -> true
_ -> false
end]))
expand_operation(scheme, [quote do
{ loaded, scope, data, endian } = case unquote(fixup_value(condition)) do
unquote(Enum.chunk_by(matches, fun) |> Enum.chunk_every(2) |> Enum.map(fn [branch, match|_] ->
{ :->, [], expand_operation(branch ++ match, []) }
end) |> Enum.reverse)
end
end|ops])
end
defp expand_operation([{ :skip, op }|scheme], ops) do
expand_operation(scheme, [
quote([do: { _, data } = unquote(create_call(op))])
|ops])
end
defp expand_operation([{ :assert_empty }|scheme], ops) do
expand_operation(scheme, [
quote([do: if(data != <<>>, do: raise(Tonic.NotEmpty, data: data))])
|ops])
end
defp expand_operation([op|scheme], ops) do
expand_operation(scheme, [
quote([do: { value, data } = unquote(create_call(op))]),
quote([do: loaded = :erlang.append_element(loaded, value)]),
quote([do: scope = [var_entry(unquote(op_name(op)), value)|scope]])
|ops])
end
@doc false
defp find_used_variables(ast = { name, [], __MODULE__ }, unused) do
{ unused, _ } = Enum.map_reduce(unused, nil, fn
vars, :found -> { vars, :found }
vars, _ ->
{ value, new } = Keyword.get_and_update(vars, name, &({ &1, true }))
if(value != nil, do: { new, :found }, else: { vars, nil })
end)
{ ast, unused }
end
defp find_used_variables(ast, unused), do: { ast, unused }
@doc false
defp find_unused_variables({ :=, _, [variable|init] }, unused) do
{ _, names } = Macro.prewalk(variable, [], fn
ast = { name, [], __MODULE__ }, acc -> { ast, [{ name, false }|acc] }
ast, acc -> { ast, acc }
end)
{ _, unused } = Macro.prewalk(init, unused, &find_used_variables/2)
[names|unused]
end
defp find_unused_variables({ :__block__, _, ops }, unused), do: Enum.reduce(ops, unused, &find_unused_variables/2)
defp find_unused_variables(op, unused) do
{ _, unused } = Macro.prewalk(op, unused, &find_used_variables/2)
unused
end
@doc false
defp mark_unused_variables(functions) do
Enum.map(functions, fn { :def, ctx, [{ name, name_ctx, args }, [do: body]] } ->
unused = Enum.reverse(find_unused_variables(body, []))
{ body, _ } = mark_unused_variables(body, unused)
#Run it a second time to prevent cases where: var1 is initialized, var1 is assigned to var2, var2 is not used and so removed making var1 also now unused (so needs to be removed)
[arg_unused|unused] = Enum.reverse(find_unused_variables(body, [Enum.map(args, fn { arg, _, _ } -> { arg, false } end)]))
{ body, _ } = mark_unused_variables(body, Enum.map(unused, fn
variables -> Enum.filter(variables, fn { name, _ } -> !(to_string(name) |> String.starts_with?("_")) end)
end))
{ :def, ctx, [{ name, name_ctx, underscore_variables(args, arg_unused) }, [do: body]] }
end)
end
@doc false
defp mark_unused_variables({ :=, ctx, [variable|init] }, [assignment|unused]) do
used = Enum.reduce(assignment, false, fn
{ _, used }, false -> used
_, _ -> true
end)
{ if(used, do: { :=, ctx, [underscore_variables(variable, assignment)|init] }, else: { :__block__, [], [] }), unused }
end
defp mark_unused_variables({ :__block__, ctx, ops }, unused) do
{ ops, unused } = Enum.map_reduce(ops, unused, &mark_unused_variables/2)
ops = Enum.filter(ops, fn
{ :__block__, [], [] } -> false
_ -> true
end)
{ { :__block__, ctx, ops }, unused }
end
defp mark_unused_variables(op, unused), do: { op, unused }
@doc false
defp underscore_variables(variables, assignment) do
Macro.prewalk(variables, fn
{ name, [], __MODULE__ } -> { if(Keyword.get(assignment, name, true), do: name, else: String.to_atom("_" <> to_string(name))), [], __MODULE__ }
ast -> ast
end)
end
@doc false
defp reduce_functions(functions), do: reduce_functions(functions, { [], [], %{} })
@doc false
defp reduce_functions([func = { :def, ctx, [{ name, name_ctx, args }, body] }|functions], { reduced, unique, replace }) do
f = { :def, ctx, [{ nil, name_ctx, args }, Macro.prewalk(body, fn t -> Macro.update_meta(t, &Keyword.delete(&1, :line)) end)] }
reduce_functions(functions, case Enum.find(unique, fn { _, func } -> func == f end) do
{ replacement, _ } -> { reduced, unique, Map.put(replace, name, replacement) }
_ -> { [func|reduced], [{ name, f }|unique], replace }
end)
end
defp reduce_functions([other|functions], { reduced, unique, replace }), do: reduce_functions(functions, { [other|reduced], unique, replace })
defp reduce_functions([], { reduced, _unique, replace }) do
List.foldl(reduced, [], fn
{ :def, ctx, [func, body] }, acc ->
[{ :def, ctx, [func, Macro.prewalk(body, fn
{ name, ctx, args } when is_atom(name) -> { replace[name] || name, ctx, args }
t -> t
end)] }|acc]
other, acc -> [other|acc]
end)
end
defmacro __before_compile__(env) do
code = quote do
unquote(Map.keys(Module.get_attribute(env.module, :tonic_data_scheme)) |> Enum.map(fn scheme ->
{ :def, [context: __MODULE__, import: Kernel], [
{ scheme, [context: __MODULE__], [{ :currently_loaded, [], __MODULE__ }, { :data, [], __MODULE__ }, { :name, [], __MODULE__ }, { :endian, [], __MODULE__ }] },
[do: { :__block__, [], expand_data_scheme(Module.get_attribute(env.module, :tonic_data_scheme)[scheme], case to_string(scheme) do
<<"load_group_", _ :: binary>> -> quote do: { name }
<<"load_skip_", _ :: binary>> -> quote do: { name }
_ -> quote do: {} #load, load_repeat_
end) }]
]
}
end))
end
code = if(Module.get_attribute(env.module, :tonic_enable_optimization)[:reduce] == true, do: reduce_functions(code), else: code)
code = mark_unused_variables(code)
{ :__block__, [], Enum.map(code, fn function -> { :__block__, [], [quote(do: @doc false), function] } end) }
end
#loading
@doc """
Loads the binary data using the spec from a given module.
The return value consists of the loaded values and the remaining data that wasn't read.
"""
@spec load(bitstring, module) :: { any, bitstring }
def load(data, module) when is_bitstring(data) do
module.load([], data, nil, :native)
end
@doc """
Loads the file data using the spec from a given module.
The return value consists of the loaded values and the remaining data that wasn't read.
"""
@spec load_file(Path.t, module) :: { any, bitstring }
def load_file(file, module) do
{ :ok, data } = File.read(file)
load(data, module)
end
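# Example (a hypothetical sketch; PairSpec is made up for illustration):
#
#     defmodule PairSpec do
#       use Tonic
#
#       uint8 :a
#       uint8 :b
#     end
#
#     Tonic.load(<<1, 2, 3>>, PairSpec)
#     # => {{{:a, 1}, {:b, 2}}, <<3>>}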
#get
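# Searches the loaded-value scopes innermost-first for a { name, value }
# pair, raising Tonic.MarkNotFound once every scope is exhausted.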
@doc false
defp get_value([], [], name), do: raise(Tonic.MarkNotFound, name: name)
defp get_value([scope|loaded], [], name), do: get_value(loaded, scope, name)
defp get_value(_, [{ name, value }|_], name), do: value
defp get_value(loaded, [_|vars], name), do: get_value(loaded, vars, name)
@doc false
def get_value(loaded, { name, fun }), do: fun.(get_value(loaded, [], name))
def get_value(loaded, fun) when is_function(fun), do: fun.(loaded)
def get_value(loaded, name), do: get_value(loaded, [], name)
#get
@doc """
Get the loaded value by using either a name to lookup the value, or a function to manually
look it up.
**`get(atom) :: any`**#{" "}
Using a name for the lookup will cause it to search for that name in the current
loaded data scope and any containing scopes (but not separate branched scopes). If the
name is not found, a `Tonic.MarkNotFound` exception will be raised.
**`get(fun) :: any`**#{" "}
Using a function for the lookup will cause it to pass the current state to the function,
where the function can return the value you want to get.
Examples
--------
uint8 :length
repeat get(:length), :uint8
uint8 :length
repeat get(fn [[{ :length, length }]] -> length end), :uint8
"""
#get fn loaded -> 0 end
#get :value
@spec get(atom) :: ast
@spec get((list -> any)) :: ast
defmacro get(name_or_fun) do
quote do
{ :get, unquote(Macro.escape(name_or_fun)) }
end
end
@doc """
Get the loaded value with name, and pass the value into a function.
Examples
--------
uint8 :length
repeat get(:length, fn length -> length - 1 end)
"""
#get :value, fn value -> value end
@spec get(atom, (list -> any)) :: ast
defmacro get(name, fun) do
quote do
{ :get, { unquote(name), unquote(Macro.escape(fun)) } }
end
end
#on
@doc """
Executes the load operations of whichever clause matches the condition.
Examples
--------
uint8 :type
on get(:type) do
1 -> uint32 :value
2 -> float32 :value
end
"""
@spec on(term, [do: [{ :->, any, any }]]) :: ast
defmacro on(condition, [do: clauses]) do
quote do
on_id = @tonic_unique_function_id
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :on, on_id, unquote(Macro.escape(condition)) }|@tonic_data_scheme[@tonic_current_scheme]])
unquote({ :__block__, [], Enum.map(clauses, fn { :->, _, [[match]|args] } ->
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :on, on_id, :match, unquote(Macro.escape(match)) }|@tonic_data_scheme[@tonic_current_scheme]])
unquote({ :__block__, [], args })
end
end) })
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :on, on_id, :end }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
#chunk
@doc """
Extract a chunk of data for processing.
Executes the load operations only on the given chunk.
**<code class="inline">chunk(<a href="#t:length/0">length</a>, <a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the block as the load operation on the chunk of length.
Example
-------
chunk 4 do
uint8 :a
uint8 :b
end
chunk 4 do
repeat :uint8
end
"""
#chunk 4, do: nil
@spec chunk(length, block(any)) :: ast
defmacro chunk(length, block) do
quote do
chunk_id = @tonic_unique_function_id
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :chunk, chunk_id, unquote(Macro.escape(length)) }|@tonic_data_scheme[@tonic_current_scheme]])
unquote(block)
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :chunk, chunk_id, :end }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
#skip
@doc """
Skip the given load operations.
Executes the load operations but doesn't return the loaded data.
**<code class="inline">skip(atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Skip the given type.
**<code class="inline">skip(<a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Skip the given block.
Example
-------
skip :uint8
skip do
uint8 :a
uint8 :b
end
"""
#skip :type
@spec skip(atom) :: ast
defmacro skip(type) when is_atom(type) do
quote do
skip([do: unquote(type)()])
end
end
#skip do: nil
@spec skip(block(any)) :: ast
defmacro skip(block) do
quote do
skip_func_name = String.to_atom("load_skip_" <> to_string(:__tonic_anon__) <> "_" <> to_string(unquote(__CALLER__.line)) <> "_" <> to_string(@tonic_unique_function_id))
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :skip, { skip_func_name, :__tonic_anon__ } }|@tonic_data_scheme[@tonic_current_scheme]])
@tonic_previous_scheme [@tonic_current_scheme|@tonic_previous_scheme]
@tonic_current_scheme skip_func_name
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [])
unquote(block)
[current|previous] = @tonic_previous_scheme
@tonic_previous_scheme previous
@tonic_current_scheme current
end
end
#optional
@doc """
Optionally execute the given load operations.
Usually, if the current data does not match what it is trying to load, a match error
will be raised and the data will not be loaded successfully. Using `optional` is a way
to avoid that: if there is a match error, the load operations it attempted to execute
are skipped and loading continues with the rest of the data spec. If there is no
match error, the load operations that were attempted are combined with the currently
loaded data.
**<code class="inline">optional(atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Optionally load the given type.
**<code class="inline">optional(<a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Optionally load the given block.
Example
-------
optional :uint8
optional do
uint8 :a
uint8 :b
end
"""
#optional :type
@spec optional(atom) :: ast
defmacro optional(type) when is_atom(type) do
quote do
optional([do: unquote(type)()])
end
end
#optional do: nil
@spec optional(block(any)) :: ast
defmacro optional(block) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :optional }|@tonic_data_scheme[@tonic_current_scheme]])
unquote(block)
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :optional, :end }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
#empty!
@doc """
Assert that all of the current data has been loaded. If there is still data remaining,
the `Tonic.NotEmpty` exception will be raised.
Example
-------
int8 :a
empty!() #check that there is no data left
"""
defmacro empty!() do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :assert_empty }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
#spec
@doc """
Execute the current spec again.
Examples
--------
defmodule Recursive do
use Tonic
uint8
optional do: spec
end
\# Tonic.load <<1, 2, 3>>, Recursive
\# => {{1, {2, {3}}}, ""}
"""
@spec spec() :: ast
defmacro spec() do
quote do
spec(__MODULE__)
end
end
#spec
@doc """
Execute the provided spec.
Examples
--------
defmodule Foo do
use Tonic
uint8
spec Bar
end
defmodule Bar do
use Tonic
uint8 :a
uint8 :b
end
\# Tonic.load <<1, 2, 3>>, Foo
\# => {{1, {{:a, 2}, {:b, 3}}}, ""}
"""
@spec spec(module) :: ast
defmacro spec(module) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :spec, unquote(module) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
#endian
@doc """
Sets the default endianness used by types where endianness is not specified.
Examples
--------
endian :little
uint32 :value #little endian
endian :big
uint32 :value #big endian
endian :little
uint32 :value, :big #big endian
"""
#endian :little
@spec endian(endianness) :: ast
defmacro endian(endianness) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :endian, unquote(endianness) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
#repeat
@doc """
Repeat the given load operations until it reaches the end.
**<code class="inline">repeat(atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the type as the load operation to be repeated.
**<code class="inline">repeat(<a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the block as the load operation to be repeated.
Examples
--------
repeat :uint8
repeat do
uint8 :a
uint8 :b
end
"""
#repeat :type
@spec repeat(atom) :: ast
defmacro repeat(type) when is_atom(type) do
quote do
repeat(fn _ -> false end, unquote(type))
end
end
#repeat do: nil
@spec repeat(block(any)) :: ast
defmacro repeat(block) do
quote do
repeat(fn _ -> false end, unquote(block))
end
end
@doc """
Repeat the given load operations until it reaches the end or for length.
**<code class="inline">repeat(atom, atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the type as the load operation to be repeated. And wraps the output with the given
name.
**<code class="inline">repeat(atom, <a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the block as the load operation to be repeated. And wraps the output with the given
name.
**<code class="inline">repeat(<a href="#t:length/0">length</a>, atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the type as the load operation to be repeated. And repeats for length.
**<code class="inline">repeat(<a href="#t:length/0">length</a>, <a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the block as the load operation to be repeated. And repeats for length.
Examples
--------
repeat :values, :uint8
repeat :values do
uint8 :a
uint8 :b
end
repeat 4, :uint8
repeat fn _ -> false end, :uint8
repeat 2 do
uint8 :a
uint8 :b
end
repeat fn _ -> false end do
uint8 :a
uint8 :b
end
"""
#repeat :new_repeat, :type
@spec repeat(atom, atom) :: ast
defmacro repeat(name, type) when is_atom(name) and is_atom(type) do
quote do
repeat(unquote(name), fn _ -> false end, unquote(type))
end
end
#repeat :new_repeat, do: nil
@spec repeat(atom, block(any)) :: ast
defmacro repeat(name, block) when is_atom(name) do
quote do
repeat(unquote(name), fn _ -> false end, unquote(block))
end
end
#repeat times, :type
@spec repeat(length, atom) :: ast
defmacro repeat(length, type) when is_atom(type) do
quote do
repeat(:__tonic_anon__, unquote(length), fn { _, value } ->
Enum.map(value, fn { i } -> i end)
end, [do: unquote(type)()])
end
end
#repeat times, do: nil
@spec repeat(length, block(any)) :: ast
defmacro repeat(length, block) do
quote do
repeat(:__tonic_anon__, unquote(length), fn { _, value } ->
value
end, unquote(block))
end
end
@doc """
Repeat the given load operations for length.
**<code class="inline">repeat(atom, <a href="#t:length/0">length</a>, atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the type as the load operation to be repeated. And wraps the output with the given
name. Repeats for length.
**<code class="inline">repeat(atom, <a href="#t:length/0">length</a>, <a href="#t:block/1">block(any)</a>) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Uses the block as the load operation to be repeated. And wraps the output with the given
name. Repeats for length.
Examples
--------
repeat :values, 4, :uint8
repeat :values, fn _ -> false end, :uint8
repeat :values, 4 do
uint8 :a
uint8 :b
end
repeat :values, fn _ -> false end do
uint8 :a
uint8 :b
end
"""
#repeat :new_repeat, times, :type
@spec repeat(atom, length, atom) :: ast
defmacro repeat(name, length, type) when is_atom(type) do
quote do
repeat(unquote(name), unquote(length), fn { name, value } ->
{ name, Enum.map(value, fn { i } -> i end) }
end, [do: unquote(type)()])
end
end
#repeat :new_repeat, times, do: nil
@spec repeat(atom, length, block(any)) :: ast
defmacro repeat(name, length, block) do
quote do
repeat_func_name = String.to_atom("load_repeat_" <> to_string(unquote(name)) <> "_" <> to_string(unquote(__CALLER__.line)) <> "_" <> to_string(@tonic_unique_function_id))
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :repeat, repeat_func_name, unquote(name), unquote(Macro.escape(length)) }|@tonic_data_scheme[@tonic_current_scheme]])
@tonic_previous_scheme [@tonic_current_scheme|@tonic_previous_scheme]
@tonic_current_scheme repeat_func_name
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [])
unquote(block)
[current|previous] = @tonic_previous_scheme
@tonic_previous_scheme previous
@tonic_current_scheme current
end
end
@doc """
Repeats the load operations for length, passing the result to a callback.
Examples
--------
repeat :values, 4, fn result -> result end do
uint8 :a
uint8 :b
end
repeat :values, 4, fn { name, value } -> value end do
uint8 :a
uint8 :b
end
repeat :values, fn _ -> false end, fn result -> result end do
uint8 :a
uint8 :b
end
"""
#repeat :new_repeat, times, fn { name, value } -> value end, do: nil
@spec repeat(atom, length, callback, block(any)) :: ast
defmacro repeat(name, length, fun, block) do
quote do
repeat_func_name = String.to_atom("load_repeat_" <> to_string(unquote(name)) <> "_" <> to_string(unquote(__CALLER__.line)) <> "_" <> to_string(@tonic_unique_function_id))
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :repeat, repeat_func_name, unquote(name), unquote(Macro.escape(length)), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
@tonic_previous_scheme [@tonic_current_scheme|@tonic_previous_scheme]
@tonic_current_scheme repeat_func_name
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [])
unquote(block)
[current|previous] = @tonic_previous_scheme
@tonic_previous_scheme previous
@tonic_current_scheme current
end
end
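# Runtime helpers for repeat: the predicate-based clauses repeat until
# should_stop returns true (or the data runs out), while the count-based
# clauses count n down to 0. Values accumulate in reverse and are reversed
# once at the end.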
@doc false
defp repeater_(_, should_stop, list, _, <<>>, name, _) when is_function(should_stop) or should_stop === nil, do: { { name, :lists.reverse(list) }, <<>> }
defp repeater_(func, should_stop, list, currently_loaded, data, name, endian) when is_function(should_stop) do
{ value, data } = func.(currently_loaded, data, nil, endian)
case should_stop.(list = [value|list]) do
true -> { { name, :lists.reverse(list) }, data }
_ -> repeater_(func, should_stop, list, currently_loaded, data, name, endian)
end
end
@doc false
defp repeater_(_, 0, list, _, data, name, _), do: { { name, :lists.reverse(list) }, data }
defp repeater_(func, n, list, currently_loaded, data, name, endian) do
{ value, data } = func.(currently_loaded, data, nil, endian)
repeater_(func, n - 1, [value|list], currently_loaded, data, name, endian)
end
@doc false
def repeater(func, n, currently_loaded, data, name, endian), do: repeater_(func, n, [], currently_loaded, data, name, endian)
@doc false
def repeater(func, n, currently_loaded, data, name, endian, fun) when is_function(fun), do: callback(repeater_(func, n, [], currently_loaded, data, name, endian), fun)
#group
@doc """
Group the load operations.
Examples
--------
group do
uint8 :a
uint8 :b
end
"""
#group do: nil
@spec group(block(any)) :: ast
defmacro group(block) do
quote do
group(:__tonic_anon__, fn group -> :erlang.delete_element(1, group) end, unquote(block))
end
end
@doc """
Group the load operations, wrapping them with the given name.
Examples
--------
group :values do
uint8 :a
uint8 :b
end
"""
#group :new_group, do: nil
@spec group(atom, block(any)) :: ast
defmacro group(name, block) do
quote do
group_func_name = String.to_atom("load_group_" <> to_string(unquote(name)) <> "_" <> to_string(unquote(__CALLER__.line)) <> "_" <> to_string(@tonic_unique_function_id))
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ group_func_name, unquote(name) }|@tonic_data_scheme[@tonic_current_scheme]])
@tonic_previous_scheme [@tonic_current_scheme|@tonic_previous_scheme]
@tonic_current_scheme group_func_name
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [])
unquote(block)
[current|previous] = @tonic_previous_scheme
@tonic_previous_scheme previous
@tonic_current_scheme current
end
end
@doc """
Group the load operations, wrapping them with the given name and passing the result to a callback.
Examples
--------
group :values, fn { _, value } -> value end do
uint8 :a
uint8 :b
end
"""
#group :new_group, fn { name, value } -> value end, do: nil
@spec group(atom, callback, block(any)) :: ast
defmacro group(name, fun, block) do
quote do
group_func_name = String.to_atom("load_group_" <> to_string(unquote(name)) <> "_" <> to_string(unquote(__CALLER__.line)) <> "_" <> to_string(@tonic_unique_function_id))
@tonic_unique_function_id @tonic_unique_function_id + 1
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ group_func_name, unquote(name), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
@tonic_previous_scheme [@tonic_current_scheme|@tonic_previous_scheme]
@tonic_current_scheme group_func_name
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [])
unquote(block)
[current|previous] = @tonic_previous_scheme
@tonic_previous_scheme previous
@tonic_current_scheme current
end
end
#type creation
@doc false
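# Builds the AST for the bitstring segment modifiers used by the generated
# load functions; e.g. binary_parameters(:integer, 16, :signed, :little)
# yields the AST for `size(16)-signed-little-integer` (modifier order in
# binary patterns is not significant).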
defp binary_parameters(type, size, signedness, endianness) do
{
:-, [], [
{
:-, [], [
{
:-, [], [
{ :size, [], [size] },
{ signedness, [], nil }
]
},
{ endianness, [], nil }
]
},
{ type, [], nil }
]
}
end
@doc """
Declare a new type as an alias of another type or of a function.
**<code class="inline">type(atom, atom) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Create the new type as an alias of another type.
**<code class="inline">type(atom, (bitstring, atom, <a href="#t:endianness/0">endianness</a> -> { any, bitstring })) :: <a href="#t:ast/0">ast</a></code>**#{" "}
Implement the type as a function.
Examples
--------
type :myint8, :int8
type :myint8, fn data, name, _ ->
<<value :: integer-size(8)-signed, data :: bitstring>> = data
{ { name, value }, data }
end
type :myint16, fn
data, name, :little ->
<<value :: integer-size(16)-signed-little, data :: bitstring>> = data
{ { name, value }, data }
data, name, :big ->
<<value :: integer-size(16)-signed-big, data :: bitstring>> = data
{ { name, value }, data }
data, name, :native ->
<<value :: integer-size(16)-signed-native, data :: bitstring>> = data
{ { name, value }, data }
end
"""
#type alias of other type
#type :new_type, :old_type
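# Every type/2..5 form below generates the same four caller-side macro
# arities (type(), type(label), type(label, endianness_or_fun) and
# type(label, endianness, fun)), each recording a load step in
# @tonic_data_scheme, plus one load function that performs the actual read.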
@spec type(atom, atom) :: ast
defmacro type(name, type) when is_atom(type) do
quote do
defmacro unquote(name)() do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness_or_fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(Macro.escape(endianness_or_fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness, fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(endianness), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
def unquote(name)(currently_loaded, data, name, endianness), do: unquote(type)(currently_loaded, data, name, endianness)
end
end
#type with function
#type :new_type, fn data, name, endian -> { nil, data } end
@spec type(atom, (bitstring, atom, endianness -> { any, bitstring })) :: ast
defmacro type(name, fun) do
quote do
defmacro unquote(name)() do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness_or_fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(Macro.escape(endianness_or_fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness, fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(endianness), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
def unquote(name)(currently_loaded, data, name, endianness), do: unquote(fun).(data, name, endianness)
end
end
@doc """
Declare a new type as an alias of another type with an overriding (fixed) endianness.
Examples
--------
type :mylittleint16, :int16, :little
"""
#type alias of other type, overriding endianness
#type :new_type, :old_type, :little
@spec type(atom, atom, endianness) :: ast
defmacro type(name, type, endianness) do
quote do
defmacro unquote(name)() do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness_or_fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(Macro.escape(endianness_or_fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness, fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(endianness), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
def unquote(name)(currently_loaded, data, name, _), do: unquote(type)(currently_loaded, data, name, unquote(endianness))
end
end
@doc """
Declare a new type for a binary type of size with signedness (if used).
Examples
--------
type :myint16, :integer, 16, :signed
"""
#type with binary type, size, and signedness. applies to all endian types
#type :new_type, :integer, 32, :signed
@spec type(atom, atom, non_neg_integer, signedness) :: ast
defmacro type(name, type, size, signedness) do
quote do
defmacro unquote(name)() do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness_or_fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(Macro.escape(endianness_or_fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness, fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(endianness), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
def unquote(name)(currently_loaded, data, name, :little) do
<<value :: unquote(binary_parameters(type, size, signedness, :little)), data :: bitstring>> = data
{ { name, value }, data }
end
def unquote(name)(currently_loaded, data, name, :native) do
<<value :: unquote(binary_parameters(type, size, signedness, :native)), data :: bitstring>> = data
{ { name, value }, data }
end
def unquote(name)(currently_loaded, data, name, :big) do
<<value :: unquote(binary_parameters(type, size, signedness, :big)), data :: bitstring>> = data
{ { name, value }, data }
end
end
end
@doc """
Declare a new type for a binary type of size with signedness (if used) and an overriding
(fixed) endianness.
Examples
--------
type :mylittleint16, :integer, 16, :signed, :little
"""
#type with binary type, size, signedness, and endianness
#type :new_type, :integer, 32, :signed, :little
@spec type(atom, atom, non_neg_integer, signedness, endianness) :: ast
defmacro type(name, type, size, signedness, endianness) do
quote do
defmacro unquote(name)() do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness_or_fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(Macro.escape(endianness_or_fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
defmacro unquote(name)(label, endianness, fun) do
quote do
@tonic_data_scheme Map.put(@tonic_data_scheme, @tonic_current_scheme, [{ :erlang.element(1, unquote(__ENV__.function)), unquote(label), unquote(endianness), unquote(Macro.escape(fun)) }|@tonic_data_scheme[@tonic_current_scheme]])
end
end
def unquote(name)(currently_loaded, data, name, _) do
<<value :: unquote(binary_parameters(type, size, signedness, endianness)), data :: bitstring>> = data
{ { name, value }, data }
end
end
end
end
defmodule Tonic.Types do
import Tonic
@doc """
Read a single bit boolean value.
"""
type :bit, fn <<value :: size(1), data :: bitstring>>, name, _ ->
{ { name, value == 1 }, data }
end
@doc """
Read an 8-bit signed integer.
"""
type :int8, :integer, 8, :signed
@doc """
Read a 16-bit signed integer.
"""
type :int16, :integer, 16, :signed
@doc """
Read a 32-bit signed integer.
"""
type :int32, :integer, 32, :signed
@doc """
Read a 64-bit signed integer.
"""
type :int64, :integer, 64, :signed
@doc """
Read an 8-bit unsigned integer.
"""
type :uint8, :integer, 8, :unsigned
@doc """
Read a 16-bit unsigned integer.
"""
type :uint16, :integer, 16, :unsigned
@doc """
Read a 32-bit unsigned integer.
"""
type :uint32, :integer, 32, :unsigned
@doc """
Read a 64-bit unsigned integer.
"""
type :uint64, :integer, 64, :unsigned
@doc """
Read a 32-bit floating point.
"""
type :float32, :float, 32, :signed
@doc """
Read a 64-bit floating point.
"""
type :float64, :float, 64, :signed
defp list_to_string_drop_last_value(values, string \\ "")
defp list_to_string_drop_last_value([], string), do: string
defp list_to_string_drop_last_value([_], string), do: string
defp list_to_string_drop_last_value([{ c }|values], string), do: list_to_string_drop_last_value(values, string <> <<c>>)
@doc false
def convert_to_string_without_last_byte({ name, values }), do: { name, convert_to_string_without_last_byte(values) }
def convert_to_string_without_last_byte(values), do: list_to_string_drop_last_value(values)
@doc false
def convert_to_string({ name, values }), do: { name, convert_to_string(values) }
def convert_to_string(values), do: List.foldl(values, "", fn { c }, s -> s <> <<c>> end)
@doc """
Read a string.
By default it will read until the end of the data. Otherwise a length can be specified
(`length: 10`), or it can read up to a terminator (`?\\n` or `terminator: ?\\n`), or both
limits can be applied together. Trailing characters can be stripped using `strip: ?\\n`
or `strip: "\\n"`.
Examples
--------
string :read_to_end
string :read_8_chars, length: 8
string :read_till_nul, 0
string :read_till_newline, ?\\n
string :read_till_newline_or_8_chars, length: 8, terminator: ?\\n
string :read_to_end_remove_newline, strip: ?\\n
"""
defmacro string(name \\ [], options \\ [])
defmacro string(terminator, []) when is_integer(terminator), do: quote do: string(terminator: unquote(terminator))
defmacro string([terminator: terminator], []) do
quote do
repeat nil, fn [{ c }|_] -> c == unquote(fixup_value(terminator)) end, fn { _, values } -> convert_to_string_without_last_byte(values) end, do: uint8()
end
end
defmacro string([length: length], []) do
quote do
repeat nil, unquote(length), fn { _, values } -> convert_to_string(values) end, do: uint8()
end
end
defmacro string([], []) do
quote do
repeat nil, fn _ -> false end, fn { _, values } -> convert_to_string(values) end, do: uint8()
end
end
defmacro string(options, []) when is_list(options) do
quote do
repeat nil, fn chars = [{ c }|_] ->
c == unquote(fixup_value(options[:terminator])) or length(chars) == unquote(fixup_value(options[:length])) #todo: should change repeat step callback to pass in the length too
end, fn { _, values } ->
str = case List.last(values) == { unquote(fixup_value(options[:terminator])) } do #maybe repeat callbacks shouldn't pre-reverse the list and instead leave it up to the callback to reverse?
true -> convert_to_string_without_last_byte(values)
_ -> convert_to_string(values)
end
unquote(if options[:strip] != nil do
case fixup_value(options[:strip]) do
literal when is_integer(literal) or is_binary(literal) -> quote do: String.trim_trailing(str, <<unquote(fixup_value(options[:strip])) :: utf8>>)
runtime ->
quote do
case unquote(runtime) do
chr when is_integer(chr) -> String.trim_trailing(str, <<chr :: utf8>>)
bin -> String.trim_trailing(str, bin)
end
end
end
else
quote do: str
end)
end, do: uint8()
end
end
defmacro string(name, terminator) when is_integer(terminator), do: quote do: string(unquote(name), terminator: unquote(terminator))
defmacro string(name, [terminator: terminator]) do
quote do
repeat unquote(name), fn [{ c }|_] -> c == unquote(fixup_value(terminator)) end, &convert_to_string_without_last_byte/1, do: uint8()
end
end
defmacro string(name, [length: length]) do
quote do
repeat unquote(name), unquote(length), &convert_to_string/1, do: uint8()
end
end
defmacro string(name, []) do
quote do
repeat unquote(name), fn _ -> false end, &convert_to_string/1, do: uint8()
end
end
defmacro string(name, options) do
quote do
repeat unquote(name), fn chars = [{ c }|_] ->
c == unquote(fixup_value(options[:terminator])) or length(chars) == unquote(fixup_value(options[:length])) #todo: should change repeat step callback to pass in the length too
end, fn charlist = { _, values } ->
{ name, str } = case List.last(values) == { unquote(fixup_value(options[:terminator])) } do #maybe repeat callbacks shouldn't pre-reverse the list and instead leave it up to the callback to reverse?
true -> convert_to_string_without_last_byte(charlist)
_ -> convert_to_string(charlist)
end
{ name, unquote(if options[:strip] != nil do
case fixup_value(options[:strip]) do
literal when is_integer(literal) or is_binary(literal) -> quote do: String.trim_trailing(str, <<unquote(fixup_value(options[:strip])) :: utf8>>)
runtime ->
quote do
case unquote(runtime) do
chr when is_integer(chr) -> String.trim_trailing(str, <<chr :: utf8>>)
bin -> String.trim_trailing(str, bin)
end
end
end
else
quote do: str
end) }
end, do: uint8()
end
end
end
defmodule Tonic.MarkNotFound do
defexception [:message, :name]
def exception(option), do: %Tonic.MarkNotFound{ message: "no loaded value marked with name: #{option[:name]}", name: option[:name] }
end
defmodule Tonic.NotEmpty do
defexception [:message, :data]
def exception(option), do: %Tonic.NotEmpty{ message: "data is not empty: #{inspect(option[:data])}", data: option[:data] }
end
# ---- end of lib/tonic.ex ----
defmodule FrontMatter do
@moduledoc """
Parse a file or string containing front matter and a document body.
Front matter is a block of yaml wrapped between two lines containing `---`.
In this example, the front matter contains `title: Hello` and `tags: x, y, z`, and the body is
`Hello, world`:
```md
---
title: Hello
tags: x, y, z
---
Hello, world
```
After parsing the document, front matter is returned as a map, and the body as
a string.
```elixir
FrontMatter.parse_file "example.md"
{:ok, %{"title" => "Hello", "tags" => ["x", "y", "z"]}, "Hello, world"}
```
"""
@doc """
Read a file, parse its contents, and return its front matter and body.
Returns `{:ok, matter, body}` on success (`matter` is a map), or
`{:error, error}` on error.
iex> FrontMatter.parse_file "test/fixtures/dumb.md"
{:ok, %{"title" => "Hello", "tags" => ["x", "y", "z"]}, "Hello, world\\n"}
iex> FrontMatter.parse_file "test/fixtures/idontexist.md"
{:error, :enoent}
"""
def parse_file(path) do
case File.read(path) do
{:ok, contents} ->
parse(contents)
{:error, error} ->
{:error, error}
end
end
@doc """
Read a file, parse its contents, and return its front matter and body.
Returns `{matter, body}` on success (`matter` is a map), throws on error.
iex> FrontMatter.parse_file! "test/fixtures/dumb.md"
{%{"title" => "Hello", "tags" => ["x", "y", "z"]}, "Hello, world\\n"}
iex> try do
...> FrontMatter.parse_file! "test/fixtures/idontexist.md"
...> rescue
...> e in FrontMatter.Error -> e.message
...> end
"File not found"
iex> try do
...> FrontMatter.parse_file! "test/fixtures/invalid.md"
...> rescue
...> e in FrontMatter.Error -> e.message
...> end
"Error parsing yaml front matter"
"""
def parse_file!(path) do
case parse_file(path) do
{:ok, matter, body} ->
{matter, body}
{:error, :enoent} ->
raise FrontMatter.Error, message: "File not found"
{:error, _} ->
raise FrontMatter.Error
end
end
@doc """
Parse a string and return its front matter and body.
Returns `{:ok, matter, body}` on success (`matter` is a map), or
`{:error, error}` on error.
iex> FrontMatter.parse "---\\ntitle: Hello\\n---\\nHello, world"
{:ok, %{"title" => "Hello"}, "Hello, world"}
iex> FrontMatter.parse "---\\ntitle: Hello\\n--\\nHello, world"
{:error, :invalid_front_matter}
"""
def parse(string) do
string
|> split_string()
|> process_parts()
end
@doc """
Parse a string and return its front matter and body.
Returns `{matter, body}` on success (`matter` is a map), throws on error.
iex> FrontMatter.parse! "---\\ntitle: Hello\\n---\\nHello, world"
{%{"title" => "Hello"}, "Hello, world"}
iex> try do
...> FrontMatter.parse! "---\\ntitle: Hello\\n--\\nHello, world"
...> rescue
...> e in FrontMatter.Error -> e.message
...> end
"Error parsing yaml front matter"
"""
def parse!(string) do
case parse(string) do
{:ok, matter, body} ->
{matter, body}
{:error, _} ->
raise FrontMatter.Error
end
end
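# The split pattern requires whitespace on both sides of `---`, so a newline
# is prepended after trimming; this way a document that starts with `---`
# still splits into three parts: an empty prefix, the yaml block, and the body.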
defp split_string(string) do
split_pattern = ~r/[\s\r\n]---[\s\r\n]/s
string
|> String.trim_leading()
|> (&("\n" <> &1)).()
|> split_by_regex(split_pattern, parts: 3)
end
defp split_by_regex(string, pattern, opts), do: Regex.split(pattern, string, opts)
defp process_parts([_, yaml, body]) do
case parse_yaml(yaml) do
{:ok, yaml} ->
{:ok, yaml, body}
{:error, error} ->
{:error, error}
end
end
defp process_parts(_), do: {:error, :invalid_front_matter}
defp parse_yaml(yaml) do
case YamlElixir.read_from_string(yaml) do
{:ok, parsed} ->
{:ok, parsed |> transform()}
error ->
error
end
end
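# Turns comma-separated scalar values into lists, e.g. "x, y, z" becomes
# ["x", "y", "z"]; values without commas pass through unchanged.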
defp parse_list({k, v}) do
pattern = ~r/,/
if v =~ pattern do
v =
v
|> String.replace(" ", "")
|> String.split(pattern)
{k, v}
else
{k, v}
end
end
defp transform(content) do
content
|> Task.async_stream(&parse_list/1)
|> Enum.into(%{}, fn {:ok, {k, v}} -> {k, v} end)
end
end
# ---- end of lib/front_matter.ex ----
defmodule CsvDemo.Transformer do
def in_state(input, output, state \\ "UT") do
process(input, output, fn stream ->
stream
|> Stream.filter(&(&1.state == state))
end)
end
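# Keeps rows whose last name starts with A..K and whose birthday is more
# than roughly 18 years ago (approximated here as 18 * 365 days).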
def registration(input, output) do
process(input, output, fn stream ->
stream
|> Stream.filter(
&(&1.last |> String.to_charlist() |> Enum.at(0) |> (fn x -> x in ?A..?K end).())
)
|> Stream.filter(&(Date.diff(Date.utc_today(), &1.birthday) > 18 * 365))
end)
end
def john(input, output) do
process(input, output, fn stream ->
stream
|> Stream.map(&(%{&1 | first: "John", last: "Smith"} |> IO.inspect()))
end)
end
def add_full_name(input, output) do
process(
input,
output,
fn stream ->
stream
|> Stream.map(&Map.put(&1, :full_name, "#{&1.first} #{&1.last}"))
|> Stream.map(&Map.drop(&1, [:first, :last]))
end,
&generic_build_line/1
)
end
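# Shared pipeline: stream the input file, drop the header row, parse each
# line into a map, apply the given transformer, rebuild each row with the
# builder, and write the result into the output file's stream.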
defp process(input, output, transformer, builder \\ &build_line/1) do
with out = output |> Path.expand() |> File.stream!() do
input
|> Path.expand()
|> File.stream!()
|> Stream.drop(1)
|> Stream.map(&parse_line/1)
|> transformer.()
|> Stream.map(builder)
|> Enum.into(out)
end
end
defp parse_line(line) do
[first, last, phone, street, city, zip, state, birthday] =
line |> String.trim() |> String.split(",")
%{
first: first,
last: last,
phone: phone,
street: street,
city: city,
zip: zip,
state: state,
birthday: Date.from_iso8601!(birthday)
}
end
defp build_line(%{
first: first,
last: last,
phone: phone,
street: street,
city: city,
zip: zip,
state: state,
birthday: birthday
}) do
[first, last, phone, street, city, zip, state, to_string(birthday)]
|> Enum.join(",")
|> (&(&1 <> "\n")).()
end
defp generic_build_line(map) do
map
|> Map.values()
|> Enum.join(",")
|> (&(&1 <> "\n")).()
end
end
# ---- end of csv_demo/lib/csv_demo/transformer.ex ----
defmodule AWS.Athena do
@moduledoc """
Amazon Athena is an interactive query service that lets you use standard
SQL to analyze data directly in Amazon S3. You can point Athena at your
data in Amazon S3 and run ad-hoc queries and get results in seconds. Athena
is serverless, so there is no infrastructure to set up or manage. You pay
only for the queries you run. Athena scales automatically—executing queries
in parallel—so results are fast, even with large datasets and complex
queries. For more information, see [What is Amazon
Athena](http://docs.aws.amazon.com/athena/latest/ug/what-is.html) in the
*Amazon Athena User Guide*.
If you connect to Athena using the JDBC driver, use version 1.1.0 of the
driver or later with the Amazon Athena API. Earlier version drivers do not
support the API. For more information and to download the driver, see
[Accessing Amazon Athena with
JDBC](https://docs.aws.amazon.com/athena/latest/ug/connect-with-jdbc.html).
For code samples using the AWS SDK for Java, see [Examples and Code
Samples](https://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
@doc """
Returns the details of a single named query or a list of up to 50 queries,
which you provide as an array of query ID strings. Requires you to have
access to the workgroup in which the queries were saved. Use
`ListNamedQueriesInput` to get the list of named query IDs in the specified
workgroup. If information could not be retrieved for a submitted query ID,
information about the query ID submitted is listed under
`UnprocessedNamedQueryId`. Named queries differ from executed queries. Use
`BatchGetQueryExecutionInput` to get details about each unique query
execution, and `ListQueryExecutionsInput` to get a list of query execution
IDs.
"""
def batch_get_named_query(client, input, options \\ []) do
request(client, "BatchGetNamedQuery", input, options)
end
@doc """
Returns the details of a single query execution or a list of up to 50 query
executions, which you provide as an array of query execution ID strings.
Requires you to have access to the workgroup in which the queries ran. To
get a list of query execution IDs, use
`ListQueryExecutionsInput$WorkGroup`. Query executions differ from named
(saved) queries. Use `BatchGetNamedQueryInput` to get details about named
queries.
"""
def batch_get_query_execution(client, input, options \\ []) do
request(client, "BatchGetQueryExecution", input, options)
end
@doc """
Creates (registers) a data catalog with the specified name and properties.
Catalogs created are visible to all users of the same AWS account.
"""
def create_data_catalog(client, input, options \\ []) do
request(client, "CreateDataCatalog", input, options)
end
@doc """
Creates a named query in the specified workgroup. Requires that you have
access to the workgroup.
For code samples using the AWS SDK for Java, see [Examples and Code
Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
def create_named_query(client, input, options \\ []) do
request(client, "CreateNamedQuery", input, options)
end
@doc """
Creates a workgroup with the specified name.
"""
def create_work_group(client, input, options \\ []) do
request(client, "CreateWorkGroup", input, options)
end
@doc """
Deletes a data catalog.
"""
def delete_data_catalog(client, input, options \\ []) do
request(client, "DeleteDataCatalog", input, options)
end
@doc """
Deletes the named query if you have access to the workgroup in which the
query was saved.
For code samples using the AWS SDK for Java, see [Examples and Code
Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
def delete_named_query(client, input, options \\ []) do
request(client, "DeleteNamedQuery", input, options)
end
@doc """
Deletes the workgroup with the specified name. The primary workgroup cannot
be deleted.
"""
def delete_work_group(client, input, options \\ []) do
request(client, "DeleteWorkGroup", input, options)
end
@doc """
Returns the specified data catalog.
"""
def get_data_catalog(client, input, options \\ []) do
request(client, "GetDataCatalog", input, options)
end
@doc """
Returns a database object for the specified database and data catalog.
"""
def get_database(client, input, options \\ []) do
request(client, "GetDatabase", input, options)
end
@doc """
Returns information about a single query. Requires that you have access to
the workgroup in which the query was saved.
"""
def get_named_query(client, input, options \\ []) do
request(client, "GetNamedQuery", input, options)
end
@doc """
Returns information about a single execution of a query if you have access
to the workgroup in which the query ran. Each time a query executes,
information about the query execution is saved with a unique ID.
"""
def get_query_execution(client, input, options \\ []) do
request(client, "GetQueryExecution", input, options)
end
@doc """
Streams the results of a single query execution specified by
`QueryExecutionId` from the Athena query results location in Amazon S3. For
more information, see [Query
Results](https://docs.aws.amazon.com/athena/latest/ug/querying.html) in the
*Amazon Athena User Guide*. This request does not execute the query but
returns results. Use `StartQueryExecution` to run a query.
To stream query results successfully, the IAM principal with permission to
call `GetQueryResults` also must have permissions to the Amazon S3
`GetObject` action for the Athena query results location.
<important> IAM principals with permission to the Amazon S3 `GetObject`
action for the query results location are able to retrieve query results
from Amazon S3 even if permission to the `GetQueryResults` action is
denied. To restrict user or role access, ensure that Amazon S3 permissions
to the Athena query location are denied.
</important>
"""
def get_query_results(client, input, options \\ []) do
request(client, "GetQueryResults", input, options)
end
@doc """
Returns table metadata for the specified catalog, database, and table.
"""
def get_table_metadata(client, input, options \\ []) do
request(client, "GetTableMetadata", input, options)
end
@doc """
Returns information about the workgroup with the specified name.
"""
def get_work_group(client, input, options \\ []) do
request(client, "GetWorkGroup", input, options)
end
@doc """
Lists the data catalogs in the current AWS account.
"""
def list_data_catalogs(client, input, options \\ []) do
request(client, "ListDataCatalogs", input, options)
end
@doc """
Lists the databases in the specified data catalog.
"""
def list_databases(client, input, options \\ []) do
request(client, "ListDatabases", input, options)
end
@doc """
Provides a list of available query IDs only for queries saved in the
specified workgroup. Requires that you have access to the specified
workgroup. If a workgroup is not specified, lists the saved queries for the
primary workgroup.
For code samples using the AWS SDK for Java, see [Examples and Code
Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
def list_named_queries(client, input, options \\ []) do
request(client, "ListNamedQueries", input, options)
end
@doc """
Provides a list of available query execution IDs for the queries in the
specified workgroup. If a workgroup is not specified, returns a list of
query execution IDs for the primary workgroup. Requires you to have access
to the workgroup in which the queries ran.
For code samples using the AWS SDK for Java, see [Examples and Code
Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
def list_query_executions(client, input, options \\ []) do
request(client, "ListQueryExecutions", input, options)
end
@doc """
Lists the metadata for the tables in the specified data catalog database.
"""
def list_table_metadata(client, input, options \\ []) do
request(client, "ListTableMetadata", input, options)
end
@doc """
Lists the tags associated with an Athena workgroup or data catalog
resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Lists available workgroups for the account.
"""
def list_work_groups(client, input, options \\ []) do
request(client, "ListWorkGroups", input, options)
end
@doc """
Runs the SQL query statements contained in the `Query`. Requires you to
have access to the workgroup in which the query ran. Running queries
against an external catalog requires `GetDataCatalog` permission to the
catalog. For code samples using the AWS SDK for Java, see [Examples and
Code
Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
def start_query_execution(client, input, options \\ []) do
request(client, "StartQueryExecution", input, options)
end
@doc """
Stops a query execution. Requires you to have access to the workgroup in
which the query ran.
For code samples using the AWS SDK for Java, see [Examples and Code
Samples](http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in
the *Amazon Athena User Guide*.
"""
def stop_query_execution(client, input, options \\ []) do
request(client, "StopQueryExecution", input, options)
end
@doc """
Adds one or more tags to an Athena resource. A tag is a label that you
assign to a resource. In Athena, a resource can be a workgroup or data
catalog. Each tag consists of a key and an optional value, both of which
you define. For example, you can use tags to categorize Athena workgroups
or data catalogs by purpose, owner, or environment. Use a consistent set of
tag keys to make it easier to search and filter workgroups or data catalogs
in your account. For best practices, see [Tagging Best
Practices](https://aws.amazon.com/answers/account-management/aws-tagging-strategies/).
Tag keys can be from 1 to 128 UTF-8 Unicode characters, and tag values can
be from 0 to 256 UTF-8 Unicode characters. Tags can use letters and numbers
representable in UTF-8, and the following characters: + - = . _ : / @. Tag
keys and values are case-sensitive. Tag keys must be unique per resource.
If you specify more than one tag, separate them by commas.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes one or more tags from a data catalog or workgroup resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates the data catalog that has the specified name.
"""
def update_data_catalog(client, input, options \\ []) do
request(client, "UpdateDataCatalog", input, options)
end
@doc """
Updates the workgroup with the specified name. The workgroup's name cannot
be changed.
"""
def update_work_group(client, input, options \\ []) do
request(client, "UpdateWorkGroup", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "athena"}
host = build_host("athena", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AmazonAthena.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
# ---- end of lib/aws/generated/athena.ex ----
defmodule Appsignal.Instrumentation.Decorators do
@moduledoc """
Instrumentation decorators
This module contains various [function
decorators](https://github.com/arjan/decorator) for instrumenting
function calls.
`@decorate transaction` - when a function decorated like this is
called, a transaction is started in the `:http_request` namespace.
`@decorate transaction(:background_job)` - when a function decorated
like this is called, a transaction is started in the
`:background_job` namespace.
`@decorate transaction_event` - when a function decorated like this
is called, it will add an event onto the transaction's timeline. The
name of the event will be the name of the function that's decorated.
`@decorate transaction_event(:category)` - when a function decorated
like this is called, it will add an event onto the transaction's
timeline. The name of the event will be the name of the function
that's decorated. In addition, the event will be grouped into the
given `:category`.
`@decorate channel_action` - this decorator is meant to be put
before the `handle_in/3` function of a Phoenix.Channel. See
`Appsignal.Phoenix.Channel` for more information on how to
instrument channel events.
"""
use Decorator.Define,
transaction: 0,
transaction: 1,
transaction_event: 0,
transaction_event: 1,
channel_action: 0
@doc false
def transaction(body, context) do
transaction(:http_request, body, context)
end
@doc false
def transaction(namespace, body, context) do
quote do
transaction = Appsignal.Transaction.start(
Appsignal.Transaction.generate_id,
unquote(namespace)
)
|> Appsignal.Transaction.set_action(unquote("#{context.module}##{context.name}"))
result = unquote(body)
Appsignal.Transaction.finish(transaction)
:ok = Appsignal.Transaction.complete(transaction)
result
end
end
@doc false
def transaction_event(category, body, context) do
decorate_event(".#{category}", body, context)
end
@doc false
def transaction_event(body, context) do
decorate_event("", body, context)
end
defp decorate_event(postfix, body, context) do
quote do
Appsignal.Instrumentation.Helpers.instrument(
self(),
unquote("#{context.name}#{postfix}"),
unquote("#{context.module}.#{context.name}"),
fn -> unquote(body) end)
end
end
@doc false
def channel_action(body, context = %{args: [action, _payload, socket]}) do
quote do
Appsignal.Phoenix.Channel.channel_action(
unquote(context.module),
unquote(action),
unquote(socket),
fn -> unquote(body) end
)
end
end
end
# ---- end of lib/appsignal/instrumentation/decorators.ex ----
defmodule Google.Protobuf.DoubleValue do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: 0.0
]
@type t :: %__MODULE__{
value: number
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:double, data.value, <<9>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
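# <<9>> is the protobuf key byte for field 1 with wire type 1 (64-bit
# fixed): (1 <<< 3) ||| 1 == 9. The sibling wrapper modules use <<13>>
# (wire type 5, 32-bit fixed) and <<8>> (wire type 0, varint) the same way.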
def decode(acc, <<9, data::binary>>) do
Decoder.field(:double, :value, acc, data)
end
# Failed to decode: either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error).
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.FloatValue do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: 0.0
]
@type t :: %__MODULE__{
value: number
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:float, data.value, <<13>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<13, data::binary>>) do
Decoder.field(:float, :value, acc, data)
end
# Failed to decode: either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error).
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.Int64Value do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: 0
]
@type t :: %__MODULE__{
value: integer
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:int64, data.value, <<8>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:int64, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.UInt64Value do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: 0
]
@type t :: %__MODULE__{
value: non_neg_integer
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:uint64, data.value, <<8>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:uint64, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.Int32Value do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: 0
]
@type t :: %__MODULE__{
value: integer
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:int32, data.value, <<8>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:int32, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.UInt32Value do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: 0
]
@type t :: %__MODULE__{
value: non_neg_integer
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:uint32, data.value, <<8>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:uint32, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.BoolValue do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: false
]
@type t :: %__MODULE__{
value: boolean
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bool, data.value, <<8>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<8, data::binary>>) do
Decoder.field(:bool, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.StringValue do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: ""
]
@type t :: %__MODULE__{
value: String.t
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:string, data.value, <<10>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:string, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
defmodule Google.Protobuf.BytesValue do
@moduledoc false
alias Pbuf.Decoder
import Bitwise, only: [bsr: 2, band: 2]
@derive Jason.Encoder
defstruct [
value: <<>>
]
@type t :: %__MODULE__{
value: binary
}
@spec new(Enum.t) :: t
def new(data) do
struct(__MODULE__, data)
end
@spec encode_to_iodata!(t | map) :: iodata
def encode_to_iodata!(data) do
alias Elixir.Pbuf.Encoder
[
Encoder.field(:bytes, data.value, <<10>>),
]
end
@spec encode!(t | map) :: binary
def encode!(data) do
:erlang.iolist_to_binary(encode_to_iodata!(data))
end
@spec decode!(binary) :: t
def decode!(data) do
Decoder.decode!(__MODULE__, data)
end
@spec decode(binary) :: {:ok, t} | :error
def decode(data) do
Decoder.decode(__MODULE__, data)
end
def decode(acc, <<10, data::binary>>) do
Decoder.field(:bytes, :value, acc, data)
end
# failed to decode, either this is an unknown tag (which we can skip), or
# it is a wrong type (which is an error)
def decode(acc, data) do
{prefix, data} = Decoder.varint(data)
tag = bsr(prefix, 3)
type = band(prefix, 7)
case tag in [1] do
false -> {acc, Decoder.skip(type, data)}
true ->
err = %Decoder.Error{
tag: tag,
module: __MODULE__,
message: "#{__MODULE__} tag #{tag} has an incorrect type of #{type}"
}
{:error, err}
end
end
def __finalize_decode__(args) do
struct = Elixir.Enum.reduce(args, %__MODULE__{}, fn
{k, v}, acc -> Map.put(acc, k, v)
end)
struct
end
end
|
lib/protoc/google/protobuf/wrappers.pb.ex
| 0.839175 | 0.612744 |
wrappers.pb.ex
|
starcoder
|
defmodule Aoc2017 do
@moduledoc false
def go(day) do
do_day(day)
end
def do_day(:all, 0) do
:done
end
def do_day(:all, num) do
do_day(num)
do_day(:all, num-1)
end
def do_day(:all) do
do_day(:all, 25)
end
def do_day(1) do
IO.inspect :day1.part_a, label: "Day 1 part A"
IO.inspect :day1.part_b, label: "Day 1 part B"
end
def do_day(2) do
IO.inspect :day2.part_a, label: "Day 2 part A"
IO.inspect :day2.part_b, label: "Day 2 part B"
end
def do_day(3) do
IO.inspect :day3.part_a, label: "Day 3 part A"
IO.inspect :day3.part_b, label: "Day 3 part B"
end
def do_day(4) do
IO.inspect Day4.part_a, label: "Day 4 part A"
IO.inspect Day4.part_b, label: "Day 4 part B"
end
def do_day(5) do
IO.inspect Day5.part_a, label: "Day 5 part A"
IO.inspect Day5.part_b, label: "Day 5 part B"
end
def do_day(6) do
IO.inspect Day6.part_a, label: "Day 6 part A"
IO.inspect Day6.part_b, label: "Day 6 part B"
end
def do_day(7) do
IO.inspect Day7.part_a, label: "Day 7 part A"
try do
IO.puts "Day 7 part B: "
Day7.part_b
catch
x -> x
end
end
def do_day(8) do
IO.inspect Day8.part_a, label: "Day 8 part A"
IO.inspect Day8.part_b, label: "Day 8 part B"
end
def do_day(9) do
IO.inspect Day9.part_a, label: "Day 9 part A"
IO.inspect Day9.part_b, label: "Day 9 part B"
end
def do_day(10) do
IO.inspect Day10.part_a, label: "Day 10 part A"
IO.inspect Day10.part_b, label: "Day 10 part B"
end
def do_day(11) do
IO.inspect Day11.part_a, label: "Day 11 part A"
IO.inspect Day11.part_b, label: "Day 11 part B"
end
def do_day(12) do
IO.inspect Day12.part_a, label: "Day 12 part A"
IO.inspect Day12.part_b, label: "Day 12 part B"
end
def do_day(13) do
IO.inspect Day13.part_a, label: "Day 13 part A"
IO.inspect Day13.part_b, label: "Day 13 part B"
end
def do_day(14) do
IO.inspect Day14.part_a, label: "Day 14 part A"
IO.inspect Day14.part_b, label: "Day 14 part B"
end
def do_day(15) do
IO.inspect Day15.part_a, label: "Day 15 part A"
IO.inspect Day15.part_b, label: "Day 15 part B"
end
def do_day(16) do
IO.inspect Day16.part_a, label: "Day 16 part A"
IO.inspect Day16.part_b, label: "Day 16 part B"
end
def do_day(17) do
IO.inspect Day17.part_a, label: "Day 17 part A"
IO.inspect Day17.part_b, label: "Day 17 part B"
end
def do_day(18) do
IO.inspect Day18.part_a, label: "Day 18 part A"
IO.inspect Day18.part_b, label: "Day 18 part B"
end
def do_day(19) do
IO.inspect Day19.part_a, label: "Day 19 part A"
IO.inspect Day19.part_b, label: "Day 19 part B"
end
def do_day(20) do
IO.inspect Day20.part_a, label: "Day 20 part A"
IO.inspect Day20.part_b, label: "Day 20 part B"
end
def do_day(invalid) do
IO.inspect invalid, label: "This day is invalid"
end
end
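# Usage sketch: Aoc2017.go(4) prints both parts of day 4; Aoc2017.do_day(:all)
# counts down from day 25, so days without a clause above (21-25 here) fall
# through to the "invalid day" clause.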
|
lib/aoc2017.ex
| 0.561455 | 0.677714 |
aoc2017.ex
|
starcoder
|
defmodule Cashtrail.Entities do
@moduledoc """
The Entities context manages the data related to entities. An entity keeps all
the financial data of something: a company, personal finances, an organization,
a church, an event, and so on. An entity has one owner and can also have other
members.
See `Cashtrail.Entities.Entity` to have more info about entity.
"""
@type user :: Cashtrail.Users.User.t()
@type entity :: Cashtrail.Entities.Entity.t()
@type entity_member :: Cashtrail.Entities.EntityMember.t()
@type entity_member_permission :: Cashtrail.Entities.EntityMember.permission()
import Ecto.Query, warn: false
alias Cashtrail.Repo
alias Cashtrail.{Entities, Paginator, Users}
import Cashtrail.QueryBuilder, only: [build_filter: 3, build_search: 3]
import Cashtrail.Statuses, only: [filter_by_status: 3]
@doc """
Returns a `%Cashtrail.Paginator.Page{}` struct with a list of entities in the
`:entries` field.
## Arguments
* options - A `keyword` list of the following options:
* `:filter` => filters by following attributes:
* `:type` or `"type"`
* `:status` or `"status"`
* `:search` => search entities by `:name`.
* See `Cashtrail.Paginator.paginate/2` to see pagination options.
See `Cashtrail.Entities.Entity` to have more detailed info about the fields to
be filtered or searched.
## Examples
iex> list_entities()
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.Entity{}, ...]}
iex> list_entities(filter: %{type: :company})
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.Entity{type: :company}, ...]}
iex> list_entities(search: "my")
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.Entity{name: "My company"}, ...]}
"""
@spec list_entities(keyword) :: Paginator.Page.t(entity())
def list_entities(options \\ []) do
from(e in Entities.Entity)
|> build_filter(Keyword.get(options, :filter), [:type])
|> filter_by_status(Keyword.get(options, :filter), :status)
|> build_search(Keyword.get(options, :search), [:name])
|> Paginator.paginate(options)
end
@doc """
Returns a `%Cashtrail.Paginator.Page{}` struct with a list of entities in the
`:entries` field from the given user.
## Arguments
* user - A `%Cashtrail.Users.User{}` that owns or is member of the entity.
* options - A `keyword` list of the following options:
* `:filter` => filters by following attributes:
* `:type` or `"type"`
* `:status` or `"status"`
* `:search` => search entities by `:name`.
* `:relation_type` => filter by relation type, that can be:
* `:owner` => list only entities owned by the user.
* `:member` => list only entities that the user is a member of.
* `:both` => the default value; lists entities that the user owns or is a
member of.
* See `Cashtrail.Paginator.paginate/2` to see pagination options.
See `Cashtrail.Entities.Entity` to have more detailed info about the fields to
be filtered or searched.
## Examples
iex> list_entities_for(owner)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.Entity{}, ...]}
iex> list_entities_for(member)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.Entity{}, ...]}
"""
@spec list_entities_for(user, keyword) :: Paginator.Page.t(entity())
def list_entities_for(%Users.User{id: user_id}, options \\ []) do
from(e in Entities.Entity)
|> build_filter(Keyword.get(options, :filter), [:type])
|> filter_by_status(Keyword.get(options, :filter), :status)
|> build_search(Keyword.get(options, :search), [:name])
|> of_relation(user_id, Keyword.get(options, :relation_type, :both))
|> Paginator.paginate(options)
end
defp of_relation(query, user_id, :owner) do
where(query, [e], e.owner_id == ^user_id)
end
defp of_relation(query, user_id, :member) do
query
|> join(:left, [e], m in assoc(e, :members))
|> where([_, m], m.user_id == ^user_id)
end
defp of_relation(query, user_id, _) do
query
|> join(:left, [e], m in assoc(e, :members))
|> where([e, m], e.owner_id == ^user_id or m.user_id == ^user_id)
end
@doc """
Gets a single entity.
Raises `Ecto.NoResultsError` if the Entity does not exist.
See `Cashtrail.Entities.Entity` to have more detailed info about the returned
struct.
## Arguments
* id - A `string` that is the unique id of the entity to be found.
## Examples
iex> get_entity!(123)
%Cashtrail.Entities.Entity{}
iex> get_entity!(456)
** (Ecto.NoResultsError)
"""
@spec get_entity!(Ecto.UUID.t()) :: entity()
def get_entity!(id), do: Repo.get!(Entities.Entity, id)
@doc """
Creates an entity.
## Arguments
* params - A `map` with the params of the entity to be created:
* `:name` (required) - A `string` with the name or description of the entity.
* `:type` - A `string` with the type of the entity. It can be `:personal`,
`:company` or `:other`. Defaults to `:personal`.
* `:owner_id` - A `string` that references to the `Cashtrail.Users.User` that
is the owner of the entity.
See `Cashtrail.Entities.Entity` to have more detailed info about the fields.
## Examples
iex> create_entity(user, %{field: value})
{:ok, %Cashtrail.Entities.Entity{}}
iex> create_entity(user, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec create_entity(user, map, boolean) ::
{:ok, entity()} | {:error, Ecto.Changeset.t(entity())}
def create_entity(user, attrs, create_tenants \\ true)
def create_entity(%Users.User{} = user, attrs, true) do
with {:ok, entity} <- create_entity(user, attrs, false),
{:ok, _tenant} <- Entities.Tenants.create(entity) do
{:ok, entity}
end
end
def create_entity(%Users.User{id: user_id}, attrs, false) do
%Entities.Entity{owner_id: user_id}
|> Entities.Entity.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates an entity.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` to be updated.
* params - A `map` with the field of the entity to be updated. See
`create_entity/2` to know about the params that can be given.
## Examples
iex> update_entity(entity, %{field: new_value})
{:ok, %Cashtrail.Entities.Entity{}}
iex> update_entity(entity, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec update_entity(entity(), map) :: {:ok, entity()} | {:error, Ecto.Changeset.t(entity())}
def update_entity(%Entities.Entity{} = entity, attrs) do
entity
|> Entities.Entity.changeset(attrs)
|> Repo.update()
end
@doc """
Archives an entity.
## Returns
* `{:ok, %Cashtrail.Entities.Entity{}}` in case of success.
* `{:error, %Ecto.Changeset{}}` in case of error.
* `{:error, :already_archived}` if the entity is already archived.
## Examples
iex> archive_entity(entity)
{:ok, %Cashtrail.Entities.Entity{}}
iex> archive_entity(entity)
{:error, %Ecto.Changeset{}}
"""
@spec archive_entity(Cashtrail.Entities.Entity.t()) ::
{:ok, entity()} | {:error, :already_archived | Ecto.Changeset.t(entity())}
def archive_entity(%Entities.Entity{archived_at: nil} = entity) do
entity
|> Entities.Entity.archive_changeset()
|> Repo.update()
end
def archive_entity(%Entities.Entity{}), do: {:error, :already_archived}
@doc """
Unarchives an entity.
## Returns
* `{:ok, %Cashtrail.Entities.Entity{}}` in case of success.
* `{:error, %Ecto.Changeset{}}` in case of error.
## Examples
iex> unarchive_entity(entity)
{:ok, %Cashtrail.Entities.Entity{}}
iex> unarchive_entity(entity)
{:error, %Ecto.Changeset{}}
"""
@spec unarchive_entity(Cashtrail.Entities.Entity.t()) ::
{:ok, entity()} | {:error, Ecto.Changeset.t(entity())}
def unarchive_entity(%Entities.Entity{} = entity) do
entity
|> Entities.Entity.unarchive_changeset()
|> Repo.update()
end
@doc """
Deletes an entity.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` to be deleted.
## Examples
iex> delete_entity(entity)
{:ok, %Cashtrail.Entities.Entity{}}
iex> delete_entity(entity)
{:error, %Ecto.Changeset{}}
"""
@spec delete_entity(entity()) ::
{:ok, entity()}
| {:error, Ecto.Changeset.t(entity())}
def delete_entity(entity, drop_tenants \\ true)
def delete_entity(%Entities.Entity{} = entity, true) do
with {:ok, entity} <- delete_entity(entity, false),
{:ok, _tenant} <- Entities.Tenants.drop(entity) do
{:ok, entity}
end
end
def delete_entity(%Entities.Entity{} = entity, false) do
Repo.delete(entity)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking entity changes.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` to be tracked.
## Examples
iex> change_entity(entity)
%Ecto.Changeset{source: %Cashtrail.Entities.Entity{}}
"""
@spec change_entity(entity()) :: Ecto.Changeset.t(entity())
def change_entity(%Entities.Entity{} = entity) do
Entities.Entity.changeset(entity, %{})
end
@doc """
Transfer the ownership of an entity from one user to another.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` to be transfered.
* from_user - The `%Cashtrail.Users.User{}` that currently owns the entity.
* to_user - The `%Cashtrail.Users.User{}` that will receive the ownership.
## Returns
* `{:ok, %Cashtrail.Entities.Entity{}}` if the entity is transferred successfully.
* `{:error, changeset}` if to_user is invalid or not found.
* `{:error, :unauthorized}` if from_user is not the owner of the entity.
## Effects
After the ownership transfer, the previous owner (`from_user`) becomes a
member of the entity with `:admin` permissions.
## Examples
iex> transfer_ownership(entity, from_user, to_user)
{:ok, %Cashtrail.Entities.Entity{}}
iex> transfer_ownership(entity, from_user, to_user)
{:error, %Ecto.Changeset{source: %Cashtrail.Entities.Entity{}}}
iex> transfer_ownership(entity, invalid_from, to_user)
{:error, :unauthorized}
"""
@spec transfer_ownership(entity, user, user) ::
{:error, :unauthorized} | {:ok, entity()}
def transfer_ownership(
%Entities.Entity{} = entity,
%Users.User{id: from_user_id} = from_user,
%Users.User{id: to_user_id} = to_user
) do
if entity.owner_id == from_user_id do
changeset = Entities.Entity.transfer_changeset(entity, %{owner_id: to_user_id})
with {:ok, entity} <- Repo.update(changeset) do
remove_member(entity, to_user)
add_member(entity, from_user, :admin)
{:ok, entity}
end
else
{:error, :unauthorized}
end
end
@doc """
Returns a `boolean` that says if the entity belongs to the user.
## Arguments
* user - The `%Cashtrail.Users.User{}` to check ownership for.
## Examples
iex> belongs_to?(%Cashtrail.Entities.Entity{owner_id: "aaa"}, %Cashtrail.Users.User{id: "aaa"})
true
iex> belongs_to?(%Cashtrail.Entities.Entity{owner_id: "bbb"}, %Cashtrail.Users.User{id: "aaa"})
false
"""
@spec belongs_to?(entity, user) :: boolean
def belongs_to?(%Entities.Entity{owner_id: owner_id}, %Users.User{id: user_id}) do
owner_id == user_id
end
@doc """
Returns a `%Cashtrail.Paginator.Page{}` struct with a list of entity_members in the
`:entries` field.
## Arguments
* options - A `keyword` list of the following options:
* `:filter` => filters by following attributes:
* `:permission` or `"permission"`
* `:search` => search members by the user's name or email.
* See `Cashtrail.Paginator.paginate/2` to see pagination options.
See `Cashtrail.Entities.EntityMember` to have more detailed info about the
fields to be filtered or searched.
## Examples
iex> list_members(entity)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.EntityMember{}, ...]}
iex> list_members(entity, filter: %{permission: :read})
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.EntityMember{permission: :read}, ...]}
iex> list_members(entity, search: "my")
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Entities.EntityMember{user: %Cashtrail.Users.User{name: "<NAME>"}}, ...]}
"""
@spec list_members(entity, keyword | map) :: Paginator.Page.t(entity_member)
def list_members(%Entities.Entity{id: entity_id}, options \\ []) do
from(Entities.EntityMember, where: [entity_id: ^entity_id])
|> build_filter(Keyword.get(options, :filter), [:permission])
|> search_members(Keyword.get(options, :search))
|> Paginator.paginate(options)
end
defp search_members(query, term) when is_binary(term) do
term = "%#{term}%"
from q in query,
join: u in assoc(q, :user),
where: ilike(u.first_name, ^term) or ilike(u.last_name, ^term) or ilike(u.email, ^term)
end
defp search_members(query, _), do: query
@doc """
Creates an entity_member for the entity.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the member will be created.
* params - A `map` with the params of the user to be created:
* `:permission` (required) - a `string` with the permission that will be given
to the member. It can be: `:read`, `:write` or `:admin`.
* `:user_id` - A `string` with a reference to one `Cashtrail.Users.User` to
be added as a member to the entity.
* `:user` - A `map` of the `Cashtrail.Users.User` that should be created as a
member of the entity. See `Cashtrail.Users.create_user/1` docs to know more
about the accepted params.
See `Cashtrail.Entities.EntityMember` to have more detailed info about the fields.
## Returns
* `{:ok, %Cashtrail.Entities.EntityMember{}}` in case of success.
* `{:error, %Ecto.Changeset{}}` in case of error.
## Examples
iex> create_member(entity, %{field: value})
{:ok, %Cashtrail.Entities.EntityMember{}}
iex> create_member(entity, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec create_member(entity, map) ::
{:ok, entity_member} | {:error, Ecto.Changeset.t(entity_member)}
def create_member(%Entities.Entity{} = entity, attrs) do
email = get_in(attrs, [:user, :email]) || get_in(attrs, ["user", "email"])
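# If a user with this email already exists, link the member to it by id; if
# the lookup reports an invalid email, force validation errors on the nested
# user; otherwise leave the attrs as given so a new nested user is created.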
attrs =
case Users.get_user_by(email: email) do
%Users.User{} = user ->
attrs |> Map.delete(:user) |> Map.delete("user") |> Map.put(:user_id, user.id)
{:error, :invalid_email} ->
attrs |> Map.put(:user, %{})
_ ->
attrs
end
entity
|> Ecto.build_assoc(:members)
|> Entities.EntityMember.changeset(attrs)
|> Repo.insert()
end
@doc """
Add a user as an entity_member for the entity giving permission.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the member will be added.
* user - A `%Cashtrail.Users.User{}` that is the user to be added as a member.
The user cannot be the owner of the entity; otherwise an error is returned.
* permission - A `string` with the permission that will be given to the member.
It can be: `:read`, `:write` or `:admin`.
See `Cashtrail.Entities.EntityMember` to have more detailed info about the
permissions.
## Returns
* `{:ok, %Cashtrail.Entities.EntityMember{}}` in case of success.
* `{:error, :invalid}` if the user is the owner of the entity.
* `{:error, %Ecto.Changeset{}}` in case of error.
## Examples
iex> add_member(entity, user)
{:ok, %Cashtrail.Entities.EntityMember{}}
iex> add_member(entity, invalid_user)
{:error, %Ecto.Changeset{}}
"""
@spec add_member(entity, user, String.t() | entity_member_permission) ::
{:ok, entity_member} | {:error, :invalid | Ecto.Changeset.t(entity_member)}
def add_member(entity, user, permission \\ :read)
def add_member(%Entities.Entity{owner_id: owner_id}, %Users.User{id: user_id}, _)
when owner_id == user_id do
{:error, :invalid}
end
def add_member(%Entities.Entity{} = entity, %Users.User{id: user_id}, permission) do
entity
|> Ecto.build_assoc(:members)
|> Entities.EntityMember.changeset(%{user_id: user_id, permission: permission})
|> Repo.insert()
end
@doc """
Removes an entity_member from the entity.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the member will be removed.
* user - A `%Cashtrail.Users.User{}` that is the user to be removed as a member
of the given entity.
## Returns
* `{:ok, %Cashtrail.Entities.EntityMember{}}` in case of success.
* `{:error, :not_found}` if the user is not a member of the entity.
## Examples
iex> remove_member(entity, user)
{:ok, %Cashtrail.Entities.EntityMember{}}
iex> remove_member(entity, non_member_user)
{:error, :not_found}
"""
@spec remove_member(entity, user) ::
{:ok, entity_member} | {:error, :not_found}
def remove_member(%Entities.Entity{} = entity, %Users.User{} = user) do
case member_from_user(entity, user) do
%Entities.EntityMember{} = entity_member -> Repo.delete(entity_member)
_ -> {:error, :not_found}
end
end
@doc """
Updates the member's permission.
If the user is not a member or is the owner, it returns an error. The owner will always
have admin permission.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the member will have the
permissions updated.
* user - A `%Cashtrail.Users.User{}` that is the user to have the permissions
updated.
* permission - A `string` with the permission that will be given
to the member. It can be: `:read`, `:write` or `:admin`.
## Returns
* `{:ok, %Cashtrail.Entities.EntityMember{}}` in case of success.
* `{:error, :invalid}` if the user is the owner of the entity.
* `{:error, :not_found}` if the user is not a member of the entity.
* `{:error, %Ecto.Changeset{}}` in case of validation errors.
## Examples
iex> update_member_permission(entity, user, "write")
{:ok, %EntityMember{}}
iex> update_member_permission(entity, user, :write)
{:ok, %EntityMember{}}
iex> update_member_permission(entity, user, :invalid)
{:error, %Ecto.Changeset{}}
iex> update_member_permission(entity, owner, :write)
{:error, :invalid}
iex> update_member_permission(entity, another_user, :write)
{:error, :not_found}
"""
@spec update_member_permission(entity, user, String.t() | entity_member_permission) ::
{:ok, entity_member} | {:error, Ecto.Changeset.t(entity_member) | :invalid | :not_found}
def update_member_permission(
%Entities.Entity{owner_id: owner_id} = entity,
%Users.User{id: user_id} = user,
permission
) do
case member_from_user(entity, user) do
%Entities.EntityMember{} = entity_member ->
entity_member
|> Entities.EntityMember.changeset(%{permission: permission})
|> Repo.update()
_ when owner_id == user_id ->
{:error, :invalid}
_ ->
{:error, :not_found}
end
end
@doc """
Returns the member's permission as an atom, or `:unauthorized` if the member
is not found. If the user is the owner, the permission is `:admin`.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the member belongs.
* user - The `%Cashtrail.Users.User{}` to know the permission.
## Examples
iex> get_member_permission(entity, user)
:admin
iex> get_member_permission(entity, another_user)
:unauthorized
"""
@spec get_member_permission(entity, user) :: atom
def get_member_permission(
%Entities.Entity{owner_id: owner_id} = entity,
%Users.User{id: user_id} = user
) do
case member_from_user(entity, user) do
%Entities.EntityMember{} = entity_member ->
entity_member.permission
_ when owner_id == user_id ->
:admin
_ ->
:unauthorized
end
end
@doc """
Returns the `%Cashtrail.Entities.EntityMember{}` for the given user and entity.
Returns `nil` if the user is not a member of the entity or is the owner.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the member belongs.
* user - The `%Cashtrail.Users.User{}` to have the entity_member found.
## Examples
iex> member_from_user(entity, user)
%Cashtrail.Entities.EntityMember{}
iex> member_from_user(entity, owner)
nil
iex> member_from_user(entity, non_member_user)
nil
"""
@spec member_from_user(entity, user) ::
entity_member | nil
def member_from_user(%Entities.Entity{id: entity_id}, %Users.User{id: user_id}) do
Repo.get_by(Entities.EntityMember, entity_id: entity_id, user_id: user_id)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking entity_member changes.
## Arguments
* entity_member - The `%Cashtrail.Entities.EntityMember{}` to be tracked.
## Examples
iex> change_member(entity_member)
%Ecto.Changeset{source: %Cashtrail.Entities.EntityMember{}}
"""
@spec change_member(entity_member) :: Ecto.Changeset.t(entity_member)
def change_member(%Entities.EntityMember{} = entity_member) do
Entities.EntityMember.changeset(entity_member, %{})
end
end
|
apps/cashtrail/lib/cashtrail/entities.ex
| 0.81372 | 0.480722 |
entities.ex
|
starcoder
|
defmodule Game.Map do
@moduledoc """
Map out a zone
"""
alias Data.Exit
@doc """
Find the coordinates for each room in a zone and the size of the zone
1,1 is top left
"""
@spec size_of_map(Zone.t(), integer) ::
{{integer, integer}, {integer, integer}, [{{integer, integer}, Room.t()}]}
def size_of_map(zone, opts \\ [layer: 1])
def size_of_map(%{rooms: []}, _), do: {{0, 0}, {0, 0}, []}
def size_of_map(%{rooms: rooms}, opts) do
layer = Keyword.get(opts, :layer)
map =
rooms
|> Enum.filter(&(&1.map_layer == layer))
|> Enum.map(&{{&1.x, &1.y}, &1})
min_x = map |> Enum.min_by(fn {{x, _y}, _room} -> x end) |> elem(0) |> elem(0)
min_y = map |> Enum.min_by(fn {{_x, y}, _room} -> y end) |> elem(0) |> elem(1)
max_x = map |> Enum.max_by(fn {{x, _y}, _room} -> x end) |> elem(0) |> elem(0)
max_y = map |> Enum.max_by(fn {{_x, y}, _room} -> y end) |> elem(0) |> elem(1)
{{min_x, min_y}, {max_x, max_y}, map}
end
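# Example: a layer-1 zone with rooms at {1, 1} and {3, 2} (hypothetical) yields
# {{1, 1}, {3, 2}, [{{1, 1}, room_a}, {{3, 2}, room_b}]} - the bounds plus a
# coordinate-to-room list in rooms order.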
@doc """
Generate a sorted list of lists for a zone map
"""
@spec map(Zone.t()) :: [[Room.t()]]
def map(zone, opts \\ []) do
layer = Keyword.get(opts, :layer, 1)
buffer = Keyword.get(opts, :buffer, true)
{{min_x, min_y}, {max_x, max_y}, map} = size_of_map(zone, layer: layer)
{{min_x, min_y}, {max_x, max_y}} =
case buffer do
true -> {{min_x - 1, min_y - 1}, {max_x + 1, max_y + 1}}
false -> {{min_x, min_y}, {max_x, max_y}}
end
for y <- min_y..max_y do
for x <- min_x..max_x do
case Enum.find(map, fn {coords, _room} -> coords == {x, y} end) do
{{^x, ^y}, room} -> {{x, y}, room}
_ -> {{x, y}, nil}
end
end
end
end
@doc """
Determine which layers are in a map
"""
@spec layers_in_map(Zone.t()) :: [integer]
def layers_in_map(zone) do
zone.rooms
|> Enum.map(& &1.map_layer)
|> Enum.uniq()
end
@doc """
Generate a text view of the zone
"""
@spec display_map(Zone.t(), {integer, integer, integer}, Keyword.t()) :: [String.t()]
def display_map(zone, {x, y, layer}, opts \\ []) do
zone
|> map(layer: layer, buffer: false)
|> mini_map({x, y}, Keyword.get(opts, :mini, false))
|> Enum.map(fn row ->
row |> Enum.map(&display_room(&1, {x, y}))
end)
|> join_rooms()
|> Enum.map(&" #{&1}")
|> Enum.join("\n")
|> String.replace(~r/-\+-/, "---")
|> String.replace("[", "\\[")
|> String.replace("]", "\\]")
end
@doc """
Create a mini map of the zone, restricted to within 2 spaces of the player.
"""
@spec mini_map(list, {integer, integer}, mini :: boolean) :: list
def mini_map(zone, {x, y}, true) do
{min_x, max_x} = {x - 2, x + 2}
{min_y, max_y} = {y - 2, y + 2}
zone
|> Enum.map(fn row ->
row
|> Enum.filter(fn {{x, y}, _} ->
min_x <= x && x <= max_x && min_y <= y && y <= max_y
end)
end)
|> Enum.reject(&(&1 == []))
end
def mini_map(zone, _, false), do: zone
@doc """
Determine what the room looks like with its walls
"""
def display_room({_, nil}, _) do
[
" ",
" ",
" "
]
end
def display_room({_, room}, coords) do
room_display =
room
|> _display_room(coords)
|> color_room(room_color(room))
[
"#{exits(room, "north west")}#{exits(room, "north")}#{exits(room, "north east")}",
"#{exits(room, "west")}#{room_display}#{exits(room, "east")}",
"#{exits(room, "south west")}#{exits(room, "south")}#{exits(room, "south east")}"
]
end
defp _display_room(%{x: x, y: y}, {x, y}), do: "[X]"
defp _display_room(%{}, _), do: "[ ]"
defp color_room(room_string, nil), do: room_string
defp color_room(room_string, color), do: "{#{color}}#{room_string}{/#{color}}"
defp exits(room, direction) when direction in ["north", "south"] do
case Exit.exit_to(room, direction) do
nil ->
" "
_ ->
" | "
end
end
defp exits(room, direction) when direction in ["north west", "south east"] do
case Exit.exit_to(room, direction) do
nil ->
" "
%{direction: "north west"} ->
"\\ "
%{direction: "south east"} ->
" \\"
end
end
defp exits(room, direction) when direction in ["north east", "south west"] do
case Exit.exit_to(room, direction) do
nil ->
" "
%{direction: "north east"} ->
" /"
%{direction: "south west"} ->
"/ "
end
end
defp exits(room, direction) when direction in ["east", "west"] do
case Exit.exit_to(room, direction) do
nil ->
" "
%{direction: "east"} ->
" -"
%{direction: "west"} ->
"- "
end
end
@doc """
Join the map together.
The map is a list of rows of rooms; each room row is 3 lines.
"""
def join_rooms(rows) do
rows
|> Enum.map(fn row ->
Enum.reduce(row, [], &join_row/2)
end)
|> Enum.reduce([], &join_rows/2)
end
@doc """
Join an individual row of rooms together
"""
def join_row(room, []), do: room
def join_row(room, row) do
row
|> Enum.with_index()
|> Enum.map(fn {line, i} ->
[current_point | line] = line |> String.graphemes() |> Enum.reverse()
line = line |> Enum.reverse()
[room_point | room_line] = room |> Enum.at(i) |> String.graphemes()
point = join_point(current_point, room_point)
(line ++ [point] ++ room_line)
|> Enum.join()
end)
end
@doc """
Join each row of rooms together
Each row of rooms is 3 lines long. The first line of the newly added row
is joined against the last line of the set.
"""
def join_rows(row, []), do: row
def join_rows(row, rows) do
[line_1 | [line_2 | [line_3]]] = row
[last_line | rows] = rows |> Enum.reverse()
rows = rows |> Enum.reverse()
last_line = join_line(last_line, line_1)
rows ++ [last_line | [line_2 | [line_3]]]
end
@doc """
Join a line together, zipping each point
"""
def join_line(last_line, new_line) do
new_line = new_line |> String.graphemes()
last_line
|> String.graphemes()
|> Enum.with_index()
|> Enum.map(fn {point, i} ->
join_point(point, Enum.at(new_line, i))
end)
end
@doc """
Join a point in the map together
iex> Game.Map.join_point(" ", " ")
" "
iex> Game.Map.join_point("|", "+")
"+"
iex> Game.Map.join_point(" ", "+")
"+"
iex> Game.Map.join_point("+", " ")
"+"
iex> Game.Map.join_point(" ", "-")
"-"
iex> Game.Map.join_point("-", " ")
"-"
iex> Game.Map.join_point("|", " ")
"|"
iex> Game.Map.join_point("|", " ")
"|"
"""
@spec join_point(String.t(), String.t()) :: String.t()
def join_point(" ", " "), do: " "
def join_point(" ", point), do: point
def join_point(point, " "), do: point
def join_point(_, point), do: point
@doc """
Determine the color of the room in the map
iex> Game.Map.room_color(%{ecology: "default"})
"map:default"
"""
@spec room_color(room :: Room.t()) :: String.t()
def room_color(room)
def room_color(%{ecology: ecology}) do
case ecology do
ecology when ecology in ["ocean", "lake", "river"] -> "map:blue"
ecology when ecology in ["mountain", "road"] -> "map:brown"
ecology when ecology in ["hill", "field", "meadow"] -> "map:green"
ecology when ecology in ["forest", "jungle"] -> "map:dark-green"
ecology when ecology in ["town", "dungeon"] -> "map:grey"
ecology when ecology in ["inside"] -> "map:light-grey"
_ -> "map:default"
end
end
def room_color(_room), do: nil
end
|
lib/game/map.ex
| 0.80969 | 0.654757 |
map.ex
|
starcoder
|
defmodule AsNestedSet.Modifiable do
@type position :: :left | :right | :child | :parent | :root
import Ecto.Query
import AsNestedSet.Helper
@spec create(AsNestedSet.t, AsNestedSet.t | nil, position) :: AsNestedSet.executable
def create(new_model, target \\ nil, position) when is_atom(position) do
fn repo ->
case validate_create(new_model, target, position) do
:ok -> do_safe_create(repo, new_model, do_reload(repo, target), position)
error -> error
end
end
end
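# Usage sketch: create/3 returns a closure expecting the repo, so a new node
# (hypothetical Taxon schema) can be inserted with:
#
#   executable = AsNestedSet.Modifiable.create(%Taxon{name: "child"}, parent, :child)
#   executable.(Repo)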
@spec reload(AsNestedSet.t) :: AsNestedSet.executable
def reload(model) do
fn repo ->
do_reload(repo, model)
end
end
defp validate_create(new_model, parent, position) do
cond do
parent == nil && position != :root -> {:error, :target_is_required}
position != :root && !AsNestedSet.Scoped.same_scope?(parent, new_model) -> {:error, :not_the_same_scope}
true -> :ok
end
end
defp do_safe_create(repo, %{__struct__: struct} = new_model, target, :left) do
left = get_field(target, :left)
left_column = get_column_name(target, :left)
right_column = get_column_name(target, :right)
# update all the left and right column
from(q in struct,
where: field(q, ^left_column) >= ^left,
update: [inc: ^[{left_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
from(q in struct,
where: field(q, ^right_column) > ^left,
update: [inc: ^[{right_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
parent_id_column = get_column_name(target, :parent_id)
parent_id = get_field(target, :parent_id)
# insert the new model
new_model
|> struct.changeset(Map.new([
{left_column, left},
{right_column, left + 1},
{parent_id_column, parent_id}
]))
|> repo.insert!
end
defp do_safe_create(repo, %{__struct__: struct} = new_model, target, :right) do
right = get_field(target, :right)
left_column = get_column_name(target, :left)
# update all the left and right column
from(q in struct,
where: field(q, ^left_column) > ^right,
update: [inc: ^[{left_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
right_column = get_column_name(target, :right)
from(q in struct,
where: field(q, ^right_column) > ^right,
update: [inc: ^[{right_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
parent_id_column = get_column_name(target, :parent_id)
parent_id = get_field(target, :parent_id)
# insert new model
new_model
|> struct.changeset(Map.new([
{left_column, right + 1},
{right_column, right + 2},
{parent_id_column, parent_id}
]))
|> repo.insert!
end
defp do_safe_create(repo, %{__struct__: struct} = new_model, target, :child) do
left_column = get_column_name(target, :left)
right = get_field(target, :right)
from(q in struct,
where: field(q, ^left_column) > ^right,
update: [inc: ^[{left_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
right_column = get_column_name(target, :right)
from(q in struct,
where: field(q, ^right_column) >= ^right,
update: [inc: ^[{right_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
parent_id_column = get_column_name(target, :parent_id)
node_id = get_field(target, :node_id)
new_model
|> struct.changeset(Map.new([
{left_column, right},
{right_column, right + 1},
{parent_id_column, node_id}
]))
|> repo.insert!
end
defp do_safe_create(repo, %{__struct__: struct} = new_model, _target, :root) do
right_most = AsNestedSet.Queriable.right_most(struct, new_model).(repo) || -1
new_model = new_model
|> set_field(:left, right_most + 1)
|> set_field(:right, right_most + 2)
|> set_field(:parent_id, nil)
|> repo.insert!
new_model
end
defp do_safe_create(repo, %{__struct__: struct} = new_model, target, :parent) do
right = get_field(target, :right)
left = get_field(target, :left)
right_column = get_column_name(target, :right)
from(q in struct,
where: field(q, ^right_column) > ^right,
update: [inc: ^[{right_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
left_column = get_column_name(target, :left)
from(q in struct,
where: field(q, ^left_column) > ^right,
update: [inc: ^[{left_column, 2}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
from(q in struct,
where: field(q, ^left_column) >= ^left and field(q, ^right_column) <= ^right,
update: [inc: ^[{right_column, 1}, {left_column, 1}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
parent_id = get_field(target, :parent_id)
new_model = new_model
|> set_field(:left, left)
|> set_field(:right, right + 2)
|> set_field(:parent_id, parent_id)
|> repo.insert!
node_id = get_field(target, :node_id)
node_id_column = get_column_name(target, :node_id)
parent_id_column = get_column_name(target, :parent_id)
new_model_id = get_field(new_model, :node_id)
from(q in struct,
where: field(q, ^node_id_column) == ^node_id,
update: [set: ^[{parent_id_column, new_model_id}]]
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.update_all([])
new_model
end
defp do_reload(_repo, nil), do: nil
defp do_reload(repo, %{__struct__: struct} = target) do
node_id = get_field(target, :node_id)
node_id_column = get_column_name(target, :node_id)
from(q in struct,
where: field(q, ^node_id_column) == ^node_id,
limit: 1
)
|> AsNestedSet.Scoped.scoped_query(target)
|> repo.one
end
@spec delete(AsNestedSet.t) :: AsNestedSet.executable
def delete(%{__struct__: struct} = model) do
fn repo ->
left = get_field(model, :left)
right = get_field(model, :right)
width = right - left + 1
left_column = get_column_name(model, :left)
right_column = get_column_name(model, :right)
from(q in struct,
where: field(q, ^left_column) >= ^left and field(q, ^left_column) <= ^right
)
|> AsNestedSet.Scoped.scoped_query(model)
|> repo.delete_all([])
from(q in struct,
where: field(q, ^right_column) > ^right,
update: [inc: ^[{right_column, -width}]]
)
|> AsNestedSet.Scoped.scoped_query(model)
|> repo.update_all([])
from(q in struct,
where: field(q, ^left_column) > ^right,
update: [inc: ^[{left_column, -width}]]
)
|> AsNestedSet.Scoped.scoped_query(model)
|> repo.update_all([])
end
end
@spec move(AsNestedSet.t, AsNestedSet.t | nil, position) :: AsNestedSet.executable
@spec move(AsNestedSet.t, :root) :: AsNestedSet.executable
def move(%{__struct__: _} = model, target \\ nil, position) when is_atom(position) do
fn repo ->
model = do_reload(repo, model)
case validate_move(model, target, position) do
:ok -> do_safe_move(repo, model, do_reload(repo, target), position)
error -> error
end
end
end
defp validate_move(model, target, position) do
cond do
target == nil && position != :root -> {:error, :target_is_required}
position == :parent -> {:error, :cannot_move_to_parent}
target != nil && get_field(model, :left) <= get_field(target, :left) && get_field(model, :right) >= get_field(target, :right) -> {:error, :within_the_same_tree}
position != :root && !AsNestedSet.Scoped.same_scope?(target, model) -> {:error, :not_the_same_scope}
true -> :ok
end
end
defp do_safe_move(repo, model, target, position) do
if target != nil && get_field(model, :node_id) == get_field(target, :node_id) do
model
else
target_bound = target_bound(repo, model, target, position)
left = get_field(model, :left)
right = get_field(model, :right)
case get_boundaries(model, target_bound) do
{bound, other_bound} ->
do_switch(repo, model, {left, right, bound, other_bound}, new_parent_id(target, position))
:no_operation ->
model
end
end
end
def target_bound(repo, model, target, position) do
case position do
:child -> get_field(target, :right)
:left -> get_field(target, :left)
:right -> get_field(target, :right) + 1
:root -> AsNestedSet.right_most(model).(repo) + 1
end
end
def get_boundaries(model, target_bound) do
left = get_field(model, :left)
right = get_field(model, :right)
cond do
target_bound - 1 >= right + 1 ->
{right + 1, target_bound - 1}
target_bound <= left - 1 ->
{target_bound, left - 1}
true ->
:no_operation
end
end
defp new_parent_id(target, position) do
case position do
:child -> get_field(target, :node_id)
:left -> get_field(target, :parent_id)
:right -> get_field(target, :parent_id)
:root -> nil
end
end
defp do_switch(repo, %{__struct__: struct} = model, boundaries, new_parent_id) do
# As we checked the boundaries, the two intervals are non-overlapping
[a, b, c, d] = boundaries |> Tuple.to_list() |> Enum.sort()
node_id = get_field(model, :node_id)
node_id_column = get_column_name(model, :node_id)
parent_id_column = get_column_name(model, :parent_id)
# shift the left part to the temporary position (negative space)
do_shift(repo, model, {a, b}, -b - 1)
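# move the other interval into the space the first shift vacated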
do_shift(repo, model, {c, d}, a - c)
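# bring the parked interval back out of negative space to its final position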
do_shift(repo, model, {a - b - 1, -1}, d + 1)
from(n in struct, where: field(n, ^node_id_column) == ^node_id, update: [set: ^[{parent_id_column, new_parent_id}]])
|> AsNestedSet.Scoped.scoped_query(model)
|> repo.update_all([])
do_reload(repo, model)
end
defp do_shift(repo, %{__struct__: struct} = model, {left, right}, delta) do
left_column = get_column_name(model, :left)
right_column = get_column_name(model, :right)
from(struct)
|> where([n], field(n, ^left_column) >= ^left and field(n, ^left_column) <= ^right)
|> update([n], [inc: ^[{left_column, delta}]])
|> AsNestedSet.Scoped.scoped_query(model)
|> repo.update_all([])
from(struct)
|> where([n], field(n, ^right_column) >= ^left and field(n, ^right_column) <= ^right)
|> update([n], [inc: ^[{right_column, delta}]])
|> AsNestedSet.Scoped.scoped_query(model)
|> repo.update_all([])
end
end
|
lib/as_nested_set/modifiable.ex
| 0.67854 | 0.42913 |
modifiable.ex
|
starcoder
|
defmodule Date.Range do
@moduledoc """
Returns an inclusive range between dates.
Ranges must be created with the `Date.range/2` or `Date.range/3` function.
The following fields are public:
* `:first` - the initial date on the range
* `:last` - the last date on the range
* `:step` - (since v1.12.0) the step
The remaining fields are private and should not be accessed.
"""
@type t :: %__MODULE__{
first: Date.t(),
last: Date.t(),
first_in_iso_days: iso_days(),
last_in_iso_days: iso_days(),
step: pos_integer | neg_integer
}
@typep iso_days() :: Calendar.iso_days()
defstruct [:first, :last, :first_in_iso_days, :last_in_iso_days, :step]
defimpl Enumerable do
def member?(%{first: %{calendar: calendar}} = range, %Date{calendar: calendar} = date) do
%{
first_in_iso_days: first_days,
last_in_iso_days: last_days,
step: step
} = range
{days, _} = Date.to_iso_days(date)
cond do
empty?(range) ->
{:ok, false}
first_days <= last_days ->
{:ok, first_days <= days and days <= last_days and rem(days - first_days, step) == 0}
true ->
{:ok, last_days <= days and days <= first_days and rem(days - first_days, step) == 0}
end
end
def member?(_, _) do
{:ok, false}
end
def count(range) do
{:ok, size(range)}
end
def slice(range) do
%{
first_in_iso_days: first,
first: %{calendar: calendar},
step: step
} = range
{:ok, size(range), &slice(first + &1 * step, step, &2, calendar)}
end
defp slice(current, _step, 1, calendar) do
[date_from_iso_days(current, calendar)]
end
defp slice(current, step, remaining, calendar) do
[
date_from_iso_days(current, calendar)
| slice(current + step, step, remaining - 1, calendar)
]
end
def reduce(range, acc, fun) do
%{
first_in_iso_days: first_days,
last_in_iso_days: last_days,
first: %{calendar: calendar},
step: step
} = range
reduce(first_days, last_days, acc, fun, step, calendar)
end
defp reduce(_first_days, _last_days, {:halt, acc}, _fun, _step, _calendar) do
{:halted, acc}
end
defp reduce(first_days, last_days, {:suspend, acc}, fun, step, calendar) do
{:suspended, acc, &reduce(first_days, last_days, &1, fun, step, calendar)}
end
defp reduce(first_days, last_days, {:cont, acc}, fun, step, calendar)
when step > 0 and first_days <= last_days
when step < 0 and first_days >= last_days do
reduce(
first_days + step,
last_days,
fun.(date_from_iso_days(first_days, calendar), acc),
fun,
step,
calendar
)
end
defp reduce(_, _, {:cont, acc}, _fun, _step, _calendar) do
{:done, acc}
end
defp date_from_iso_days(days, Calendar.ISO) do
{year, month, day} = Calendar.ISO.date_from_iso_days(days)
%Date{year: year, month: month, day: day, calendar: Calendar.ISO}
end
defp date_from_iso_days(days, calendar) do
{year, month, day, _, _, _, _} =
calendar.naive_datetime_from_iso_days({days, {0, 86_400_000_000}})
%Date{year: year, month: month, day: day, calendar: calendar}
end
defp size(%Date.Range{first_in_iso_days: first_days, last_in_iso_days: last_days, step: step})
when step > 0 and first_days > last_days,
do: 0
defp size(%Date.Range{first_in_iso_days: first_days, last_in_iso_days: last_days, step: step})
when step < 0 and first_days < last_days,
do: 0
defp size(%Date.Range{first_in_iso_days: first_days, last_in_iso_days: last_days, step: step}),
do: abs(div(last_days - first_days, step)) + 1
defp empty?(%Date.Range{
first_in_iso_days: first_days,
last_in_iso_days: last_days,
step: step
})
when step > 0 and first_days > last_days,
do: true
defp empty?(%Date.Range{
first_in_iso_days: first_days,
last_in_iso_days: last_days,
step: step
})
when step < 0 and first_days < last_days,
do: true
defp empty?(%Date.Range{}), do: false
end
defimpl Inspect do
def inspect(%Date.Range{first: first, last: last, step: 1}, _) do
"#DateRange<" <> inspect(first) <> ", " <> inspect(last) <> ">"
end
def inspect(%Date.Range{first: first, last: last, step: step}, _) do
"#DateRange<" <> inspect(first) <> ", " <> inspect(last) <> ", #{step}>"
end
end
end
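# Usage sketch (ranges are built via Date.range/2 and Date.range/3):
#
#   range = Date.range(~D[2024-01-01], ~D[2024-01-10])
#   Enum.count(range)                   #=> 10
#   Enum.member?(range, ~D[2024-01-05]) #=> true
#   Date.range(~D[2024-01-10], ~D[2024-01-01], -1) |> Enum.take(2)
#   #=> [~D[2024-01-10], ~D[2024-01-09]]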
|
lib/elixir/lib/calendar/date_range.ex
| 0.870735 | 0.688128 |
date_range.ex
|
starcoder
|
defmodule Chaperon.Scenario.Metrics do
@moduledoc """
This module calculates histogram data for a session's metrics.
It uses the `Histogrex` library to calculate the histograms.
"""
use Histogrex
alias __MODULE__
alias Chaperon.Session
template(:durations, min: 1, max: 10_000_000, precision: 3)
@type metric :: atom | {atom, any}
@type metric_type :: atom
@type filter :: (metric -> boolean) | MapSet.t(metric_type)
@spec config(Keyword.t()) :: filter
def config(options) do
options
|> Keyword.get(:metrics, nil)
|> filter
end
def filter(%{filter: f}), do: filter(f)
def filter(f) when is_function(f), do: f
def filter(types) when is_list(types), do: MapSet.new(types)
def filter(_), do: nil
@doc """
Replaces base metrics for a given `session` with the histogram values for them.
"""
@spec add_histogram_metrics(Session.t()) :: Session.t()
def add_histogram_metrics(session) do
metrics = histogram_metrics(session)
reset()
%{session | metrics: metrics}
end
def reset do
Metrics.delete(:durations)
Metrics.reduce(:ok, fn {name, _}, _ ->
Metrics.delete(:durations, name)
end)
end
@doc false
def histogram_metrics(session = %Session{}) do
use Chaperon.Session.Logging
session
|> log_info("Recording histograms:")
|> record_histograms
Metrics.reduce([], fn {name, hist}, tasks ->
t =
Task.async(fn ->
session
|> log_info(inspect(name))
{name, histogram_vals(hist)}
end)
[t | tasks]
end)
|> Enum.map(&Task.await(&1, :infinity))
|> Enum.into(%{})
end
@percentiles [
10.0,
20.0,
30.0,
40.0,
50.0,
60.0,
75.0,
80.0,
85.0,
90.0,
95.0,
99.0,
99.9,
99.99,
99.999
]
def percentiles, do: @percentiles
@doc false
def histogram_vals({k, hist}) do
{k, histogram_vals(hist)}
end
def histogram_vals(hist) do
hist
|> percentiles
|> Map.merge(%{
:total_count => Metrics.total_count(hist),
:min => Metrics.min(hist),
:mean => Metrics.mean(hist),
:max => Metrics.max(hist)
})
end
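# The map returned above looks like (illustrative numbers):
# %{:total_count => 120, :min => 3, :mean => 41.5, :max => 980,
#   {:percentile, 95.0} => 310, ...}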
def percentiles(hist) do
@percentiles
|> Enum.map(&{{:percentile, &1}, Metrics.value_at_quantile(hist, &1)})
|> Enum.into(%{})
end
@doc false
def record_histograms(session) do
session.metrics
|> Enum.each(fn {k, v} ->
record_metric(k, v)
end)
end
def passes_filter?(types, type) do
MapSet.member?(types, type)
end
@doc false
def record_metric(_, []), do: :ok
def record_metric(k, [v | vals]) do
record_metric(k, v)
record_metric(k, vals)
end
@doc false
def record_metric(k, {:async, _name, val}) when is_number(val) do
record_metric(k, val)
end
@doc false
def record_metric(k, val) when is_number(val) do
Metrics.record!(:durations, k, val)
end
end
|
lib/chaperon/scenario/metrics.ex
| 0.900662 | 0.512449 |
metrics.ex
|
starcoder
|
defmodule Assoc.Schema do
@moduledoc """
## Usage
```
defmodule MyApp.User do
use MyApp.Schema
use Assoc.Schema, repo: MyApp.Repo
schema "users" do
field :email, :string
field :name, :string
has_many :user_roles, MyApp.UserRole, on_delete: :delete_all, on_replace: :delete
timestamps()
end
def updatable_associations, do: [
user_roles: MyApp.UserRole
]
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:email, :name])
|> validate_required([:email])
end
end
```
Key points:
- The `use Assoc.Schema` line should come after `use MyApp.Schema`.
- Pass the app's Repo into `use Assoc.Schema, repo: MyApp.Repo`
- Define a `updatable_associations` function. For each updatable association:
- The `key` should be the association name
- The `value` should be the association schema module
- The standard `changeset` function does not change.
- Include all the standard code for updating struct values (e.g. name, email) in `changeset`
- The library will create and use a separate `associations_changeset` to manage the associations
"""
@callback updatable_associations :: Keyword.t()
defmacro __using__(repo: repo) do
quote do
@doc """
Preload all schema associations.
## Usage
```
MyApp.User.preload_all(user)
```
## Implementation
Builds a list of keys with `Ecto.Association.NotLoaded` values. Then
feeds the list into `Repo.preload`.
"""
def preload_all(struct) do
keys = struct
|> Map.from_struct
|> Enum.reduce([], fn ({key, value}, acc) ->
case value do
%Ecto.Association.NotLoaded{} -> [key|acc]
_ -> acc
end
end)
unquote(repo).preload(struct, keys)
end
@doc """
Update associations defined in `updatable_associations/0` callback.
"""
def associations_changeset(struct, params \\ %{}) do
struct = preload_associations(struct, updatable_associations())
params = include_existing_associations(struct, params)
struct
|> Ecto.Changeset.cast(params, [])
|> put_associations(updatable_associations(), params)
end
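# Example (hypothetical MyApp schema): replace a user's roles in one call:
#
#   user
#   |> MyApp.User.associations_changeset(%{user_roles: [%{role_id: 1}]})
#   |> MyApp.Repo.update()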
@doc """
Preload selected schema associations.
"""
def preload_associations(struct, associations) do
associations = case Keyword.keyword?(associations) do
true -> Keyword.keys(associations)
false -> associations
end
unquote(repo).preload(struct, associations)
end
# Include existing associations in params by merging params into preload struct
defp include_existing_associations(struct, params) do
struct
|> Map.from_struct
|> Map.merge(params)
end
# Dynamically adds `put_assoc` calls to changeset
defp put_associations(changeset, associations, params) do
Enum.reduce(associations, changeset, fn ({key, _}, acc) ->
value = params
|> Assoc.Util.keys_to_atoms()
|> Map.get(key, :omitted)
case value do
:omitted -> acc
value -> Ecto.Changeset.put_assoc(acc, key, value)
end
end)
end
end
end
end
|
lib/assoc/schema.ex
| 0.812756 | 0.714616 |
schema.ex
|
starcoder
|
defmodule Freddy.Publisher do
@moduledoc """
A behaviour module for implementing AMQP publisher processes.
The `Freddy.Publisher` module provides a way to create processes that hold,
monitor, and restart a channel in case of failure. It exports a function to
publish messages to an exchange and defines callbacks to hook into the process
lifecycle.
An example `Freddy.Publisher` process that only sends every other message:
defmodule MyPublisher do
use Freddy.Publisher
def start_link(conn, config, opts \\ []) do
Freddy.Publisher.start_link(__MODULE__, conn, config, :ok, opts)
end
def publish(publisher, payload, routing_key) do
Freddy.Publisher.publish(publisher, payload, routing_key)
end
def init(:ok) do
{:ok, %{last_ignored: false}}
end
def before_publication(_payload, _routing_key, _opts, %{last_ignored: false}) do
{:ignore, %{last_ignored: true}}
end
def before_publication(_payload, _routing_key, _opts, %{last_ignored: true}) do
{:ok, %{last_ignored: false}}
end
end
## Channel handling
When the `Freddy.Publisher` starts with `start_link/5` it runs the `init/1` callback
and responds with `{:ok, pid}` on success, like a `GenServer`.
After starting the process it attempts to open a channel on the given connection.
It monitors the channel, and in case of failure it tries to reopen again and again
on the same connection.
## Context setup
The context setup process for a publisher is to declare its exchange.
Every time a channel is opened the context is set up, meaning that the exchange
is declared through the new channel based on the given configuration.
The configuration must be a `Keyword.t` that contains a single key: `:exchange`
whose value is the configuration for the `Freddy.Core.Exchange`.
Check `Freddy.Core.Exchange` for more detailed information.
"""
use Freddy.Core.Actor, exchange: nil
@type routing_key :: String.t()
@type connection_info :: %{channel: Freddy.Core.Channel.t(), exchange: Freddy.Core.Exchange.t()}
@doc """
Called when the `Freddy.Publisher` process has opened an AMQP channel and declared an exchange.
First argument is a map, containing `:channel` and `:exchange` structures.
Returning `{:noreply, state}` will cause the process to enter the main loop
with the given state.
Returning `{:error, state}` will indicate that the process failed to perform some critical actions
and must reconnect.
Returning `{:stop, reason, state}` will terminate the main loop and call
`c:terminate/2` before the process exits with reason `reason`.
"""
@callback handle_connected(meta :: connection_info, state) ::
{:noreply, state}
| {:noreply, state, timeout | :hibernate}
| {:error, state}
| {:stop, reason :: term, state}
@doc """
Called before a message is encoded and published to the exchange.
It receives as arguments the message payload, the routing key, the options
for that publication and the internal state.
Returning `{:ok, state}` will cause the message to be sent with no
modification, and enter the main loop with the given state.
Returning `{:ok, payload, routing_key, opts, state}` will cause the
given payload, routing key and options to be used instead of the original
ones, and enter the main loop with the given state.
Returning `{:ignore, state}` will ignore that message and enter the main loop
again with the given state.
Returning `{:stop, reason, state}` will not send the message, terminate the
main loop and call `terminate(reason, state)` before the process exits with
reason `reason`.
"""
@callback before_publication(payload, routing_key, opts :: Keyword.t(), state) ::
{:ok, state}
| {:ok, payload, routing_key, opts :: Keyword.t(), state}
| {:ignore, state}
| {:stop, reason :: term, state}
@doc """
Called to encode a message just before it is published to the exchange.
It receives as arguments the message payload, the routing key, the options
for that publication and the internal state.
Returning `{:ok, string, state}` will cause the returned `string` to be
published to the exchange, and the process to enter the main loop with the
given state.
Returning `{:ok, string, routing_key, opts, state}` will cause the
given string, routing key and options to be used instead of the original
ones, and enter the main loop with the given state.
Returning `{:ignore, state}` will ignore that message and enter the main loop
again with the given state.
Returning `{:stop, reason, state}` will not send the message, terminate the
main loop and call `terminate(reason, state)` before the process exits with
reason `reason`.
"""
@callback encode_message(payload, routing_key, opts :: Keyword.t(), state) ::
{:ok, String.t(), state}
| {:ok, String.t(), routing_key, opts :: Keyword.t(), state}
| {:ignore, state}
| {:stop, reason :: term, state}
defmacro __using__(_opts \\ []) do
quote location: :keep do
@behaviour Freddy.Publisher
@impl true
def init(initial) do
{:ok, initial}
end
@impl true
def handle_connected(_meta, state) do
{:noreply, state}
end
@impl true
def handle_disconnected(_reason, state) do
{:noreply, state}
end
@impl true
def before_publication(_payload, _routing_key, _opts, state) do
{:ok, state}
end
@impl true
def encode_message(payload, routing_key, opts, state) do
case Jason.encode(payload) do
{:ok, new_payload} ->
opts = Keyword.put(opts, :content_type, "application/json")
{:ok, new_payload, routing_key, opts, state}
{:error, reason} ->
{:stop, reason, state}
end
end
@impl true
def handle_call(message, _from, state) do
{:stop, {:bad_call, message}, state}
end
@impl true
def handle_cast(message, state) do
{:stop, {:bad_cast, message}, state}
end
@impl true
def handle_info(_message, state) do
{:noreply, state}
end
@impl true
def terminate(_reason, _state) do
:ok
end
defoverridable Freddy.Publisher
end
end
@doc """
Publishes a message to an exchange through the `Freddy.Publisher` process or
from within the `Freddy.Publisher` process using the connection meta information.
When publishing from within the publisher process, the connection_info can be
obtained from `c:handle_connected/2` callback.
## Options
* `:mandatory` - If set, returns an error if the broker can't route the message
to a queue (default `false`);
* `:immediate` - If set, returns an error if the broker can't deliver the message
to a consumer immediately (default `false`);
* `:content_type` - MIME Content type;
* `:content_encoding` - MIME Content encoding;
* `:headers` - Message headers. Can be used with headers Exchanges;
* `:persistent` - If set, uses persistent delivery mode. Messages marked as
`persistent` that are delivered to `durable` queues will be logged to disk;
* `:correlation_id` - application correlation identifier;
* `:priority` - message priority, ranging from 0 to 9;
* `:reply_to` - name of the reply queue;
* `:expiration` - how long the message is valid (in milliseconds);
* `:message_id` - message identifier;
* `:timestamp` - timestamp associated with this message (epoch time);
* `:type` - message type as a string;
* `:user_id` - creating user ID. RabbitMQ will validate this against the active connection user;
* `:app_id` - publishing application ID.
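## Example
A minimal sketch, assuming the `MyPublisher` module from the example above:

    MyPublisher.publish(publisher, %{hello: "world"}, "my.routing.key")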
"""
@spec publish(
GenServer.server() | connection_info,
payload :: term,
routing_key :: String.t(),
opts :: Keyword.t()
) :: :ok
def publish(publisher, payload, routing_key \\ "", opts \\ [])
def publish(%{channel: channel, exchange: exchange} = _meta, payload, routing_key, opts) do
Freddy.Core.Exchange.publish(exchange, channel, payload, routing_key, opts)
end
def publish(publisher, payload, routing_key, opts) do
opts = Freddy.Tracer.add_context_to_opts(opts)
cast(publisher, {:"$publish", payload, routing_key, opts})
end
alias Freddy.Core.Exchange
@impl true
def handle_connected(meta, state(config: config) = state) do
case declare_exchange(meta, config) do
{:ok, %{channel: channel, exchange: exchange} = new_meta} ->
handle_mod_connected(new_meta, state(state, channel: channel, exchange: exchange))
{:error, :closed} ->
{:error, state}
{:error, reason} ->
{:stop, reason, state}
end
end
@impl true
def handle_cast({:"$publish", payload, routing_key, opts}, state) do
opts = Freddy.Tracer.attach_context_from_opts(opts)
handle_publish(payload, routing_key, opts, state)
end
def handle_cast(message, state) do
super(message, state)
end
defp declare_exchange(%{channel: channel} = meta, config) do
exchange =
config
|> Keyword.get(:exchange, Exchange.default())
|> Exchange.new()
with :ok <- Exchange.declare(exchange, channel) do
{:ok, Map.put(meta, :exchange, exchange)}
end
end
defp handle_publish(payload, routing_key, opts, state(mod: mod, given: given) = state) do
case mod.before_publication(payload, routing_key, opts, given) do
{:ok, new_given} ->
do_publish(payload, routing_key, opts, state(state, given: new_given))
{:ok, new_payload, new_routing_key, new_opts, new_given} ->
do_publish(new_payload, new_routing_key, new_opts, state(state, given: new_given))
{:ignore, new_given} ->
{:noreply, state(state, given: new_given)}
{:stop, reason, new_given} ->
{:stop, reason, state(state, given: new_given)}
end
end
defp do_publish(
payload,
routing_key,
opts,
state(channel: channel, exchange: exchange, mod: mod, given: given) = state
) do
case mod.encode_message(payload, routing_key, opts, given) do
{:ok, new_payload, new_given} ->
publish(%{exchange: exchange, channel: channel}, new_payload, routing_key, opts)
{:noreply, state(state, given: new_given)}
{:ok, new_payload, new_routing_key, new_opts, new_given} ->
publish(%{exchange: exchange, channel: channel}, new_payload, new_routing_key, new_opts)
{:noreply, state(state, given: new_given)}
{:ignore, new_given} ->
{:noreply, state(state, given: new_given)}
{:stop, reason, new_given} ->
{:stop, reason, state(state, given: new_given)}
end
end
end
|
lib/freddy/publisher.ex
| 0.925331 | 0.557544 |
publisher.ex
|
starcoder
|
defmodule Talib.SMMA do
alias Talib.SMA
@moduledoc ~S"""
Defines a Smoothed Moving Average.
## History
Version: 1.0
Source: http://www2.wealth-lab.com/WL5Wiki/SMMA.ashx
Audited by:
| Name | Title |
| :----------- | :---------------- |
| | |
"""
@typedoc """
Defines a Smoothed Moving Average.
* :period - The period of the SMMA
* :values - List of values resulting from the calculation
"""
@type t :: %Talib.SMMA{period: integer, values: [number]}
defstruct [
period: 0,
values: []
]
@doc """
Gets the SMMA of a list.
Returns `{:ok, smma}`, otherwise `{:error, reason}`.
## Examples
iex> Talib.SMMA.from_list([17, 23, 44], 2)
{:ok, %Talib.SMMA{
period: 2,
values: [nil, 20.0, 32.0]
}}
iex> Talib.SMMA.from_list([], 1)
{:error, :no_data}
iex> Talib.SMMA.from_list([17], 0)
{:error, :bad_period}
"""
@spec from_list([number], integer) :: {:ok, Talib.SMMA.t} | {:error, atom}
def from_list(data, period), do: calculate(data, period)
@doc """
Gets the SMMA of a list.
Raises `NoDataError` if the given list is an empty list.
Raises `BadPeriodError` if the given period is 0.
## Examples
iex> Talib.SMMA.from_list!([17, 23, 44], 2)
%Talib.SMMA{
period: 2,
values: [nil, 20.0, 32.0]
}
iex> Talib.SMMA.from_list!([], 1)
** (NoDataError) no data error
iex> Talib.SMMA.from_list!([17], 0)
** (BadPeriodError) bad period error
"""
@spec from_list!([number], integer) :: Talib.SMMA.t | no_return
def from_list!(data, period) do
case calculate(data, period) do
{:ok, result} -> result
{:error, :no_data} -> raise NoDataError
{:error, :bad_period} -> raise BadPeriodError
end
end
@doc false
@spec calculate([number], integer, [float]) :: {:ok, Talib.SMMA.t}
| {:error, atom}
defp calculate(data, period, results \\ [])
defp calculate(_data, 0, _results), do: {:error, :bad_period}
defp calculate(data, period, []) do
{hd, tl} = Enum.split(data, period)
case SMA.from_list(hd, period) do
{:ok, result} ->
calculate(tl, period, result.values)
{:error, reason} ->
{:error, reason}
end
end
defp calculate([], period, results),
do: {:ok, %Talib.SMMA{period: period, values: results}}
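# Each subsequent value applies the smoothing recurrence:
#   SMMA(i) = (SMMA(i-1) * (period - 1) + value(i)) / period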
defp calculate([hd | tl], period, results) do
[previous_average] = Enum.take(results, -1)
result = (previous_average * (period - 1) + hd) / period
calculate(tl, period, results ++ [result])
end
end
|
lib/talib/smma.ex
| 0.907022 | 0.662935 |
smma.ex
|
starcoder
|
defmodule EdgeDB.RelativeDuration do
@moduledoc """
An immutable value representing an EdgeDB `cal::relative_duration` value.
```elixir
iex(1)> {:ok, pid} = EdgeDB.start_link()
iex(2)> EdgeDB.query_required_single!(pid, "SELECT <cal::relative_duration>'45.6 seconds'")
#EdgeDB.RelativeDuration<"PT45.6S">
```
"""
defstruct months: 0,
days: 0,
microseconds: 0
@typedoc """
An immutable value representing an EdgeDB `cal::relative_duration` value.
Fields:
* `:months` - number of months.
* `:days` - number of days.
* `:microseconds` - number of microseconds.
"""
@type t() :: %__MODULE__{
months: integer(),
days: integer(),
microseconds: integer()
}
end
defimpl Inspect, for: EdgeDB.RelativeDuration do
import Inspect.Algebra
@months_per_year 12
@usecs_per_hour 3_600_000_000
@usecs_per_minute 60_000_000
@usecs_per_sec 1_000_000
@impl Inspect
def inspect(%EdgeDB.RelativeDuration{months: 0, days: 0, microseconds: 0}, _opts) do
concat(["#EdgeDB.RelativeDuration<\"", "PT0S", "\">"])
end
@impl Inspect
def inspect(%EdgeDB.RelativeDuration{} = duration, _opts) do
duration_repr =
"P"
|> format_date(duration)
|> format_time(duration)
concat(["#EdgeDB.RelativeDuration<\"", duration_repr, "\">"])
end
defp format_date(formatted_repr, %EdgeDB.RelativeDuration{} = duration) do
formatted_repr
|> maybe_add_time_part(div(duration.months, @months_per_year), "Y")
|> maybe_add_time_part(rem(duration.months, @months_per_year), "M")
|> maybe_add_time_part(duration.days, "D")
end
defp format_time(formatted_repr, %EdgeDB.RelativeDuration{microseconds: 0}) do
formatted_repr
end
defp format_time(formatted_repr, %EdgeDB.RelativeDuration{microseconds: time}) do
formatted_repr = "#{formatted_repr}T"
tfrac = div(time, @usecs_per_hour)
time = time - tfrac * @usecs_per_hour
hour = tfrac
tfrac = div(time, @usecs_per_minute)
time = time - tfrac * @usecs_per_minute
min = tfrac
formatted_repr =
formatted_repr
|> maybe_add_time_part(hour, "H")
|> maybe_add_time_part(min, "M")
sec = div(time, @usecs_per_sec)
fsec = time - sec * @usecs_per_sec
sign =
if sec < 0 or fsec < 0 do
"-"
else
""
end
if sec != 0 or fsec != 0 do
formatted_repr = "#{formatted_repr}#{sign}#{abs(sec)}"
formatted_repr =
if fsec != 0 do
fsec = String.trim_trailing("#{abs(fsec)}", "0")
"#{formatted_repr}.#{fsec}"
else
formatted_repr
end
"#{formatted_repr}S"
else
formatted_repr
end
end
defp maybe_add_time_part(formatted_repr, 0, _letter) do
formatted_repr
end
defp maybe_add_time_part(formatted_repr, value, letter) do
"#{formatted_repr}#{value}#{letter}"
end
end
|
lib/edgedb/types/relative_duration.ex
| 0.848015 | 0.665774 |
relative_duration.ex
|
starcoder
|
defmodule Oban.Plugins.Stager do
@moduledoc """
Transition jobs to the `available` state when they reach their scheduled time.
This module is necessary for the execution of scheduled and retryable jobs.
## Options
* `:interval` - the number of milliseconds between database updates. This is directly tied to
the resolution of _scheduled_ jobs. For example, with an `interval` of `5_000ms`, scheduled
jobs are checked every 5 seconds. The default is `1_000ms`.
## Instrumenting with Telemetry
The `Oban.Plugins.Stager` plugin adds the following metadata to the `[:oban, :plugin, :stop]` event:
* `:staged_count` - the number of jobs that were staged in the database
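## Usage

A minimal sketch of enabling the plugin with a custom interval, assuming a
standard Oban configuration (`MyApp.Repo` is an illustrative repo name):

    config :my_app, Oban,
      repo: MyApp.Repo,
      plugins: [{Oban.Plugins.Stager, interval: :timer.seconds(5)}]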
"""
use GenServer
alias Oban.{Config, Query}
@type option :: {:conf, Config.t()} | {:name, GenServer.name()}
defmodule State do
@moduledoc false
defstruct [
:conf,
:name,
:timer,
interval: :timer.seconds(1),
lock_key: 1_149_979_440_242_868_003
]
end
@spec start_link([option()]) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: opts[:name])
end
@impl GenServer
def init(opts) do
Process.flag(:trap_exit, true)
state =
State
|> struct!(opts)
|> schedule_staging()
{:ok, state}
end
@impl GenServer
def terminate(_reason, %State{timer: timer}) do
if is_reference(timer), do: Process.cancel_timer(timer)
:ok
end
@impl GenServer
def handle_info(:stage, %State{} = state) do
start_metadata = %{config: state.conf, plugin: __MODULE__}
:telemetry.span([:oban, :plugin], start_metadata, fn ->
case lock_and_schedule_jobs(state) do
{:ok, staged_count} when is_integer(staged_count) ->
{:ok, Map.put(start_metadata, :staged_count, staged_count)}
{:ok, false} ->
{:ok, Map.put(start_metadata, :staged_count, 0)}
error ->
{:error, Map.put(start_metadata, :error, error)}
end
end)
{:noreply, state}
end
defp lock_and_schedule_jobs(state) do
Query.with_xact_lock(state.conf, state.lock_key, fn ->
with {staged_count, [_ | _] = queues} <- Query.stage_scheduled_jobs(state.conf) do
payloads =
queues
|> Enum.uniq()
|> Enum.map(&%{queue: &1})
Query.notify(state.conf, "oban_insert", payloads)
staged_count
end
end)
end
defp schedule_staging(state) do
timer = Process.send_after(self(), :stage, state.interval)
%{state | timer: timer}
end
end
|
lib/oban/plugins/stager.ex
| 0.849066 | 0.560132 |
stager.ex
|
starcoder
|
defmodule Ok do
@moduledoc """
Ok provides functions for handling and returning `{:ok, term}` tuples,
especially within the context of a pipe chain.
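For example, a short pipeline using these helpers:

    1
    |> Ok.ok()
    |> Ok.map(fn n -> n + 1 end)
    #=> {:ok, 2}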
"""
@type ok :: {:ok, term}
@type error :: {:error, term}
@type result :: ok | error
@doc """
Takes a value and wraps it in an ok tuple.
"""
@spec ok(value) :: {:ok, value} when value: term
def ok(value), do: {:ok, value}
@doc """
Takes a value (typically an explanatory error reason) and
wraps it in an error tuple.
"""
@spec error(reason) :: {:error, reason} when reason: term
def error(reason), do: {:error, reason}
@doc """
Receives an ok tuple and a single-arity function and calls
the function with the value within the ok tuple as its input.
Returns a result wrapped in an ok or error tuple.
"""
@spec map(result, (term -> term | result)) :: result
def map({:ok, value}, function) when is_function(function, 1) do
case function.(value) do
{:ok, _} = ok -> ok
{:error, _} = error -> error
x -> {:ok, x}
end
end
def map({:error, _reason} = error, _function), do: error
@doc """
Receives an error tuple and a single-arity function and calls the
function with the reason within the error tuple as its input.
Returns an error tuple.
"""
@spec map_if_error(result, (term -> term)) :: result
def map_if_error({:error, reason}, function) when is_function(function, 1) do
{:error, function.(reason)}
end
def map_if_error({:ok, _} = result, _function), do: result
@doc """
Receives an enumerable, an accumulator, and a function and calls
`Enum.reduce_while/3`. Continues to reduce as long as the reducer function
returns an ok tuple, or calls halt if any call to the reducer returns
an error tuple.
Returns the accumulator as an ok tuple or else returns an error tuple
with the explanation for the halted execution.
"""
@spec reduce(
Enum.t(),
Enum.acc(),
(Enum.element(), Enum.acc() -> {:ok, Enum.acc()} | {:error, reason})
) :: {:ok, Enum.acc()} | {:error, reason}
when reason: term
def reduce(enum, initial, function) do
Enum.reduce_while(enum, {:ok, initial}, fn item, {:ok, acc} ->
case function.(item, acc) do
{:ok, new_acc} -> {:cont, {:ok, new_acc}}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
@doc """
Receives an enumerable and a single-arity function and reduces over
the enumerable, calling the function with each element as its input.
If all function executions over the enumerable are successful, a single
`:ok` is returned, else the error tuple explaining where the error occurred
is returned.
"""
@spec each(Enum.t(), (Enum.element() -> term | error)) :: :ok | error
def each(enum, function) when is_function(function, 1) do
reduce(enum, nil, fn item, acc ->
case function.(item) do
{:error, reason} -> {:error, reason}
_ -> {:ok, acc}
end
end)
|> case do
{:ok, _} -> :ok
error -> error
end
end
def each({:error, _} = error), do: error
@doc """
Receives an enumerable and a single-arity function and iterates over the
elements of the enumerable calling the function with each element as its input.
The result is either a single ok tuple containing the enumerable with the transformed
elements or an error tuple explaining where the failure occurred.
"""
@spec transform(Enum.t(), (Enum.element() -> {:ok, Enum.element()} | {:error, reason})) ::
{:ok, Enum.t()} | {:error, reason}
when reason: term
def transform(enum, function) when is_list(enum) and is_function(function, 1) do
reduce(enum, [], fn item, acc ->
function.(item)
|> map(fn result -> [result | acc] end)
end)
|> map(&Enum.reverse/1)
end
@doc """
Receives an enumerable and validates that none of the elements is an error tuple.
"""
@spec all?(Enum.t()) :: boolean
def all?(enum) do
not Enum.any?(enum, &match?({:error, _}, &1))
end
end
|
apps/ok/lib/ok.ex
| 0.881838 | 0.70092 |
ok.ex
|
starcoder
|
defmodule Parser do
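# Informal grammar implemented by this parser (reconstructed from the clauses below):
#   compound_stmt -> stmt (";" stmt)*
#   stmt          -> "print" expr | "var" IDENTIFIER "=" expr
#   expr          -> term (("+" | "-") expr)?
#   term          -> factor (("*" | "/") term)?
#   factor        -> INT | "(" expr ")" | IDENTIFIER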
def parse(tokens) do
{ast, _} = compound_stmt(tokens)
ast
end
def compound_stmt(tokens, statements \\ []) do
{statement, rest} = stmt(tokens)
# Append the new statement to the end of the list (source order is preserved)
statements = statements ++ [statement]
case rest do
[current | rest] ->
cond do
match(current, [:SEMI]) ->
{compound, rest} = compound_stmt(rest, statements)
{%Stmt.Compound{statements: compound.statements}, rest}
# true ->
# {%Stmt.Compound{statements: statements}, rest}
end
[] ->
{%Stmt.Compound{statements: statements}, []}
end
end
def stmt(tokens) do
case tokens do
[current | rest] ->
cond do
match(current, [:PRINT]) ->
print_stmt(rest)
match(current, [:VAR]) ->
var_stmt(rest)
true ->
raise RuntimeError, message: "Expected a statement"
end
_ ->
raise RuntimeError, message: "Expected a statement"
end
end
def print_stmt(tokens) do
{expression, rest} = expr(tokens)
{%Stmt.Print{expr: expression}, rest}
end
def var_stmt(tokens) do
case tokens do
[id, %Token{lexeme: "=", type: :EQUAL} | rest] ->
{expression, rest} = expr(rest)
{%Stmt.Assign{name: id, expr: expression}, rest}
_ ->
raise RuntimeError, message: "There is a problem with your var statement"
end
end
def expr(tokens) do
# Get the left-hand side of the expression
{left, rest} = term(tokens)
if rest != [] do
# Extract the current token from the rest of the list
[current | rest] = rest
# If current token is a PLUS or MINUS, calculate the rest of the expression
cond do
match(current, [:PLUS, :MINUS]) ->
{right, rest} = expr(rest)
{%Expr.Binary{left: left, operator: current, right: right}, rest}
match(current, [:EOF]) ->
{left, []}
true ->
{left, [current | rest]}
end
else
{left, []}
end
end
def term(tokens) do
# Get the left-hand side of the term
{left, rest} = factor(tokens)
if rest != [] do
# Extract the current token from the rest of the list
[current | rest] = rest
# If current token is a STAR or SLASH, calculate the rest of the term
cond do
match(current, [:STAR, :SLASH]) ->
{right, rest} = term(rest)
{%Expr.Binary{left: left, operator: current, right: right}, rest}
match(current, [:EOF]) ->
{left, []}
true ->
{left, [current | rest]}
end
else
{left, []}
end
end
def factor(tokens) do
[current | rest] = tokens
cond do
match(current, [:INT]) ->
{%Expr.Literal{value: current.value}, rest}
match(current, [:LPR]) ->
case expr(rest) do
{expression, [%Token{lexeme: ")", type: :RPR} | rest]} ->
{expression, rest}
{_expression, _} ->
raise RuntimeError, message: "Missing ')' at end of grouping expression"
end
match(current, [:IDENTIFIER]) ->
var_expr(tokens)
true ->
raise RuntimeError, message: "Expect expression"
end
end
def var_expr(tokens) do
[current | rest] = tokens
{%Expr.Var{name: current}, rest}
end
defp match(current, types) do
Enum.member?(types, current.type)
end
end
|
lib/parser.ex
| 0.602529 | 0.650148 |
parser.ex
|
starcoder
|
defmodule ExRabbitMQ.Consumer do
@moduledoc """
A behaviour module that abstracts away the handling of RabbitMQ connections and channels.
It abstracts the handling of message delivery and acknowledgement.
It also provides hooks to allow the programmer to wrap the consumption of a message without having to directly
access the AMQP interfaces.
For a connection configuration example see `ExRabbitMQ.Config.Connection`.
For a queue configuration example see `ExRabbitMQ.Config.Session`.
#### Example usage for a consumer implementing a `GenServer`
```elixir
defmodule MyExRabbitMQConsumer do
@module __MODULE__
use GenServer
use ExRabbitMQ.Consumer, GenServer
def start_link do
GenServer.start_link(@module, :ok)
end
def init(state) do
new_state =
xrmq_init(:my_connection_config, :my_session_config, state)
|> xrmq_extract_state()
{:ok, new_state}
end
# required override
def xrmq_basic_deliver(payload, meta, state) do
# your message delivery logic goes here...
{:noreply, state}
end
# optional override when there is a need to set up the channel right after the connection has been established.
def xrmq_channel_setup(channel, state) do
# any channel setup goes here...
{:ok, state}
end
# optional override when there is a need to set up the queue and/or exchange just before consuming.
def xrmq_queue_setup(channel, queue, state) do
# The default queue setup uses the exchange, exchange_opts, bind_opts and qos_opts from
# the queue's configuration to set up the QoS, declare the exchange and bind it with the queue.
# You can override this function, but you can also keep the automatic queue setup by
# calling super, eg:
{:ok, state} = super(channel, queue, state)
# any other queue setup goes here...
end
end
```
"""
alias ExRabbitMQ.AST.Common, as: C
require ExRabbitMQ.AST.Common
require ExRabbitMQ.AST.Consumer.GenServer
require ExRabbitMQ.AST.Consumer.GenStage
@type callback_result ::
{:noreply, term}
| {:noreply, term, timeout | :hibernate}
| {:noreply, [term], term}
| {:noreply, [term], term, :hibernate}
| {:stop, term, term}
@doc """
Sets up the process for consuming a RabbitMQ queue.
Initiates a connection or reuses an existing one.
When a connection is established then a new channel is opened.
Next, `c:xrmq_channel_setup/2` is called to do any extra work on the opened channel.
If `start_consuming` is `true` then the process will start consuming messages from RabbitMQ.
The function accepts the following arguments:
* `connection` - The configuration information for the RabbitMQ connection.
It can either be a `ExRabbitMQ.Config.Connection` struct or an atom that will be used as the `key` for reading the
`:exrabbitmq` configuration part from the environment.
For more information on how to configure the connection, check `ExRabbitMQ.Config.Connection`.
* `queue` - The configuration information for the RabbitMQ queue to consume.
It can either be a `ExRabbitMQ.Config.Session` struct or an atom that will be used as the `key` for reading the
`:exrabbitmq` configuration part from the environment.
For more information on how to configure the consuming queue, check `ExRabbitMQ.Config.Session`.
* `start_consuming` - When `true` then `c:xrmq_consume/1` is called automatically after the connection and channel have
been established successfully. *Optional: Defaults to `true`.*
* `state` - The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_init(C.connection(), C.queue(), boolean, term) :: C.result()
@doc false
@callback xrmq_open_channel_setup_consume(term, boolean) :: {:ok, term} | {:error, term, term}
@doc """
Returns a part of the `:exrabbitmq` configuration section, specified with the `key` argument.
For the configuration format see the top section of `ExRabbitMQ.Consumer`.
**Deprecated:** Use `ExRabbitMQ.Config.Connection.from_env/2` or `ExRabbitMQ.Config.Session.from_env/2` instead.
"""
@callback xrmq_get_env_config(atom) :: keyword
@doc """
Returns the connection configuration as it was passed to `c:xrmq_init/4`.
This configuration is set in the wrapper process's dictionary.
For the configuration format see the top section of `ExRabbitMQ.Consumer`.
**Deprecated:** Use `ExRabbitMQ.State.get_connection_config/0` instead.
"""
@callback xrmq_get_connection_config :: term
@doc """
Returns the queue configuration as it was passed to `c:xrmq_init/4`.
This configuration is set in the wrapper process's dictionary.
For the configuration format see the top section of `ExRabbitMQ.Consumer`.
**Deprecated:** Use `ExRabbitMQ.State.get_session_config/0` instead.
"""
@callback xrmq_get_session_config :: term
@doc """
This hook is called when a connection has been established and a new channel has been opened.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_channel_setup(AMQP.Channel.t(), term) :: C.result()
@doc """
This hook is called when a connection has been established and a new channel has been opened,
right after `c:xrmq_channel_setup/2`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_channel_open(AMQP.Channel.t(), term) :: C.result()
@doc """
This hook is called automatically if `start_consuming` was `true` when `c:xrmq_init/4` was called.
If not, then the user has to call it to start consuming.
It is invoked when a connection has been established and a new channel has been opened.
Its flow is to:
1. Declare the queue
2. Run `c:xrmq_queue_setup/3`
3. Start consuming from the queue
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_consume(term) :: C.result()
@doc """
This hook is called as a response to a `:basic_deliver` message, as part of
the consuming flow started by `c:xrmq_consume/1` once the queue has been
declared and set up.
It is the only required callback (i.e., without any default implementation).
It is passed the `payload` of the request as well as the `meta` object of the message.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_deliver(term, term, term) :: callback_result
@doc """
This overridable hook is called as a response to a `:basic_cancel` message.
It is passed the `cancellation_info` of the request and by default it logs an error and
returns `{:stop, :basic_cancel, state}`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_cancel(term, term) :: callback_result
@doc """
This overridable function can be called whenever `no_ack` is set to `false` and the user
wants to *ack* a message.
It is passed the `delivery_tag` of the request and by default it simply *acks* the message
as per the RabbitMQ API.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_ack(String.t(), term) :: C.result()
@doc """
This overridable function can be called whenever `no_ack` is set to `false` and the user wants
to reject a message.
It is passed the `delivery_tag` of the request and by default it simply rejects the message
as per the RabbitMQ API.
If the `opts` argument is omitted, the default value is `[]`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_reject(String.t(), term, term) :: C.result()
@doc """
This overridable function publishes the `payload` to the `exchange` using the provided `routing_key`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_publish(term, String.t(), String.t(), [term]) :: C.basic_publish_result()
@doc """
Helper function that extracts the `state` argument from the passed in tuple.
"""
@callback xrmq_extract_state({:ok, term} | {:error, term, term}) :: term
# credo:disable-for-next-line
defmacro __using__({:__aliases__, _, [kind]})
when kind in [:GenServer, :GenStage] do
common_ast = ExRabbitMQ.AST.Common.ast()
inner_ast =
if kind === :GenStage do
ExRabbitMQ.AST.Consumer.GenStage.ast()
else
ExRabbitMQ.AST.Consumer.GenServer.ast()
end
# credo:disable-for-next-line
quote location: :keep do
alias ExRabbitMQ.Config.Connection, as: XRMQConnectionConfig
alias ExRabbitMQ.Config.Session, as: XRMQSessionConfig
alias ExRabbitMQ.Constants, as: XRMQConstants
alias ExRabbitMQ.State, as: XRMQState
require Logger
unquote(inner_ast)
def xrmq_init(connection_config, session_config, start_consuming \\ true, state) do
connection_config = XRMQConnectionConfig.get(connection_config)
session_config = XRMQSessionConfig.get(session_config)
case xrmq_connection_setup(connection_config) do
:ok ->
XRMQState.set_session_config(session_config)
xrmq_open_channel_setup_consume(start_consuming, state)
{:error, reason} ->
{:error, reason, state}
end
end
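# Consume flow: open a channel, fetch the channel info and session
# configuration from the process state, set up the session (queue,
# exchange, bindings) and QoS, then start consuming unless
# `start_consuming` is false.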
def xrmq_open_channel_setup_consume(start_consuming \\ true, state)
def xrmq_open_channel_setup_consume(start_consuming, state) do
with {:ok, state} <- xrmq_open_channel(state),
{channel, _} <- XRMQState.get_channel_info(),
session_config <- XRMQState.get_session_config(),
{:ok, state} <- xrmq_session_setup(channel, session_config, state),
{:ok, state} <- xrmq_qos_setup(channel, session_config.qos_opts, state),
{:start_consuming, true} <- {:start_consuming, start_consuming} do
xrmq_consume(channel, session_config.queue, session_config.consume_opts, state)
else
{:start_consuming, false} -> {:ok, state}
{:error, _reason, _state} = error -> error
{:error, reason} -> {:error, reason, state}
error -> {:error, error, state}
end
end
def xrmq_consume(state) do
{channel, _} = XRMQState.get_channel_info()
session_config = XRMQState.get_session_config()
xrmq_consume(channel, session_config.queue, session_config.consume_opts, state)
end
def xrmq_consume(channel, queue, consume_opts, state) do
case AMQP.Basic.consume(channel, queue, nil, consume_opts) do
{:ok, _} -> {:ok, state}
{:error, reason} -> {:error, reason, state}
end
end
defp xrmq_qos_setup(_channel, [], state), do: {:ok, state}
defp xrmq_qos_setup(channel, opts, state) do
with :ok <- AMQP.Basic.qos(channel, opts) do
{:ok, state}
end
end
def xrmq_basic_ack(delivery_tag, state) do
case XRMQState.get_channel_info() do
{nil, _} ->
{:error, XRMQConstants.no_channel_error(), state}
{channel, _} ->
try do
case AMQP.Basic.ack(channel, delivery_tag) do
:ok -> {:ok, state}
error -> {:error, error, state}
end
catch
:exit, reason ->
{:error, reason, state}
end
end
end
def xrmq_basic_reject(delivery_tag, opts \\ [], state) do
case XRMQState.get_channel_info() do
{nil, _} ->
{:error, XRMQConstants.no_channel_error(), state}
{channel, _} ->
try do
case AMQP.Basic.reject(channel, delivery_tag, opts) do
:ok -> {:ok, state}
error -> {:error, error, state}
end
catch
:exit, reason ->
{:error, reason, state}
end
end
end
@deprecated "Use ExRabbitMQ.State.get_session_config/0 instead"
def xrmq_get_session_config do
XRMQState.get_session_config()
end
unquote(common_ast)
defoverridable xrmq_basic_cancel: 2,
xrmq_basic_ack: 2,
xrmq_basic_reject: 2,
xrmq_basic_reject: 3
end
end
end
|
lib/ex_rabbit_m_q/consumer.ex
| 0.866951 | 0.851891 |
consumer.ex
|
starcoder
|
defmodule ISO8583.Utils do
@moduledoc false
def slice(payload, lower, upper) when byte_size(payload) > lower and upper < 0 do
<<lower_part::binary-size(lower), upper_part::binary>> = payload
{:ok, lower_part, upper_part}
end
def slice(payload, lower, upper) when byte_size(payload) >= upper do
lower_part =
payload
|> binary_part(lower, upper)
<<_::binary-size(upper), upper_part::binary>> = payload
{:ok, lower_part, upper_part}
end
def encode_bitmap(bitmap, encoding) do
case encoding do
:hex -> bitmap |> hex_to_bytes()
_ -> bitmap
end
end
def iterable_bitmap(hex, length) do
hex
|> hex_to_binary()
|> pad_string("0", length)
|> String.graphemes()
|> Enum.map(&String.to_integer/1)
|> List.replace_at(0, 0)
end
def binary_to_hex(string) do
case Integer.parse(string, 2) do
:error -> {:error, "Binary string is not valid"}
{decimal_no, _} -> Integer.to_string(decimal_no, 16)
end
end
def hex_to_binary(string) do
case Integer.parse(string, 16) do
:error -> {:error, "Hexadecimal string is not valid"}
{decimal_no, _} -> Integer.to_string(decimal_no, 2)
end
end
def hex_to_bytes(hexa_string) do
hexa_string
|> Base.decode16!()
end
def bytes_to_hex(hexa_string) do
hexa_string
|> Base.encode16()
end
def create_bitmap_array(length) do
List.duplicate(0, length) |> List.replace_at(0, 1)
end
def padd_chars(string, pad_length, pad_char) do
string_length = String.length(string)
case string_length < pad_length do
true ->
List.duplicate(pad_char, pad_length - string_length)
|> Enum.join()
|> Kernel.<>(string)
_ ->
string
end
end
def extract_date_time(timestamp) do
padd_chars(Integer.to_string(timestamp.month), 2, "0")
|> Kernel.<>(padd_chars(Integer.to_string(timestamp.day), 2, "0"))
|> Kernel.<>(padd_chars(Integer.to_string(timestamp.hour), 2, "0"))
|> Kernel.<>(padd_chars(Integer.to_string(timestamp.minute), 2, "0"))
|> Kernel.<>(padd_chars(Integer.to_string(timestamp.second), 2, "0"))
end
def extract_time(timestamp) do
padd_chars(Integer.to_string(timestamp.hour), 2, "0")
|> Kernel.<>(padd_chars(Integer.to_string(timestamp.minute), 2, "0"))
|> Kernel.<>(padd_chars(Integer.to_string(timestamp.second), 2, "0"))
end
def attach_timestamps(message) do
timestamp = DateTime.utc_now()
Map.merge(message, %{"7": extract_date_time(timestamp), "12": extract_time(timestamp)})
end
def attach_timestamps(message, timestamp) do
Map.merge(message, %{"7": extract_date_time(timestamp), "12": extract_time(timestamp)})
end
def atomify_map(map) do
atomkeys = fn {k, v}, acc ->
Map.put_new(acc, atomize_binary(k), v)
end
Enum.reduce(map, %{}, atomkeys)
end
defp atomize_binary(value) do
if is_binary(value), do: String.to_atom(value), else: value
end
def construct_field(field, pad) when is_integer(field) do
pad
|> Kernel.<>(Integer.to_string(field))
|> String.to_atom()
end
def construct_field(field, pad) when is_binary(field) do
pad
|> Kernel.<>(field)
|> String.to_atom()
end
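# Prefix the payload with its byte length encoded as two big-endian bytes
# (length = 256 * first_byte + second_byte), the framing commonly used for
# ISO 8583 messages over TCP.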
def encode_tcp_header(data) do
length = byte_size(data)
part_1 = div(length, 256) |> Integer.to_string(16) |> pad_string("0", 2) |> hex_to_bytes()
part_2 = rem(length, 256) |> Integer.to_string(16) |> pad_string("0", 2) |> hex_to_bytes()
part_1 <> part_2 <> data
end
def extract_tcp_header(hex) do
part_1 = hex |> binary_part(0, 1) |> String.to_integer(16)
part_2 = hex |> binary_part(1, 2) |> String.to_integer(16)
256 * part_1 + part_2
end
def extract_hex_data(message, length, "b") do
case slice(message, 0, div(length, 2)) do
{:ok, part, rem} -> {part |> bytes_to_hex(), rem}
{:error, reason} -> {:error, reason}
end
end
def extract_hex_data(message, length, _) do
case slice(message, 0, length) do
{:ok, part, rem} -> {part, rem}
{:error, reason} -> {:error, reason}
end
end
def pad_string(string, pad, max) do
current_size = byte_size(string)
case current_size < max do
true ->
List.duplicate(pad, max - current_size)
|> Enum.join()
|> Kernel.<>(string)
false ->
string
end
end
def var_len_chars(%{len_type: len_type}) do
[type | _] = len_type |> String.split("var")
byte_size(type)
end
def check_data_length(field, data, max_len) do
case byte_size(data) <= max_len do
true ->
:ok
false ->
{:error,
"Error while decoding field #{field}, data exceeds configured length, expected maximum of #{
max_len
} but found #{byte_size(data)}"}
end
end
end
|
lib/iso_8583/utils/utils.ex
| 0.666171 | 0.509581 |
utils.ex
|
starcoder
|
defmodule Ecto.Adapters.Worker do
@moduledoc """
Defines a worker to be used by adapters.
The worker is responsible for managing the connection to
the database, automatically starting a new one if it crashes.
The `ask/2` and `ask!/2` functions can be used any time to
retrieve the connection and its module.
The worker also adds support for laziness, allowing developers
to create workers but connect to the database only when needed
for the first time. Finally, the worker also provides transaction
semantics, with open/close commands as well as a sandbox mode.
In order to use a worker, adapter developers need to implement
two callbacks in a module, `connect/1` and `disconnect/1` defined
in this module. The worker is started by passing the module that
implements the callbacks as well as the connection arguments.
## Transaction modes
The worker supports transactions. The idea is that, once a
transaction is open, the worker is going to monitor the client
and disconnect if the client crashes without properly closing
the connection.
Most of the transaction functions are about telling the worker
how to react to crashes; the client is still responsible for
keeping the transaction state.
The worker also supports a sandbox transaction, which means
transaction management is done on the client and opening a
transaction is then disabled.
Finally, operations like `break_transaction/2` can be used
when something goes wrong, ensuring a disconnection happens.
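## Example

A minimal sketch of an adapter connection module and worker usage
(`MyAdapter.Connection` and `MyDriver` are illustrative names):

    defmodule MyAdapter.Connection do
      @behaviour Ecto.Adapters.Worker

      def connect(opts), do: MyDriver.start_link(opts)
      def disconnect(conn), do: MyDriver.stop(conn)
    end

    {:ok, worker} = Ecto.Adapters.Worker.start_link({MyAdapter.Connection, []})
    {module, conn} = Ecto.Adapters.Worker.ask!(worker, 5_000)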
"""
use GenServer
use Behaviour
@type modconn :: {module :: atom, conn :: pid}
@doc """
Connects to the underlying database.
Should return a process which is linked to
the caller process or an error.
"""
defcallback connect(Keyword.t) :: {:ok, pid} | {:error, term}
@doc """
Disconnects the given `pid`.
If the given `pid` no longer exists, it should not raise.
"""
defcallback disconnect(pid) :: :ok
@doc """
Starts a linked worker for the given module and params.
"""
def start_link({module, params}) do
GenServer.start_link(__MODULE__, {module, params})
end
@doc """
Starts a worker for the given module and params.
"""
def start({module, params}) do
GenServer.start(__MODULE__, {module, params})
end
@doc """
Asks for the module and the underlying connection process.
"""
@spec ask(pid, timeout) :: {:ok, modconn} | {:error, Exception.t}
def ask(worker, timeout) do
GenServer.call(worker, :ask, timeout)
end
@doc """
Asks for the module and the underlying connection process.
"""
@spec ask!(pid, timeout) :: modconn | no_return
def ask!(worker, timeout) do
case ask(worker, timeout) do
{:ok, modconn} -> modconn
{:error, err} -> raise err
end
end
@doc """
Opens a transaction.
Invoked when the client wants to open up a connection.
The worker process starts to monitor the caller and
will wipe out all connection state in case of crashes.
It returns an `:ok` tuple if the transaction can be
opened, a `:sandbox` tuple in case the transaction
could not be opened because it is in sandbox mode
or an `:error` tuple, usually when the adapter is
unable to connect.
## FAQ
Question: What happens if `open_transaction/2` is
called when a transaction is already open?
Answer: If a transaction is already open, the previous
transaction alongside its connection will be discarded
and a new one will be started transparently. The reasoning
is that if the client is calling `open_transaction/2` when
one is already open, it is because the client lost its state,
and we should treat it transparently by disconnecting the
old state and starting a new one.
"""
@spec open_transaction(pid, timeout) :: {:ok, modconn} | {:sandbox, modconn} | {:error, Exception.t}
def open_transaction(worker, timeout) do
GenServer.call(worker, :open_transaction, timeout)
end
@doc """
Closes a transaction.
Both sandbox and open transactions can be closed.
Returns `:not_open` if a transaction was not open.
"""
@spec close_transaction(pid, timeout) :: :not_open | :closed
def close_transaction(worker, timeout) do
GenServer.call(worker, :close_transaction, timeout)
end
@doc """
Breaks a transaction.
Automatically forces the worker to disconnect unless
in sandbox mode. Returns `:not_open` if a transaction
was not open.
"""
@spec break_transaction(pid, timeout) :: :broken | :not_open | :sandbox
def break_transaction(worker, timeout) do
GenServer.call(worker, :break_transaction, timeout)
end
@doc """
Starts a sandbox transaction.
A sandbox transaction is not monitored by the worker.
This functions returns an `:ok` tuple in case a sandbox
transaction has started, a `:sandbox` tuple if it was
already in sandbox mode or `:already_open` if it was
previously open.
"""
@spec sandbox_transaction(pid, timeout) :: {:ok, modconn} | {:sandbox, modconn} | :already_open
def sandbox_transaction(worker, timeout) do
GenServer.call(worker, :sandbox_transaction, timeout)
end
## Callbacks
def init({module, params}) do
Process.flag(:trap_exit, true)
lazy? = Keyword.get(params, :lazy, true)
# Connect eagerly unless the worker is lazy; on failure leave `conn` nil so
# the lazy connection handling in `handle_call/3` can retry later.
conn =
if lazy? do
nil
else
case module.connect(params) do
{:ok, conn} -> conn
_ -> nil
end
end
{:ok, %{conn: conn, params: params, transaction: :closed, module: module}}
end
## Break transaction
def handle_call(:break_transaction, _from, %{transaction: :sandbox} = s) do
{:reply, :sandbox, s}
end
def handle_call(:break_transaction, _from, %{transaction: :closed} = s) do
{:reply, :not_open, s}
end
def handle_call(:break_transaction, _from, s) do
{:reply, :broken, disconnect(s)}
end
## Close transaction
def handle_call(:close_transaction, _from, %{transaction: :sandbox} = s) do
{:reply, :closed, %{s | transaction: :closed}}
end
def handle_call(:close_transaction, _from, %{transaction: :closed} = s) do
{:reply, :not_open, s}
end
def handle_call(:close_transaction, _from, %{transaction: ref} = s) do
Process.demonitor(ref, [:flush])
{:reply, :closed, %{s | transaction: :closed}}
end
## Lazy connection handling
def handle_call(request, from, %{conn: nil, params: params, module: module} = s) do
case module.connect(params) do
{:ok, conn} -> handle_call(request, from, %{s | conn: conn})
{:error, err} -> {:reply, {:error, err}, s}
end
end
## Ask
def handle_call(:ask, _from, s) do
{:reply, {:ok, modconn(s)}, s}
end
## Open transaction
def handle_call(:open_transaction, _from, %{transaction: :sandbox} = s) do
{:reply, {:sandbox, modconn(s)}, s}
end
def handle_call(:open_transaction, {pid, _}, %{transaction: :closed} = s) do
ref = Process.monitor(pid)
{:reply, {:ok, modconn(s)}, %{s | transaction: ref}}
end
def handle_call(:open_transaction, from, %{transaction: _old_ref} = s) do
handle_call(:open_transaction, from, disconnect(s))
end
## Sandbox transaction
def handle_call(:sandbox_transaction, _from, %{transaction: :sandbox} = s) do
{:reply, {:sandbox, modconn(s)}, s}
end
def handle_call(:sandbox_transaction, _from, %{transaction: :closed} = s) do
{:reply, {:ok, modconn(s)}, %{s | transaction: :sandbox}}
end
def handle_call(:sandbox_transaction, _from, %{transaction: _} = s) do
{:reply, :already_open, s}
end
## Info
# The connection crashed. We don't need to notify
# the client if we have an open transaction because
# it will fail with noproc anyway. close_transaction
# and break_transaction will return :not_open after this.
def handle_info({:EXIT, conn, _reason}, %{conn: conn} = s) do
{:noreply, disconnect(%{s | conn: nil})}
end
# The transaction owner crashed without closing.
# We need to assume we don't know the connection state.
def handle_info({:DOWN, ref, _, _, _}, %{transaction: ref} = s) do
{:noreply, disconnect(%{s | transaction: :closed})}
end
def handle_info(_info, s) do
{:noreply, s}
end
def terminate(_reason, %{conn: conn, module: module}) do
conn && module.disconnect(conn)
end
## Helpers
defp modconn(%{conn: conn, module: module}) do
{module, conn}
end
defp disconnect(%{conn: conn, transaction: ref, module: module} = s) do
conn && module.disconnect(conn)
if is_reference(ref) do
Process.demonitor(ref, [:flush])
end
%{s | conn: nil, transaction: :closed}
end
end
|
lib/ecto/adapters/worker.ex
| 0.848878 | 0.60711 |
worker.ex
|
starcoder
|
require Graph
defmodule Utils do
def to_ints(string), do: string |> to_strings |> Enum.map(&to_int/1)
def to_int([single_string]), do: to_int(single_string)
def to_int(string) do
string
|> Integer.parse()
|> elem(0)
end
def to_strings([single_string]), do: to_strings(single_string)
def to_strings(list_of_strings) when is_list(list_of_strings), do: list_of_strings
def to_strings(single_string) when is_binary(single_string),
do: single_string |> String.trim() |> String.split(",")
def output_to_string(map) when is_map(map) do
[min_x, max_x, min_y, max_y] =
map
|> Map.keys()
|> Enum.reduce([0, 0, 0, 0], fn {x, y}, [min_x, max_x, min_y, max_y] ->
min_x = min(min_x, x)
max_x = max(max_x, x)
min_y = min(min_y, y)
max_y = max(max_y, y)
[min_x, max_x, min_y, max_y]
end)
min_y..max_y
|> Enum.map(fn y ->
min_x..max_x
|> Enum.map(fn x -> Map.get(map, {x, y}, 0) end)
end)
|> output_to_string
end
def output_to_string(list) do
list
|> Enum.zip()
|> Enum.map(&Tuple.to_list/1)
|> Enum.chunk_by(&(&1 == [0, 0, 0, 0, 0, 0]))
|> Enum.reject(&Enum.any?(&1, fn x -> x == [0, 0, 0, 0, 0, 0] end))
|> Enum.map(fn letter ->
case letter do
[
[1, 1, 1, 1, 1, 1],
[1, 0, 1, 0, 0, 1],
[1, 0, 1, 0, 0, 1],
[0, 1, 0, 1, 1, 0]
] ->
?B
[
[0, 1, 1, 1, 1, 0],
[1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 1, 0]
] ->
?C
[
[1, 1, 1, 1, 1, 1],
[1, 0, 1, 0, 0, 0],
[1, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 0, 0]
] ->
?F
[
[1, 1, 1, 1, 1, 1],
[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 1]
] ->
?H
[
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 0]
] ->
?J
[
[1, 1, 1, 1, 1, 1],
[0, 0, 1, 0, 0, 0],
[0, 1, 0, 1, 1, 0],
[1, 0, 0, 0, 0, 1]
] ->
?K
[
[1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1]
] ->
?L
[
[1, 1, 1, 1, 1, 1],
[1, 0, 0, 1, 0, 0],
[1, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1]
] ->
?R
[
[1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 0]
] ->
?U
[
[1, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1],
[0, 0, 1, 0, 0, 0],
[1, 1, 0, 0, 0, 0]
] ->
?Y
_ ->
letter
end
end)
|> to_string
end
def points_to_graph(points) do
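# Build directed edges between vertically adjacent points (same x, y values
# differing by exactly 1) and horizontally adjacent points, then add the
# transposed edges so the resulting graph is effectively undirected.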
ud_edges =
points
|> Enum.sort()
|> Enum.chunk_every(2, 1, :discard)
|> Enum.flat_map(fn
[{x, ay} = a, {x, by} = b] -> if by - ay == 1, do: [{a, b}], else: []
_ -> []
end)
lr_edges =
points
|> Enum.sort_by(fn {x, y} -> {y, x} end)
|> Enum.chunk_every(2, 1, :discard)
|> Enum.flat_map(fn
[{ax, y} = a, {bx, y} = b] -> if bx - ax == 1, do: [{a, b}], else: []
_ -> []
end)
edges = lr_edges ++ ud_edges
g = Graph.add_edges(Graph.new(), edges)
h = Graph.transpose(g)
Graph.add_edges(g, Graph.edges(h))
end
end
|
lib/utils.ex
| 0.522689 | 0.647603 |
utils.ex
|
starcoder
|
defmodule GGity.Geom.Line do
@moduledoc false
alias GGity.{Draw, Geom, Plot}
@type t() :: %__MODULE__{}
@type plot() :: %GGity.Plot{}
@type record() :: map()
@type mapping() :: map()
@linetype_specs %{
solid: "",
dashed: "4",
dotted: "1",
longdash: "6 2",
dotdash: "1 2 3 2",
twodash: "2 2 6 2"
}
defstruct data: nil,
mapping: nil,
stat: :identity,
position: :identity,
key_glyph: :path,
alpha: 1,
color: "black",
linetype: "",
size: 1
@spec new(mapping(), keyword()) :: Geom.Line.t()
def new(mapping, options \\ []) do
linetype_name = Keyword.get(options, :linetype, :solid)
options =
options
|> Keyword.drop([:linetype])
|> Keyword.merge(mapping: mapping, linetype: @linetype_specs[linetype_name])
struct(Geom.Line, options)
end
@spec draw(Geom.Line.t(), list(map()), plot()) :: iolist()
def draw(%Geom.Line{} = geom_line, _data, plot), do: lines(geom_line, plot)
defp lines(%Geom.Line{} = geom_line, plot) do
(geom_line.data || plot.data)
|> Enum.group_by(fn row ->
{
row[geom_line.mapping[:alpha]],
row[geom_line.mapping[:color]],
row[geom_line.mapping[:linetype]],
row[geom_line.mapping[:size]]
}
end)
|> Enum.map(fn {_value, group} -> line(geom_line, group, plot) end)
end
defp line(%Geom.Line{} = geom_line, data, %Plot{scales: scales} = plot) do
scale_transforms =
geom_line.mapping
|> Map.take([:x, :y, :color, :linetype, :size])
|> Map.keys()
|> Enum.reduce(%{}, fn aesthetic, mapped ->
Map.put(mapped, aesthetic, Map.get(scales[aesthetic], :transform))
end)
transforms =
geom_line
|> Map.take([:alpha, :color, :linetype, :size])
|> Enum.reduce(%{}, fn {aesthetic, fixed_value}, fixed ->
Map.put(fixed, aesthetic, fn _value -> fixed_value end)
end)
|> Map.merge(scale_transforms)
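# Map each data row to pixel coordinates: apply the scale transforms,
# flip the y-axis (the SVG origin is the top-left corner) and offset by
# the plot's area padding.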
coords =
data
|> sort_by_x(geom_line)
|> Stream.map(fn row ->
[
transforms.x.(row[geom_line.mapping.x]),
transforms.y.(row[geom_line.mapping.y])
]
end)
|> Stream.map(fn row -> Map.new(Enum.zip([:x, :y], row)) end)
|> Stream.map(fn row ->
Map.put(row, :y, (plot.width - row.y) / plot.aspect_ratio)
end)
|> Enum.map(fn row -> {row.x + plot.area_padding, row.y + plot.area_padding} end)
row = hd(data)
[alpha, color, linetype, size] = [
transforms.alpha.(row[geom_line.mapping[:alpha]]),
transforms.color.(row[geom_line.mapping[:color]]),
transforms.linetype.(row[geom_line.mapping[:linetype]]),
transforms.size.(row[geom_line.mapping[:size]])
]
Draw.polyline(coords, color, size, alpha, linetype)
end
defp sort_by_x(data, %Geom.Line{} = geom_line) do
case hd(data)[geom_line.mapping.x] do
%Date{} ->
Enum.sort_by(data, fn row -> row[geom_line.mapping.x] end, Date)
_number ->
Enum.sort_by(data, fn row -> row[geom_line.mapping.x] end)
end
end
end
|
lib/ggity/geom/line.ex
| 0.826747 | 0.620679 |
line.ex
|
starcoder
|
defmodule Scidata.FashionMNIST do
alias Scidata.Utils
@default_data_path "tmp/fashionmnist"
@base_url 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/'
@image_file 'train-images-idx3-ubyte.gz'
@label_file 'train-labels-idx1-ubyte.gz'
defp download_images(opts) do
data_path = opts[:data_path] || @default_data_path
transform = opts[:transform_images] || fn out -> out end
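# IDX image file layout: a 32-bit magic number, then the image, row and
# column counts (all big-endian), followed by the raw pixel bytes.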
<<_::32, n_images::32, n_rows::32, n_cols::32, images::binary>> =
Utils.unzip_cache_or_download(@base_url, @image_file, data_path)
transform.({images, {:u, 8}, {n_images, n_rows, n_cols}})
end
defp download_labels(opts) do
data_path = opts[:data_path] || @default_data_path
transform = opts[:transform_labels] || fn out -> out end
<<_::32, n_labels::32, labels::binary>> =
Utils.unzip_cache_or_download(@base_url, @label_file, data_path)
transform.({labels, {:u, 8}, {n_labels}})
end
@doc """
Downloads the FashionMNIST dataset or fetches it locally.
## Options
* `:data_path` - path where the dataset .gz should be stored locally
* `:transform_images` - accepts a tuple like
`{binary_data, tensor_type, data_shape}` which can be used for
converting the `binary_data` to a tensor with a function like
fn {labels_binary, type, _shape} ->
labels_binary
|> Nx.from_binary(type)
|> Nx.new_axis(-1)
|> Nx.equal(Nx.tensor(Enum.to_list(0..9)))
|> Nx.to_batched_list(32)
end
* `:transform_labels` - similar to `:transform_images` but applied to
dataset labels
## Examples
iex> Scidata.FashionMNIST.download()
{{<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...>>,
{:u, 8}, {60000, 28, 28}},
{<<9, 0, 0, 3, 0, 2, 7, 2, 5, 5, 0, 9, 5, 5, 7, 9, 1, 0, 6, 4, 3, 1, 4, 8, 4,
3, 0, 2, 4, 4, 5, 3, 6, 6, 0, 8, 5, 2, 1, 6, 6, 7, 9, 5, 9, 2, 7, ...>>,
{:u, 8}, {60000}}}
"""
def download(opts \\ []),
do: {download_images(opts), download_labels(opts)}
end
|
lib/fashionmnist.ex
| 0.59749 | 0.606528 |
fashionmnist.ex
|
starcoder
|
defmodule Unpoly do
@moduledoc """
A Plug adapter and helpers for Unpoly, the unobtrusive JavaScript framework.
## Options
* `:cookie_name` - the cookie name where the request method is echoed to. Defaults to
`"_up_method"`.
* `:cookie_opts` - additional options to pass to method cookie.
See `Plug.Conn.put_resp_cookie/4` for all available options.
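## Example

A sketch of branching on an Unpoly fragment update in a Phoenix controller
(module and template names are illustrative):

    def show(conn, %{"id" => id}) do
      template = if Unpoly.up?(conn), do: "fragment.html", else: "show.html"
      render(conn, template, id: id)
    end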
"""
@doc """
Alias for `Unpoly.unpoly?/1`
"""
@spec up?(Plug.Conn.t()) :: boolean()
def up?(conn), do: unpoly?(conn)
@doc """
Returns whether the current request is a [page fragment update](https://unpoly.com/up.replace)
triggered by an Unpoly frontend.
This will eventually just check for the `X-Up-Version` header.
Just in case a user still has an older version of Unpoly running on the frontend,
we also check for the `X-Up-Target` header.
"""
@spec unpoly?(Plug.Conn.t()) :: boolean()
def unpoly?(conn), do: version(conn) !== nil || target(conn) !== nil
@doc """
Returns the current Unpoly version.
The version is guaranteed to be set for all Unpoly requests.
"""
@spec version(Plug.Conn.t()) :: String.t() | nil
def version(conn), do: get_req_header(conn, "x-up-version")
@doc """
Returns the mode of the targeted layer.
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
"""
@spec mode(Plug.Conn.t()) :: String.t() | nil
def mode(conn), do: get_req_header(conn, "x-up-mode")
@doc """
Returns the mode of the layer targeted for a failed fragment update.
A fragment update is considered failed if the server responds with
a status code other than 2xx, but still renders HTML.
Server-side code is free to render different HTML for different modes.
For example, you might prefer to not render a site navigation for overlays.
"""
@spec fail_mode(Plug.Conn.t()) :: String.t() | nil
def fail_mode(conn), do: get_req_header(conn, "x-up-fail-mode")
@doc """
Returns the CSS selector for a fragment that Unpoly will update in
case of a successful response (200 status code).
The Unpoly frontend will expect an HTML response containing an element
that matches this selector.
Server-side code is free to optimize its successful response by only returning HTML
that matches this selector.
"""
@spec target(Plug.Conn.t()) :: String.t() | nil
def target(conn), do: get_req_header(conn, "x-up-target")
@doc """
Returns the CSS selector for a fragment that Unpoly will update in
case of a failed response. Server errors or validation failures are
both examples of a failed response (non-200 status code).
The Unpoly frontend will expect an HTML response containing an element
that matches this selector.
Server-side code is free to optimize its response by only returning HTML
that matches this selector.
"""
@spec fail_target(Plug.Conn.t()) :: String.t() | nil
def fail_target(conn), do: get_req_header(conn, "x-up-fail-target")
@doc """
Returns whether the given CSS selector is targeted by the current fragment
update in case of a successful response (200 status code).
Note that the matching logic is very simplistic and does not actually know
how your page layout is structured. It will return `true` if
the tested selector and the requested CSS selector match exactly, or if the
requested selector is `body` or `html`.
Always returns `true` if the current request is not an Unpoly fragment update.
"""
@spec target?(Plug.Conn.t(), String.t()) :: boolean()
def target?(conn, tested_target), do: query_target(conn, target(conn), tested_target)
@doc """
Returns whether the given CSS selector is targeted by the current fragment
update in case of a failed response (non-200 status code).
Note that the matching logic is very simplistic and does not actually know
how your page layout is structured. It will return `true` if
the tested selector and the requested CSS selector match exactly, or if the
requested selector is `body` or `html`.
Always returns `true` if the current request is not an Unpoly fragment update.
"""
@spec fail_target?(Plug.Conn.t(), String.t()) :: boolean()
def fail_target?(conn, tested_target), do: query_target(conn, fail_target(conn), tested_target)
@doc """
Returns whether the given CSS selector is targeted by the current fragment
update for either a success or a failed response.
Note that the matching logic is very simplistic and does not actually know
how your page layout is structured. It will return `true` if
the tested selector and the requested CSS selector match exactly, or if the
requested selector is `body` or `html`.
Always returns `true` if the current request is not an Unpoly fragment update.
"""
@spec any_target?(Plug.Conn.t(), String.t()) :: boolean()
def any_target?(conn, tested_target),
do: target?(conn, tested_target) || fail_target?(conn, tested_target)
@doc """
Returns whether the current form submission should be
[validated](https://unpoly.com/input-up-validate) (and not be saved to the database).
"""
@spec validate?(Plug.Conn.t()) :: boolean()
def validate?(conn), do: validate_name(conn) !== nil
@doc """
If the current form submission is a [validation](https://unpoly.com/input-up-validate),
this returns the name attribute of the form field that has triggered
the validation.
"""
@spec validate_name(Plug.Conn.t()) :: String.t() | nil
def validate_name(conn), do: get_req_header(conn, "x-up-validate")
@doc """
Returns the timestamp of an existing fragment that is being reloaded.
The timestamp must be explicitly set by the user as an [up-time] attribute on the fragment.
It should indicate the time when the fragment's underlying data was last changed.
"""
@spec reload_from_time(Plug.Conn.t()) :: String.t() | nil
def reload_from_time(conn) do
with timestamp when is_binary(timestamp) <- get_req_header(conn, "x-up-reload-from-time"),
{timestamp, ""} <- Integer.parse(timestamp),
{:ok, datetime} <- DateTime.from_unix(timestamp) do
datetime
else
_ -> nil
end
end
@doc """
Returns the timestamp of an existing fragment that is being reloaded.
The timestamp must be explicitely set by the user as an [up-time] attribute on the fragment.
It should indicate the time when the fragment's underlying data was last changed.
"""
@spec reload?(Plug.Conn.t()) :: boolean()
def reload?(conn), do: reload_from_time(conn) !== nil
@doc """
Forces Unpoly to use the given string as the document title when processing
this response.
This is useful when you skip rendering the `<head>` in an Unpoly request.
"""
@spec put_title(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
def put_title(conn, new_title), do: Plug.Conn.put_resp_header(conn, "x-up-title", new_title)
# Plug
def init(opts \\ []) do
cookie_name = Keyword.get(opts, :cookie_name, "_up_method")
cookie_opts = Keyword.get(opts, :cookie_opts, http_only: false)
{cookie_name, cookie_opts}
end
def call(conn, {cookie_name, cookie_opts}) do
conn
|> Plug.Conn.fetch_cookies()
|> echo_request_headers()
|> append_method_cookie(cookie_name, cookie_opts)
end
@doc """
Sets the value of the "X-Up-Accept-Layer" response header.
"""
@spec put_resp_accept_layer_header(Plug.Conn.t(), term) :: Plug.Conn.t()
def put_resp_accept_layer_header(conn, value) when is_binary(value) do
Plug.Conn.put_resp_header(conn, "x-up-accept-layer", value)
end
def put_resp_accept_layer_header(conn, value) do
value = Phoenix.json_library().encode_to_iodata!(value)
put_resp_accept_layer_header(conn, to_string(value))
end
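# For example, to accept an overlay with a result value (a sketch; the
# payload is hypothetical):
#
#     conn
#     |> Unpoly.put_resp_accept_layer_header(%{user_id: 123})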
@doc """
Sets the value of the "X-Up-Dismiss-Layer" response header.
"""
@spec put_resp_dismiss_layer_header(Plug.Conn.t(), term) :: Plug.Conn.t()
def put_resp_dismiss_layer_header(conn, value) when is_binary(value) do
Plug.Conn.put_resp_header(conn, "x-up-dismiss-layer", value)
end
def put_resp_dismiss_layer_header(conn, value) do
value = Phoenix.json_library().encode_to_iodata!(value)
put_resp_dismiss_layer_header(conn, to_string(value))
end
@doc """
Sets the value of the "X-Up-Events" response header.
"""
@spec put_resp_events_header(Plug.Conn.t(), term) :: Plug.Conn.t()
def put_resp_events_header(conn, value) when is_binary(value) do
Plug.Conn.put_resp_header(conn, "x-up-events", value)
end
def put_resp_events_header(conn, value) do
value = Phoenix.json_library().encode_to_iodata!(value)
put_resp_events_header(conn, to_string(value))
end
@doc """
Sets the value of the "X-Up-Location" response header.
"""
@spec put_resp_location_header(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
def put_resp_location_header(conn, value) do
Plug.Conn.put_resp_header(conn, "x-up-location", value)
end
@doc """
Sets the value of the "X-Up-Method" response header.
"""
@spec put_resp_method_header(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
def put_resp_method_header(conn, value) do
Plug.Conn.put_resp_header(conn, "x-up-method", value)
end
@doc """
Sets the value of the "X-Up-Target" response header.
"""
@spec put_resp_target_header(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
def put_resp_target_header(conn, value) do
Plug.Conn.put_resp_header(conn, "x-up-target", value)
end
defp echo_request_headers(conn) do
conn
|> put_resp_location_header(Phoenix.Controller.current_url(conn))
|> put_resp_method_header(conn.method)
end
defp append_method_cookie(conn, cookie_name, cookie_opts) do
cond do
conn.method != "GET" && !up?(conn) ->
Plug.Conn.put_resp_cookie(conn, cookie_name, conn.method, cookie_opts)
Map.has_key?(conn.req_cookies, cookie_name) ->
Plug.Conn.delete_resp_cookie(conn, cookie_name, cookie_opts)
true ->
conn
end
end
## Helpers
defp get_req_header(conn, key),
do: Plug.Conn.get_req_header(conn, key) |> List.first()
defp query_target(conn, actual_target, tested_target) do
if up?(conn) do
cond do
actual_target == tested_target -> true
actual_target == "html" -> true
actual_target == "body" && tested_target not in ["head", "title", "meta"] -> true
true -> false
end
else
true
end
end
end
# source file: lib/unpoly.ex
defmodule ExTweet.Parser.Tweet do
defstruct [:id, :datetime, :username, :user_id, :text, :url, :links]
@multiple_space_regex Regex.compile!(~S(\s{1,}), [:caseless, :unicode])
@type t :: %__MODULE__{
id: integer(),
user_id: integer(),
datetime: DateTime.t(),
username: String.t(),
text: String.t(),
url: String.t(),
links: [String.t()]
}
## API
@spec parse_tweet_stream(String.t()) :: [__MODULE__.t()]
def parse_tweet_stream(raw_html) do
split_stream_into_tweets(raw_html)
|> Enum.filter(&has_usernames?/1)
|> Enum.map(&parse_tweet/1)
end
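# Example usage (a sketch; `raw_html` is assumed to hold a Twitter
# search-results HTML fragment):
#
#     tweets = ExTweet.Parser.Tweet.parse_tweet_stream(raw_html)
#     Enum.map(tweets, & &1.username)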
@spec parse_tweet(Floki.html_tree()) :: __MODULE__.t()
def parse_tweet(tweet) do
%__MODULE__{}
|> extract_creation_datetime(tweet)
|> extract_tweet_id(tweet)
|> extract_user_id(tweet)
|> extract_username(tweet)
|> extract_text(tweet)
|> extract_permalink(tweet)
end
def split_stream_into_tweets(raw_html) do
{:ok, tweet_stream} = Floki.parse_fragment(raw_html)
# Remove incomplete tweets withheld by Twitter Guidelines
# Has selector: div.withheld-tweet
tweet_stream =
Floki.traverse_and_update(tweet_stream, fn
{"div", [{"withheld-tweet", _} | _], _children} -> nil
tag -> tag
end)
# Tweets
Floki.find(tweet_stream, "div.js-stream-tweet")
end
## Private
defp extract_creation_datetime(map, tweet) do
[timestamp] =
tweet
|> Floki.find("small.time span.js-short-timestamp")
|> Floki.attribute("data-time")
datetime =
timestamp
|> String.to_integer()
|> DateTime.from_unix!()
Map.put(map, :datetime, datetime)
end
defp extract_text(map, tweet) do
{text, links} = process_text(tweet)
text =
if text == "" do
"#HistoricTwitter:no_text"
else
text
end
map
|> Map.put(:text, text)
|> Map.put(:links, links)
end
defp extract_permalink(map, tweet) do
[perma] = Floki.attribute(tweet, "data-permalink-path")
url = "https://twitter.com" <> perma
Map.put(map, :url, url)
end
defp extract_tweet_id(map, tweet) do
[id] = Floki.attribute(tweet, "data-tweet-id")
Map.put(map, :id, String.to_integer(id))
end
defp extract_user_id(map, tweet) do
# Zeroth IDs is the author of the tweet, 1 --> N IDs correspond
# to the recipients, if any.
[author_id | _recipient_ids] =
tweet
|> Floki.find("a.js-user-profile-link")
|> Floki.attribute("data-user-id")
Map.put(map, :user_id, String.to_integer(author_id))
end
defp has_usernames?(tweet) do
# Sometimes, in rare cases a Twitter search will return a tweet with no
# usernames and hence not be valid.
usernames(tweet) |> length() > 0
end
defp extract_username(map, tweet) do
# Zeroth username is the author of the tweet, 1 --> N usernames correspond
# to the recipients, if any.
[author | _recipients] = usernames(tweet)
Map.put(map, :username, author)
end
defp usernames(tweet) do
tweet
|> Floki.find("span.username.u-dir b")
|> Floki.text(sep: " ")
|> String.split()
end
def process_text(raw_tweet_html) do
[{"p", _attrs, mixed_text}] = Floki.find(raw_tweet_html, "p.js-tweet-text")
normalized_text =
for element <- mixed_text do
# Pack text into `<packed>` tags, as Floki.traverse_and_update can only
# process Floki.tag types and not text between tags. By treating the text
# like this, we can accurately track the last tag in the
# Floki.traverse_and_update accumulator.
if is_binary(element) do
{"packed", [], [element]}
else
element
end
end
acc = %{last_tag: :init, links: []}
{processed_mixed_text, acc} =
Floki.traverse_and_update(normalized_text, acc, fn
{"packed", _, [text]} = packed_tag, acc ->
{text, handle_process_text_accumulator(acc, packed_tag)}
{"a", _, _} = link_tag, acc ->
case process_link_tag(link_tag) do
{text, :no_url} ->
{text, handle_process_text_accumulator(acc, link_tag)}
{text, url} ->
{text, handle_process_text_accumulator(acc, link_tag, url)}
end
{"img", _, _} = img_tag, acc ->
text = process_img_tag(img_tag)
{text, handle_process_text_accumulator(acc, img_tag)}
{"strong", _, _} = strong_tag, acc ->
text = process_strong_tag(strong_tag, acc.last_tag)
{text, handle_process_text_accumulator(acc, strong_tag)}
tag, acc ->
{tag, handle_process_text_accumulator(acc, tag)}
end)
cleaned_text =
processed_mixed_text
|> Floki.text(sep: "")
|> clean_text_whitespace()
{cleaned_text, acc.links}
end
defp handle_process_text_accumulator(acc, tag, url \\ nil)
defp handle_process_text_accumulator(acc, tag, url) do
tag_name =
case tag do
{tag_name, _, _} -> tag_name
_ -> :text
end
acc = %{acc | last_tag: tag_name}
if is_nil(url) do
acc
else
%{acc | links: [url | acc.links]}
end
end
defp clean_text_whitespace(text) do
text
|> String.replace("\n", " ")
|> String.trim(" ")
|> String.replace(@multiple_space_regex, " ")
end
defp process_img_tag({"img", attrs, _children}) do
attrs = Map.new(attrs)
classes = attribute_to_mapset(attrs, "class")
cond do
"Emoji" in classes ->
case Map.get(attrs, "alt", :not_emoji) do
:not_emoji ->
title = Map.get(attrs, "title", "#HistoricTwitter:unknown_emoji")
" Emoji[#{title}]"
emoji ->
" #{emoji}"
end
true ->
nil
end
end
defp process_strong_tag({"strong", _attrs, children}, last_tag) do
text = Floki.text(children)
if last_tag == "strong" do
" " <> text
else
text
end
end
defp process_link_tag({"a", attrs, children}) do
attrs = Map.new(attrs)
classes = attribute_to_mapset(attrs, "class")
cond do
"twitter-hashtag" in classes ->
# format a #hashtag (in case it has a strong tag)
text = format_twitter_idiom(:hashtag, children)
{text, :no_url}
"twitter-atreply" in classes ->
# format an @user (in case it has a strong tag)
text = format_twitter_idiom(:at_user, children)
{text, :no_url}
"twitter-timeline-link" in classes ->
case Map.get(attrs, "data-expanded-url", :no_url) do
:no_url ->
{nil, :no_url}
url ->
url = URI.parse(url) |> URI.to_string()
{nil, url}
end
true ->
{nil, :no_url}
end
end
defp attribute_to_mapset(%{} = attrs, attribute_key) do
attrs
|> Map.get(attribute_key, "")
|> String.split()
|> MapSet.new()
end
defp format_twitter_idiom(:hashtag, children), do: do_format_twitter_idiom("#", children)
defp format_twitter_idiom(:at_user, children), do: do_format_twitter_idiom("@", children)
defp do_format_twitter_idiom(symbol, children) do
text =
Floki.text(children)
|> String.trim_leading(symbol)
|> String.trim_leading(" ")
" #{symbol}" <> text
end
end
# source file: lib/ex_tweet/parser/tweet.ex
alias Graphqexl.Query
alias Graphqexl.Schema
alias Graphqexl.Schema.{
Dsl,
Interface,
Mutation,
Query,
Subscription,
TEnum,
Type,
Union,
}
alias Graphqexl.Tokens
alias Treex.Tree
defmodule Graphqexl.Schema do
@moduledoc """
Structured representation of a GraphQL schema, either built dynamically or
parsed from a JSON document or GQL string.
"""
@moduledoc since: "0.1.0"
defstruct(
context: nil,
enums: %{},
interfaces: %{},
mutations: %{},
queries: %{},
resolvers: %{},
subscriptions: %{},
tree: %Tree{},
types: %{},
unions: %{}
)
@type component::
TEnum.t |
Interface.t |
Mutation.t |
Query.t |
Subscription.t |
Type.t |
Union.t
@type gql:: String.t
@type json:: Map.t
@type t::
%Graphqexl.Schema{
context: (Query.t, Map.t -> Map.t),
enums: %{atom => TEnum.t},
interfaces: %{atom => Interface.t},
mutations: %{atom => Mutation.t},
queries: %{atom => Query.t},
resolvers: %{atom => (Map.t, Map.t, Map.t -> Map.t)},
subscriptions: %{atom => Subscription.t},
tree: Tree.t,
types: %{atom => Type.t},
unions: %{atom => Union.t} ,
}
@doc """
Builds an executable schema containing the schema definition as well as resolver map and context
factory.
Returns: `t:Graphqexl.Schema.t/0`
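For example (a sketch; the `getPost` resolver name is hypothetical and
must match an operation defined in the schema):

    schema =
      gql_string
      |> Graphqexl.Schema.executable(%{
        getPost: fn _parent, args, _context -> %{id: args.id} end
      })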
"""
@doc since: "0.1.0"
@spec executable(gql, Map.t, Map.t | nil):: t
def executable(gql_str, resolvers, context \\ nil) do
  case gql_str |> gql |> validate_resolvers!(resolvers) do
    {:ok, schema} -> %{schema | context: context, resolvers: resolvers}
    {:error, reason} -> raise reason
  end
end
@doc """
Parses a `t:Graphqexl.Schema.gql/0` string into a `t:Graphqexl.Schema.t/0`.
Returns: `t:Graphqexl.Schema.t/0`
"""
@doc since: "0.1.0"
@spec gql(gql | json) :: t
def gql(gql_str) when is_binary(gql_str) do
gql_str
|> Dsl.preprocess
|> split_lines
|> Enum.reduce(%Graphqexl.Schema{}, &apply_line/2)
end
def gql(_json), do: %Graphqexl.Schema{}
@doc """
Check whether a `t:Graphqexl.Schema.Field.t/0` is defined on the given `t:Graphqexl.Schema.t/0`.
Returns: `t:boolean/0`
"""
@doc since: "0.1.0"
@spec has_field?(Schema.t, atom):: boolean
# TODO: fix
def has_field?(_schema, _field), do: true # !is_nil(Traverse.traverse(schema, &({:continue, &1}), :bfs))
@doc """
Registers the given `t:Graphqexl.Schema.component/0` on the given `t:Graphqexl.Schema.t/0`.
Returns `t:Graphqexl.Schema.t/0`
"""
@doc since: "0.1.0"
@spec register(t, component):: t
def register(schema, %TEnum{} = component), do: schema |> register(:enums, component)
def register(schema, %Interface{} = component), do: schema |> register(:interfaces, component)
def register(schema, %Mutation{} = component), do: schema |> register(:mutations, component)
def register(schema, %Query{} = component), do: schema |> register(:queries, component)
def register(schema, %Type{} = component), do: schema |> register(:types, component)
def register(schema, %Union{} = component), do: schema |> register(:unions, component)
def register(schema, %Subscription{} = component),
do: schema |> register(:subscriptions, component)
@doc false
@spec register(t, atom, component):: t
defp register(schema, key, component),
do: schema |> Map.update(key, %{}, &(&1 |> add_component(component)))
@doc false
@spec add_component(Map.t, component):: Map.t
defp add_component(map, component), do: map |> Map.update(component.name, component, &(&1))
@doc false
@spec apply_line(list(String.t), t):: t
defp apply_line([cmd | args], schema) do
[str_name | fields_or_values] = args
name = str_name |> String.to_atom
case cmd
|> String.replace(:argument_placeholder_separator |> Tokens.get, "")
|> String.to_atom do
:enum -> schema |> Dsl.enum(name, fields_or_values)
:interface -> schema |> Dsl.interface(name, fields_or_values)
:mutation -> schema |> Dsl.mutation(args)
:query -> schema |> Dsl.query(args)
:schema -> schema |> Dsl.schema(args)
:subscription -> schema |> Dsl.subscription(args)
:type ->
cond do
name == :Query ->
fields_or_values
|> List.first
|> String.split(:argument_placeholder_separator |> Tokens.get)
|> Enum.reduce(schema, &(Dsl.query(&2, &1)))
name == :Mutation ->
fields_or_values |> Enum.reduce(schema, &(Dsl.mutation(&2, &1)))
name == :Subscription ->
fields_or_values |> Enum.reduce(schema, &(Dsl.subscription(&2, &1)))
fields_or_values |> is_argument? -> schema |> Dsl.type(name, nil, fields_or_values)
fields_or_values |> is_custom_scalar? ->
schema
|> Dsl.type(
name,
fields_or_values
|> List.first
|> String.replace(:custom_scalar_placeholder |> Tokens.get, "")
)
true ->
{implements, fields} = fields_or_values |> List.pop_at(0)
schema |> Dsl.type(name, implements, fields)
end
:union ->
[_, type1, type2] = args
schema |> Dsl.union(name, type1, type2)
_ -> raise "Unknown token: #{cmd}"
end
end
@doc false
@spec is_custom_scalar?(String.t):: boolean
defp is_custom_scalar?(spec),
do: spec |> list_head_contains(:custom_scalar_placeholder |> Tokens.get)
@doc false
@spec is_argument?(String.t):: boolean
defp is_argument?(spec) do
spec |> list_head_contains(:argument_delimiter |> Tokens.get)
end
@doc false
@spec list_head_contains(list, term):: boolean
defp list_head_contains(list, needle) do
list
|> List.first
|> String.contains?(needle)
end
@doc false
@spec semicolonize(String.t):: String.t
defp semicolonize(value) do
value |> String.replace(" ", :argument_placeholder_separator |> Tokens.get)
end
@doc false
@spec split_lines(String.t):: list(String.t)
defp split_lines(preprocessed) do
preprocessed
|> String.split(:newline |> Tokens.get)
|> Enum.map(&(&1 |> String.replace(
"#{:argument_delimiter |> Tokens.get} ",
:argument_delimiter |> Tokens.get))
)
|> Enum.map(fn spec ->
Regex.replace(
~r/(#{:argument |> Tokens.get |> Map.get(:open) |> Regex.escape}.*#{:argument |> Tokens.get |> Map.get(:close) |> Regex.escape})/,
spec, &semicolonize/1
)
end)
|> Enum.map(&(&1 |> String.split(:space |> Tokens.get)))
end
@doc false
@spec validate_resolver!(t, atom):: {:ok, t} | {:error, String.t}
defp validate_resolver!(schema, name) do
if [schema.mutations, schema.queries, schema.subscriptions]
|> Enum.any?(&(&1 |> Map.keys |> Enum.member?(name))) do
{:ok, schema}
else
{:error, "No operation matching resolver #{name}"}
end
end
@doc false
@spec validate_resolvers!(t, Map.t):: {:ok, t} | {:error, String.t}
defp validate_resolvers!(schema, resolvers) do
if resolvers
|> Map.keys
|> Enum.all?(&(match?({:ok, _}, schema |> validate_resolver!(&1)))) do
{:ok, schema}
else
# TODO: tighten up this error handling
{:error, "Invalid resolvers"}
end
end
end
# source file: lib/graphqexl/schema.ex
defmodule SigilF do
@moduledoc """
This module provides `sigil_f/2` macro that generates formatting function
out of the format string passed as the argument, ex.:
iex> import SigilF
iex> func = ~f(~.3f)
iex> func.(3.14159)
"3.142"
For supported format check out `:io.fwrite/2`.
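A format string with several control sequences yields a function of
matching arity, e.g. (a sketch):

    func = ~f(~s = ~w)
    func.("answer", 42)
    #=> "answer = 42"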
"""
@modifiers 'cfegswpWPBX#bx+i'
defp error(msg, caller) do
raise CompileError,
file: caller.file,
line: caller.line,
description: msg
end
defmacro sigil_f({:<<>>, _, [format]}, []) do
amount = parse(format)
args = Macro.generate_arguments(amount, __CALLER__.module)
quote do
fn unquote_splicing(args) ->
:binary.list_to_bin(:io_lib.format(unquote(format), [unquote_splicing(args)]))
end
end
catch
:end -> error("Unexpected end of string", __CALLER__)
{:unexpected, c} -> error("Unexpected character '#{<<c>>}'", __CALLER__)
end
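# With the `c` modifier the generated function returns iodata (the raw
# `:io_lib.format/2` result) instead of a binary.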
defmacro sigil_f({:<<>>, _, [format]}, 'c') do
amount = parse(format)
args = Macro.generate_arguments(amount, __CALLER__.module)
quote do
fn unquote_splicing(args) ->
:io_lib.format(unquote(format), [unquote_splicing(args)])
end
end
catch
:end -> error("Unexpected end of string", __CALLER__)
{:unexpected, c} -> error("Unexpected character '#{<<c>>}'", __CALLER__)
end
defp parse(format), do: parse(format, 0)
defp parse("", count), do: count
defp parse("~" <> rest, count) do
{rest, sub_count} = parse_format(rest)
parse(rest, count + sub_count)
end
defp parse(<<_, rest::binary>>, count), do: parse(rest, count)
defp parse_format(input), do: parse_format(input, 0, :width)
defp parse_format("." <> rest, count, :precision),
do: parse_format(rest, count, :precision)
defp parse_format("*" <> rest, count, :width),
do: parse_format(rest, count + 1, :precision)
defp parse_format("*" <> rest, count, :precision),
do: parse_format(rest, count + 1, :pad)
defp parse_format(".*" <> rest, count, :pad),
do: parse_format(rest, count + 1, :specifier)
defp parse_format(<<".", _, rest::binary>>, count, :pad),
do: parse_format(rest, count, :specifier)
defp parse_format(<<c, rest::binary>>, count, part)
when part in ~w[precision width]a and c in ?0..?9 do
parse_format(rest, count, part)
end
defp parse_format(<<".", _::binary>> = rest, count, :width),
do: parse_format(rest, count, :precision)
# Match specifiers
defp parse_format("tp" <> rest, count, _), do: {rest, count + 1}
defp parse_format("lp" <> rest, count, _), do: {rest, count + 1}
defp parse_format("tP" <> rest, count, _), do: {rest, count + 1}
defp parse_format("lP" <> rest, count, _), do: {rest, count + 1}
defp parse_format("~" <> rest, count, _), do: {rest, count}
defp parse_format("n" <> rest, count, _), do: {rest, count}
defp parse_format(<<c, rest::binary>>, count, _) when c in @modifiers, do: {rest, count + 1}
defp parse_format("", _, _), do: throw(:end)
defp parse_format(<<c, _::binary>>, _, _), do: throw({:unexpected, c})
end
# source file: lib/sigil_f.ex
defmodule ColorContrast do
@moduledoc ColorContrast.MixProject.project()[:description]
@brightness_threshold 130
@black_hex "#000000"
@white_hex "#FFFFFF"
@doc """
Calculates the contrast with a background color.
## Examples
Accept a color value in different hex formats.
iex> ColorContrast.calc_contrast("#000000")
{:ok, "#FFFFFF"}
iex> ColorContrast.calc_contrast("#ffffff")
{:ok, "#000000"}
iex> ColorContrast.calc_contrast("000000")
{:ok, "#FFFFFF"}
iex> ColorContrast.calc_contrast("ffffff")
{:ok, "#000000"}
Show an error for an invalid color hex value.
iex> ColorContrast.calc_contrast("#1234567")
{:error, :invalid_bg_color_hex_value}
iex> ColorContrast.calc_contrast("#12345z")
{:error, :invalid_bg_color_hex_value}
iex> ColorContrast.calc_contrast("12345z")
{:error, :invalid_bg_color_hex_value}
"""
@spec calc_contrast(String.t()) :: {:ok, String.t()} | {:error, atom()}
def calc_contrast(<<"#", red::bytes-2, green::bytes-2, blue::bytes-2>> = _bg_color_hex) do
do_calc_contrast(red, green, blue)
end
def calc_contrast(<<red::bytes-2, green::bytes-2, blue::bytes-2>> = _bg_color_hex) do
do_calc_contrast(red, green, blue)
end
def calc_contrast(_invalid_hex) do
{:error, :invalid_bg_color_hex_value}
end
defp do_calc_contrast(red, green, blue) do
case calc_perceived_brightness(red, green, blue) do
:error ->
{:error, :invalid_bg_color_hex_value}
brightness ->
{:ok, fg_color(brightness)}
end
end
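# Perceived brightness follows the HSP colour model: the square root of a
# weighted sum of squared channel values, using the widely cited
# 0.241/0.691/0.068 weights for red/green/blue.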
defp calc_perceived_brightness(red, green, blue) do
with {red, ""} <- Integer.parse(red, 16),
{green, ""} <- Integer.parse(green, 16),
{blue, ""} <- Integer.parse(blue, 16) do
:math.sqrt(
red * red * 0.241 +
green * green * 0.691 +
blue * blue * 0.068
)
else
_error -> :error
end
end
defp fg_color(brightness) when brightness > @brightness_threshold do
@black_hex
end
defp fg_color(_brightness) do
@white_hex
end
end
# source file: lib/color_contrast.ex
defmodule Fastimage.Error do
@moduledoc """
Representations of various errors potentially returned by Fastimage.
Types of errors which may occur include:
- Function execution for unsupported image types eg .tiff
- Errors occurring during streaming
- Errors emanating from the :hackney library, eg an
attempt to access an unauthorized file.
- Exceeding the maximum number of redirects for a url - the
max number of redirects is hardcoded as 3.
- Entering invalid input, Fastimage accepts only valid urls, a
valid file path or a binary which is an image.
## Exception fields
This exception has the following public fields:
* `:message` - (atom) the error reason. This can be a Fastimage
specific reason such as `:invalid_input` or something emanating
from the :hackney library such as an unauthorized file error.
## Error reasons
Fastimage specific reasons:
* `{:unsupported, value}`: may occur when the file type is not supported by Fastimage,
value can be a url or nil when handling a file or a binary.
* `:unexpected_end_of_stream_error`: may occur when the end of a stream
is reached unexpectedly without having determined the image type.
* `:unexpected_binary_streaming_error`: may occur when an unexpected error
occurred whilst streaming bytes from a binary.
* `:unexpected_file_streaming_error`: may occur when an unexpected error
occurred whilst streaming bytes from a file.
* `:invalid_input`: may occur when an invalid type is entered as an
argument to the `size/1` or `type/1` functions. Only a valid url, file path
or binary. Any other type is considered invalid input.
:hackney specific reasons:
* `{:unexpected_http_streaming_error, url}`: may occur when a streaming
error occurs for the given url.
* `{:hackney_response_error, {url, status_code, reason}}`: may occur when
a response error is returned from :hackney. For example, access to the
given url is forbidden without authorization.
* `{:max_redirects_exceeded, {url, num_redirects, max_redirects}}`: may
occur when the maximum number of redirects has been reached. This is
currently programmatically hardcoded as 3 redirects.
unknown reasons:
`reason` may occur when an unknown error occurs.
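## Example

Catching and formatting the error (a sketch; assumes the raising
`Fastimage.size!/1` variant is used):

    try do
      Fastimage.size!("not_a_real_image")
    rescue
      error in Fastimage.Error -> Exception.message(error)
    end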
"""
defexception [:reason]
@supported_types ["gif", "png", "jpg", "bmp"]
alias Fastimage.Stream
def exception(reason) do
%Fastimage.Error{reason: reason}
end
def message(%Fastimage.Error{reason: reason}) do
format_error(reason)
end
# private
defp format_error({:unsupported, %Stream.Acc{source_type: source_type} = acc}) do
unsupported_error(acc, source_type)
end
defp format_error(
{:unexpected_end_of_stream_error, %Stream.Acc{source_type: source_type} = acc}
) do
end_of_stream_error(acc, source_type)
end
defp format_error(:unexpected_binary_streaming_error) do
binary_streaming_error()
end
defp format_error(
{:unexpected_file_streaming_error, %Stream.Acc{stream_ref: %File.Stream{path: path}}}
) do
file_streaming_error(path)
end
defp format_error({:unexpected_http_streaming_error, url, hackney_reason}) do
streaming_error(url, hackney_reason)
end
defp format_error({:hackney_response_error, {url, status_code, reason}}) do
hackney_response_error(url, status_code, reason)
end
defp format_error({:max_redirects_exceeded, {url, num_redirects, max_redirects}}) do
max_redirects_error(url, num_redirects, max_redirects)
end
defp format_error(:invalid_input) do
invalid_input_error()
end
defp format_error(reason) do
unexpected_error(reason)
end
defp end_of_stream_error(_acc, :binary) do
"""
An unexpected streaming error has occurred.
All data in the source has been fetched without
yet determining an image type.
Is the source actually a supported image type?
"""
end
defp end_of_stream_error(source, source_type)
when source_type in [:file, :url] do
"""
An unexpected streaming error has occurred while
streaming #{inspect(source)}.
All data from #{inspect(source)} has been fetched without
determining an image type.
Is the source actually a supported image type?
"""
end
defp streaming_error(url, hackney_reason) do
"""
An unexpected http streaming error has occurred while
streaming url #{url}.
Hackney reason:
#{hackney_reason}
"""
end
defp binary_streaming_error do
"""
An unexpected binary streaming error has occurred while binary streaming.
"""
end
defp file_streaming_error(filepath) do
"""
An unexpected file streaming error has occurred while
streaming file #{filepath}.
"""
end
defp unsupported_error(_acc, :binary) do
"""
The image type is currently unsupported.
Only the types #{Enum.join(@supported_types, ", ")} are currently supported by this library.
"""
end
defp unsupported_error(acc, :file) do
extension =
acc.stream_ref.path
|> Path.extname()
|> String.trim_leading(".")
"""
The image type #{extension} is currently unsupported.
Only the types #{Enum.join(@supported_types, ", ")} are currently supported by this library.
"""
end
defp unsupported_error(%Stream.Acc{} = acc, :url) do
"""
The image type is currently unsupported for url #{acc.source}.
Only the types #{Enum.join(@supported_types, ", ")} are currently supported by this library.
"""
end
defp unsupported_error(source, :file) do
extension = String.trim_leading(source, ".")
"""
The image type #{extension} is currently unsupported.
Only the types #{Enum.join(@supported_types, ", ")} are currently supported by this library.
"""
end
defp max_redirects_error(url, num_redirects, max_redirects) do
  """
  #{num_redirects} redirects were executed and the
  max_redirects threshold of #{max_redirects} has been exceeded.
  Is the image url #{url} valid and reachable?
"""
end
defp invalid_input_error do
"""
An invalid input type was found.
Fastimage expects input as a valid binary, url or file.
"""
end
defp hackney_response_error(url, status_code, reason) do
"""
An error occurred when attempting to get the size or type of the url:
#{url}.
***HTTP status code:***
#{status_code}.
***Reason:***
#{reason}
"""
end
defp unexpected_error(reason) do
"""
An unexpected error occurred.
#{inspect(reason)}
"""
end
end
# source file: lib/fastimage/errors.ex
defmodule Type.Tuple do
@moduledoc """
represents tuple types.
The associated struct has one parameter:
- `:elements` which may be a list of types, corresponding to the ordered
list of tuple element types. May also be the atom `:any` which
corresponds to the any tuple.
### Examples:
- the any tuple is `%Type.Tuple{elements: :any}`
```
iex> inspect %Type.Tuple{elements: :any}
"tuple()"
```
- generic tuples have their types as lists.
```
iex> inspect %Type.Tuple{elements: [%Type{name: :atom}, %Type{name: :integer}]}
"{atom(), integer()}"
iex> inspect %Type.Tuple{elements: [:ok, %Type{name: :integer}]}
"{:ok, integer()}"
```
### Key functions:
#### comparison
longer tuples come after shorter tuples; tuples are then ordered using cartesian
dictionary order along the elements list.
```
iex> Type.compare(%Type.Tuple{elements: []}, %Type.Tuple{elements: [:foo]})
:lt
iex> Type.compare(%Type.Tuple{elements: [:foo, 1..10]}, %Type.Tuple{elements: [:bar, 10..20]})
:gt
```
#### intersection
Tuples of different length do not intersect; the intersection is otherwise the cartesian
intersection of the elements.
```
iex> Type.intersection(%Type.Tuple{elements: []}, %Type.Tuple{elements: [:ok, %Type{name: :integer}]})
%Type{name: :none}
iex> Type.intersection(%Type.Tuple{elements: [:ok, %Type{name: :integer}]},
...> %Type.Tuple{elements: [%Type{name: :atom}, 1..10]})
%Type.Tuple{elements: [:ok, 1..10]}
```
#### union
Only tuple types of the same length can be non-trivially unioned, and then, only if
one tuple type is a subtype of the other, and they must be identical across all but
one dimension.
```
iex> Type.union(%Type.Tuple{elements: [:ok, 11..20]},
...> %Type.Tuple{elements: [:ok, 1..10]})
%Type.Tuple{elements: [:ok, 1..20]}
```
#### subtype?
A tuple type is the subtype of another if its types are subtypes of the other
across all cartesian dimensions.
```
iex> Type.subtype?(%Type.Tuple{elements: [:ok, 1..10]},
...> %Type.Tuple{elements: [%Type{name: :atom}, %Type{name: :integer}]})
true
```
#### usable_as
A tuple type is usable as another if it each of its elements are usable as
the other across all cartesian dimensions. If any element is disjoint, then
it is not usable.
```
iex> Type.usable_as(%Type.Tuple{elements: [:ok, 1..10]},
...> %Type.Tuple{elements: [%Type{name: :atom}, %Type{name: :integer}]})
:ok
iex> Type.usable_as(%Type.Tuple{elements: [:ok, %Type{name: :integer}]},
...> %Type.Tuple{elements: [%Type{name: :atom}, 1..10]})
{:maybe, [%Type.Message{type: %Type.Tuple{elements: [:ok, %Type{name: :integer}]},
target: %Type.Tuple{elements: [%Type{name: :atom}, 1..10]}}]}
iex> Type.usable_as(%Type.Tuple{elements: [:ok, %Type{name: :integer}]},
...> %Type.Tuple{elements: [:error, 1..10]})
{:error, %Type.Message{type: %Type.Tuple{elements: [:ok, %Type{name: :integer}]},
target: %Type.Tuple{elements: [:error, 1..10]}}}
```
"""
@enforce_keys [:elements]
defstruct @enforce_keys
@type t :: %__MODULE__{elements: [Type.t] | :any}
defimpl Type.Properties do
import Type, only: :macros
use Type.Helpers
alias Type.{Message, Tuple, Union}
group_compare do
def group_compare(%{elements: :any}, %Tuple{}), do: :gt
def group_compare(_, %Tuple{elements: :any}), do: :lt
def group_compare(%{elements: e1}, %{elements: e2}) when length(e1) > length(e2), do: :gt
def group_compare(%{elements: e1}, %{elements: e2}) when length(e1) < length(e2), do: :lt
def group_compare(tuple1, tuple2) do
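# Compare element-wise: the first non-:eq result is thrown to
# short-circuit the traversal; if all elements are :eq, so are the tuples.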
tuple1.elements
|> Enum.zip(tuple2.elements)
|> Enum.each(fn {t1, t2} ->
compare = Type.compare(t1, t2)
unless compare == :eq do
throw compare
end
end)
:eq
catch
compare when compare in [:gt, :lt] -> compare
end
end
usable_as do
# any tuple can be used as an any tuple
def usable_as(_, %Tuple{elements: :any}, _meta), do: :ok
# the any tuple maybe can be used as any tuple
def usable_as(challenge = %{elements: :any}, target = %Tuple{}, meta) do
{:maybe, [Message.make(challenge, target, meta)]}
end
def usable_as(challenge = %{elements: ce}, target = %Tuple{elements: te}, meta)
when length(ce) == length(te) do
ce
|> Enum.zip(te)
|> Enum.map(fn {c, t} -> Type.usable_as(c, t, meta) end)
|> Enum.reduce(&Type.ternary_and/2)
|> case do
:ok -> :ok
# TODO: make our type checking nested, should be possible here.
{:maybe, _} -> {:maybe, [Message.make(challenge, target, meta)]}
{:error, _} -> {:error, Message.make(challenge, target, meta)}
end
end
end
intersection do
def intersection(%{elements: :any}, b = %Tuple{}), do: b
def intersection(a, %Tuple{elements: :any}), do: a
def intersection(%{elements: e1}, %Tuple{elements: e2}) when length(e1) == length(e2) do
elements = e1
|> Enum.zip(e2)
|> Enum.map(fn {t1, t2} ->
case Type.intersection(t1, t2) do
builtin(:none) -> throw :mismatch
any -> any
end
end)
%Tuple{elements: elements}
catch
:mismatch ->
builtin(:none)
end
end
subtype do
# can't simply forward to usable_as, because any of the encapsulated
# types might have a usable_as rule that isn't strictly subtype?
def subtype?(_tuple_type, %Tuple{elements: :any}), do: true
# same nonempty is okay
def subtype?(%{elements: el_c}, %Tuple{elements: el_t})
when length(el_c) == length(el_t) do
el_c
|> Enum.zip(el_t)
|> Enum.all?(fn {c, t} -> Type.subtype?(c, t) end)
end
def subtype?(tuple, %Union{of: types}) do
Enum.any?(types, &Type.subtype?(tuple, &1))
end
end
end
defimpl Inspect do
import Type, only: :macros
def inspect(%{elements: :any}, _opts) do
"tuple()"
end
def inspect(%{elements: [builtin(:module), builtin(:atom), 0..255]}, _opts) do
"mfa()"
end
def inspect(%{elements: elements}, opts) do
elements
|> List.to_tuple()
|> Inspect.inspect(opts)
end
end
end
# source file: lib/type/tuple.ex
defmodule Plymio.Funcio.Enum.ValueAt do
@moduledoc ~S"""
Functions for Specific Values in an Enum.
See `Plymio.Funcio` for overview and other documentation terms.
## Documentation Terms
In the documentation below these terms, usually in *italics*, are used to mean the same thing.
### *index*, *indices* and *index range*
See `Plymio.Funcio.Index`
"""
import Plymio.Fontais.Result,
only: [
normalise1_result: 1
]
import Plymio.Funcio.Map.Utility,
only: [
reduce_map1_funs: 1
]
import Plymio.Funcio.Index,
only: [
normalise_index_range: 1
]
import Plymio.Funcio.Enum.Index,
only: [
create_predicate_index_range_enum: 2,
normalise_index_range_enum: 2,
validate_index_enum: 2
]
import Plymio.Funcio.Enum.Utility,
only: [
enum_reify: 1
]
@type error :: Plymio.Funcio.error()
@type index :: Plymio.Funcio.index()
@type indices :: Plymio.Funcio.indices()
@doc ~S"""
`map_value_at_enum/3` takes an *enum*, an *index range* and a
*map/1*.
For each *index* in the *index range*, it calls the *map/1* with the
current value and then splices the "listified" new value.
The *map/1* must return `{:ok, any}`, `{:error, error}` or `value`
(i.e. a *pattern1* result - See `Plymio.Funcio`)
If any mapped element returns `{:error, error}` the mapping is halted and the `{:error, error}` returned.
## Examples
iex> [1,2,3] |> map_value_at_enum(2, fn v -> v * v end)
{:ok, [1,2,9]}
iex> {:error, error} = [1,2,3] |> map_value_at_enum(0,
...> fn v -> {:error, %ArgumentError{message: "value is #{inspect v}"}} end)
...> error |> Exception.message
"value is 1"
iex> [1,2,3] |> map_value_at_enum(0 .. 2, fn v -> {:ok, v * v} end)
{:ok, [1,4,9]}
iex> [1,2,3] |> map_value_at_enum(0 .. 2,
...> [fn v -> v + 1 end, fn v -> v * v end, fn v -> v - 1 end])
{:ok, [3,8,15]}
iex> [1,2,3] |> map_value_at_enum([0, -1], fn v -> [:a,v] end)
{:ok, [:a, 1, 2, :a, 3]}
iex> [] |> map_value_at_enum(0, fn v -> v end)
{:ok, []}
iex> [1,2,3] |> map_value_at_enum(nil, fn _ -> :a end)
{:ok, [:a, :a, :a]}
iex> {:error, error} = 42 |> map_value_at_enum(0, fn v -> v end)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
iex> {:error, error} = [1,2,3] |> map_value_at_enum(:not_an_index, fn v -> v end)
...> error |> Exception.message
"index range invalid, got: :not_an_index"
"""
@since "0.1.0"
@spec map_value_at_enum(any, any, any) :: {:ok, list} | {:error, error}
def map_value_at_enum(enum, index_range, fun_map)
def map_value_at_enum([], _index, _fun_map) do
{:ok, []}
end
def map_value_at_enum(state, index_range, fun_map) do
with {:ok, state} <- state |> enum_reify,
{:ok, fun_map} <- [fun_map, &normalise1_result/1] |> reduce_map1_funs,
{:ok, fun_pred} <- state |> create_predicate_index_range_enum(index_range) do
state
|> Stream.with_index()
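# Walk the enum with indices: values whose index matches the range are
# mapped and spliced in (listified); all others pass through unchanged.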
|> Enum.reduce_while(
[],
fn {value, index}, state ->
{value, index}
|> fun_pred.()
|> case do
x when x in [nil, false] ->
{:cont, state ++ List.wrap(value)}
x when x in [true] ->
with {:ok, new_value} <- value |> fun_map.() do
{:cont, state ++ List.wrap(new_value)}
else
{:error, %{__struct__: _}} = result -> {:halt, result}
end
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
state when is_list(state) -> {:ok, state}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`map_with_index_value_at_enum/3` takes an *enum*, an *index range* and a *map/1*.
For each *index* in the *index range*, it calls the *map/1* with the
current `{value, index}` 2tuple and then splices the "listified" new
value.
The *map/1* must return `{:ok, any}`, `{:error, error}` or `value`
(i.e. a *pattern1* result - See `Plymio.Funcio`)
If any mapped element returns `{:error, error}` the mapping is
halted and the `{:error, error}` returned.
## Examples
iex> [1,2,3] |> map_with_index_value_at_enum(2, fn {v,_i} -> v * v end)
{:ok, [1,2,9]}
iex> {:error, error} = [1,2,3] |> map_with_index_value_at_enum(0,
...> fn {v,_i} -> {:error, %ArgumentError{message: "value is #{inspect v}"}} end)
...> error |> Exception.message
"value is 1"
iex> [1,2,3] |> map_with_index_value_at_enum(0 .. 2, fn {v,i} -> {:ok, v + i} end)
{:ok, [1,3,5]}
iex> [1,2,3] |> map_with_index_value_at_enum(0 .. 2,
...> [fn {v,i} -> v + i end, fn v -> v * v end, fn v -> v - 1 end])
{:ok, [0,8,24]}
iex> [1,2,3] |> map_with_index_value_at_enum([0, -1], fn {v,_i} -> [:a,v] end)
{:ok, [:a, 1, 2, :a, 3]}
iex> [] |> map_with_index_value_at_enum(0, fn v -> v end)
{:ok, []}
iex> [1,2,3] |> map_with_index_value_at_enum(nil, fn _ -> :a end)
{:ok, [:a, :a, :a]}
iex> {:error, error} = 42 |> map_with_index_value_at_enum(0, fn v -> v end)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
iex> {:error, error} = [1,2,3] |> map_with_index_value_at_enum(:not_an_index, fn v -> v end)
...> error |> Exception.message
"index range invalid, got: :not_an_index"
iex> {:error, error} = [1,2,3] |> map_with_index_value_at_enum(0, :not_a_fun)
...> error |> Exception.message
"map/1 function invalid, got: :not_a_fun"
"""
@since "0.1.0"
@spec map_with_index_value_at_enum(any, any, any) :: {:ok, list} | {:error, error}
def map_with_index_value_at_enum(derivable_list, index_range, fun_map)
def map_with_index_value_at_enum([], _index, _fun_map) do
{:ok, []}
end
def map_with_index_value_at_enum(state, index_range, fun_map) do
with {:ok, state} <- state |> enum_reify,
{:ok, fun_map} <- [fun_map, &normalise1_result/1] |> reduce_map1_funs,
{:ok, fun_pred} <- state |> create_predicate_index_range_enum(index_range) do
state
|> Stream.with_index()
|> Enum.reduce_while(
[],
fn {value, index}, state ->
{value, index}
|> fun_pred.()
|> case do
x when x in [nil, false] ->
{:cont, state ++ List.wrap(value)}
x when x in [true] ->
with {:ok, new_value} <- {value, index} |> fun_map.() do
{:cont, state ++ List.wrap(new_value)}
else
{:error, %{__struct__: _}} = result -> {:halt, result}
end
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
state when is_list(state) -> {:ok, state}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`insert_value_at_enum/3` takes an *enum*, and *index range* and a value or list of values.
It splices the "listified" (new) values at each index in the *index range*.
If the *index range* is `:append`, the "listified" value is appended to the derived list.
## Examples
iex> [1,2,3] |> insert_value_at_enum(0, :a)
{:ok, [:a, 1, 2, 3]}
iex> [1,2,3] |> insert_value_at_enum(nil, :a)
{:ok, [:a, 1, :a, 2, :a, 3]}
iex> [1,2,3] |> insert_value_at_enum(:append, [:a, :b, :c])
{:ok, [1, 2, 3, :a, :b, :c]}
iex> [1,2,3] |> insert_value_at_enum(0, [:a, :b, :c])
{:ok, [:a, :b, :c, 1, 2, 3]}
iex> [1,2,3] |> insert_value_at_enum(0 .. 2, :a)
{:ok, [:a, 1, :a, 2, :a, 3]}
iex> [1,2,3] |> insert_value_at_enum([0, -1], :a)
{:ok, [:a, 1, 2, :a, 3]}
iex> [1,2,3] |> insert_value_at_enum([0, -1], [:a,:b,:c])
{:ok, [:a, :b, :c, 1, 2, :a, :b, :c, 3]}
iex> [] |> insert_value_at_enum(0, :a)
{:ok, [:a]}
iex> %{a: 1, b: 2, c: 3} |> insert_value_at_enum(1, :x)
{:ok, [{:a, 1}, :x, {:b, 2}, {:c, 3}]}
iex> %{a: 1, b: 2, c: 3} |> insert_value_at_enum(1, [x: 10, y: 11, z: 12])
{:ok, [a: 1, x: 10, y: 11, z: 12, b: 2, c: 3]}
iex> [] |> insert_value_at_enum(-1, :a)
{:ok, [:a]}
iex> [1,:b,3] |> insert_value_at_enum(-1, :d)
{:ok, [1,:b,:d,3]}
iex> {:error, error} = 42 |> insert_value_at_enum(0, :a)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
iex> {:error, error} = [1,2,3] |> insert_value_at_enum(:not_an_index, :a)
...> error |> Exception.message
"index range invalid, got: :not_an_index"
"""
@since "0.1.0"
@spec insert_value_at_enum(any, any, any) :: {:ok, list} | {:error, error}
def insert_value_at_enum(derivable_list, index_range, value)
def insert_value_at_enum([], _index_range, value) do
{:ok, value |> List.wrap()}
end
def insert_value_at_enum(state, :append, value) when is_list(state) do
with {:ok, state} <- state |> enum_reify do
{:ok, state ++ List.wrap(value)}
else
{:error, %{__exception__: true}} = result -> result
end
end
def insert_value_at_enum(state, index_range, value) do
entries =
value
|> case do
x when is_list(x) -> x
x -> [x]
end
with {:ok, state} <- state |> enum_reify,
{:ok, fun_pred} when is_function(fun_pred, 1) <-
state
|> create_predicate_index_range_enum(index_range) do
state
|> Stream.with_index()
|> Enum.reduce_while(
[],
fn {value, index}, state ->
{value, index}
|> fun_pred.()
|> case do
x when x in [nil, false] ->
{:cont, state ++ List.wrap(value)}
x when x in [true] ->
{:cont, state ++ entries ++ [value]}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
state when is_list(state) -> {:ok, state}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`delete_value_at_enum/3` takes an *enum*, and *index range*
and deletes the elements in the *index range*.
Note: If the *index range* is `nil`, the derived list is emptied returning `{:ok, []}`.
## Examples
iex> [1,2,3] |> delete_value_at_enum(0)
{:ok, [2,3]}
iex> [1,2,3] |> delete_value_at_enum(0 .. 2)
{:ok, []}
iex> [1,2,3] |> delete_value_at_enum(nil)
{:ok, []}
iex> [1,2,3] |> delete_value_at_enum([0, -1])
{:ok, [2]}
iex> [] |> delete_value_at_enum(0)
{:ok, []}
iex> %{a: 1, b: 2, c: 3} |> delete_value_at_enum(1)
{:ok, [a: 1, c: 3]}
iex> {:error, error} = 42 |> delete_value_at_enum(0)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
iex> {:error, error} = [1,2,3] |> delete_value_at_enum(:not_an_index)
...> error |> Exception.message
"index range invalid, got: :not_an_index"
"""
@since "0.1.0"
@spec delete_value_at_enum(any, any) :: {:ok, list} | {:error, error}
def delete_value_at_enum(derivable_list, index_range)
def delete_value_at_enum([], _index) do
{:ok, []}
end
def delete_value_at_enum(state, nil) when is_list(state) do
{:ok, []}
end
def delete_value_at_enum(state, index_range) do
with {:ok, state} <- state |> enum_reify,
{:ok, fun_pred} <- state |> create_predicate_index_range_enum(index_range) do
state
|> Stream.with_index()
|> Enum.reduce_while(
[],
fn {value, index}, state ->
{value, index}
|> fun_pred.()
|> case do
# keep the value
x when x in [nil, false] ->
{:cont, state ++ List.wrap(value)}
# drop the value
x when x in [true] ->
{:cont, state}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
state when is_list(state) -> {:ok, state}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`replace_value_at_enum/3` takes an *enum*, an *index range* and a new value.
The new value is "listified" by calling `List.wrap/1`.
For each *index* in the *index range*, it deletes the current value and
then splices the "listified" new value.
## Examples
iex> [1,2,3] |> replace_value_at_enum(0, :a)
{:ok, [:a,2,3]}
iex> [1,2,3] |> replace_value_at_enum(0 .. 2, :a)
{:ok, [:a, :a, :a]}
iex> [1,2,3] |> replace_value_at_enum([0, -1], :a)
{:ok, [:a, 2, :a]}
iex> [] |> replace_value_at_enum(0, :a)
{:ok, []}
iex> [1,2,3] |> replace_value_at_enum(nil, :a)
{:ok, [:a, :a, :a]}
iex> [1,2,3] |> replace_value_at_enum(nil, [:a, :b, :c])
{:ok, [:a, :b, :c, :a, :b, :c, :a, :b, :c]}
iex> %{a: 1, b: 2, c: 3} |> replace_value_at_enum(1, :x)
{:ok, [{:a, 1}, :x, {:c, 3}]}
iex> %{a: 1, b: 2, c: 3} |> replace_value_at_enum(1, [x: 10, y: 11, z: 12])
{:ok, [a: 1, x: 10, y: 11, z: 12, c: 3]}
iex> [1,:b,3] |> replace_value_at_enum(-1, :d)
{:ok, [1,:b,:d]}
iex> {:error, error} = 42 |> replace_value_at_enum(0, :a)
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for 42")
true
iex> {:error, error} = [1,2,3] |> replace_value_at_enum(:not_an_index, :a)
...> error |> Exception.message
"index range invalid, got: :not_an_index"
"""
@since "0.1.0"
@spec replace_value_at_enum(any, any, any) :: {:ok, list} | {:error, error}
def replace_value_at_enum(derivable_list, index_range, value)
def replace_value_at_enum([], _index, _value) do
{:ok, []}
end
def replace_value_at_enum(state, index_range, value) do
entries =
value
|> case do
x when is_list(x) -> x
x -> [x]
end
with {:ok, state} <- state |> enum_reify,
{:ok, fun_pred} <- state |> create_predicate_index_range_enum(index_range) do
state
|> Stream.with_index()
|> Enum.reduce_while(
[],
fn {value, index}, state ->
{value, index}
|> fun_pred.()
|> case do
x when x in [nil, false] ->
{:cont, state ++ List.wrap(value)}
x when x in [true] ->
{:cont, state ++ entries}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
state when is_list(state) -> {:ok, state}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`fetch_value_at_enum/2` takes a *derivable list* and an *index range*, and
returns the `value` at each index as `{:ok, values}`.
An unknown or invalid *index* will cause an error.
Values are returned in the same order as the *index range*. Indices may be
repeated.
If the *index range* is `nil`, all values will be returned.
## Examples
iex> [1,2,3] |> fetch_value_at_enum
{:ok, [1, 2, 3]}
iex> [1,2,3] |> fetch_value_at_enum(0)
{:ok, [1]}
iex> {:error, error} = [] |> fetch_value_at_enum(0)
...> error |> Exception.message
"index invalid, got: 0"
iex> [1,2,3] |> fetch_value_at_enum(1 .. 2)
{:ok, [2, 3]}
iex> [1,2,3] |> fetch_value_at_enum([2, 2, 2])
{:ok, [3, 3, 3]}
iex> [1,2,3] |> fetch_value_at_enum([1 .. 2, 0, 0 .. 2])
{:ok, [2, 3, 1, 1, 2, 3]}
iex> {:error, error} = [1,2,3] |> fetch_value_at_enum(99)
...> error |> Exception.message
"index invalid, got: 99"
iex> {:error, error} = [1,2,3] |> fetch_value_at_enum([99, 123])
...> error |> Exception.message
"indices invalid, got: [99, 123]"
iex> {:error, error} = [1,2,3] |> fetch_value_at_enum([:not_an_index, 99])
...> error |> Exception.message
"index invalid, got: :not_an_index"
"""
@since "0.1.0"
@spec fetch_value_at_enum(any, any) :: {:ok, list} | {:error, error}
def fetch_value_at_enum(derivable_list, index_range \\ nil)
def fetch_value_at_enum(state, nil) do
{:ok, state}
end
def fetch_value_at_enum(state, index_range) do
with {:ok, state} <- state |> enum_reify,
{:ok, range_indices} <- state |> normalise_index_range_enum(index_range) do
{:ok, range_indices |> Enum.map(fn index -> state |> Enum.at(index) end)}
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc ~S"""
`get_value_at_enum/2` takes a *derivable list*, an *index range*, and a default value.
For each *index* in the *index range* it checks if the *index* is in the list and gets its value if so; otherwise the default is used.
It returns `{:ok, values}`.
Values are returned in the same order as the *index range*. Indices may be repeated.
If the *index range* is `nil`, all values will be returned.
> Note there is no constraint on (size of) the *index range*; all unknown *indices* will use the default and could cause the `values` to be a large list.
## Examples
iex> [1,2,3] |> get_value_at_enum
{:ok, [1, 2, 3]}
iex> [1,2,3] |> get_value_at_enum(0)
{:ok, [1]}
iex> [] |> get_value_at_enum(0, 42)
{:ok, [42]}
iex> [1,2,3] |> get_value_at_enum(99, 42)
{:ok, [42]}
iex> [1,2,3] |> get_value_at_enum([0, 3, 4, -1], 42)
{:ok, [1, 42, 42, 3]}
iex> [1,2,3] |> get_value_at_enum(1 .. 2)
{:ok, [2, 3]}
iex> [1,2,3] |> get_value_at_enum([2, 2, 2])
{:ok, [3, 3, 3]}
iex> [1,2,3] |> get_value_at_enum([1 .. 2, 0, 0 .. 2], 42)
{:ok, [2, 3, 1, 1, 2, 3]}
iex> {:error, error} = [1,2,3] |> get_value_at_enum([:not_an_index, 99])
...> error |> Exception.message
"index invalid, got: :not_an_index"
"""
@since "0.1.0"
@spec get_value_at_enum(any, any, any) :: {:ok, list} | {:error, error}
def get_value_at_enum(derivable_list, index_range \\ nil, default \\ nil)
def get_value_at_enum(state, nil, _default) do
{:ok, state}
end
def get_value_at_enum(state, index_range, default) do
with {:ok, state} <- state |> enum_reify,
{:ok, range_indices} <- index_range |> normalise_index_range do
state_map = state |> Stream.with_index() |> Map.new(fn {v, i} -> {i, v} end)
range_indices
|> Enum.reduce(
[],
fn index, values ->
state
|> validate_index_enum(index)
|> case do
{:ok, index} ->
[Map.get(state_map, index) | values]
{:error, %{__struct__: _}} ->
[default | values]
end
end
)
|> case do
indices ->
{:ok, indices |> Enum.reverse()}
end
else
{:error, %{__exception__: true}} = result -> result
end
end
end
# source file: lib/funcio/enum/value_at/value_at.ex
defmodule BoggleEngine.Board do
@moduledoc """
Functions to create and interact with Boggle board.
"""
alias BoggleEngine.Board
alias BoggleEngine.Board.DiceSet
alias BoggleEngine.Neighbor
alias BoggleEngine.Utilities
@boggle_set "../../resource/boggle.txt" |> Path.expand(__DIR__) |> DiceSet.from_file()
@big_boggle_set "../../resource/big_boggle.txt" |> Path.expand(__DIR__) |> DiceSet.from_file()
@super_big_boggle_set "../../resource/super_big_boggle.txt" |> Path.expand(__DIR__) |> DiceSet.from_file()
@dice_sets %{boggle: @boggle_set, big_boggle: @big_boggle_set, super_big_boggle: @super_big_boggle_set}
@board_sizes %{boggle: 4, big_boggle: 5, super_big_boggle: 6}
defstruct [:configuration, :version]
@type t :: %__MODULE__{
configuration: %{required(position) => tile},
version: version
}
@type version :: :boggle | :big_boggle | :super_big_boggle
@type tile :: String.grapheme | String.t
@type position :: Neighbor.position
@type size :: 4 | 5 | 6
@doc """
Creates a random board based on version.
"""
@spec new_board(version) :: t
def new_board(version) do
version
|> roll_dice()
|> from_list(version)
end
@doc """
Creates a board from configuration string. Each board position is defined by
an uppercase letter followed by optional lowercase letters. Configuration will
be truncated if longer than board. Configuration will have blanks appended if
shorter than board.
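For example (a sketch; `Qu` is a single multi-letter tile):

    from_string("ABCDEFGHIJKLMNOQu", :boggle)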
"""
@spec from_string(String.t, version) :: t
def from_string(string, version) do
string
|> Utilities.chunk_string_on_uppercase()
|> from_list(version)
end
@doc """
Creates a board from configuration list of strings. Configuration will be
truncated if longer than board. Configuration will have blanks appended if
shorter than board.
"""
@spec from_list([tile], version) :: t
def from_list(list, version) do
size = get_size(version)
configuration =
list
|> Utilities.fit_list(size * size, "#")
|> Utilities.list_to_map_with_index()
%Board{configuration: configuration, version: version}
end
@doc """
Gets board configuration as a string.
"""
@spec to_string(t) :: String.t
def to_string(board = %Board{}) do
board
|> to_list()
|> List.to_string()
end
@doc """
Gets board configuration as a list of strings.
"""
@spec to_list(t) :: [tile]
def to_list(%Board{configuration: configuration, version: version}) do
size = get_size(version)
for position <- 0..(size * size - 1) do
configuration[position]
end
end
@doc """
Gets value based on board position.
"""
@spec get_value(t, position) :: tile
def get_value(%Board{configuration: configuration}, position) do
configuration[position]
end
@doc """
Gets board size.
"""
@spec get_size(t | version) :: size
def get_size(%Board{version: version}) do
get_size(version)
end
def get_size(version) do
@board_sizes[version]
end
@doc """
Gets version.
"""
@spec get_version(t) :: version
def get_version(%Board{version: version}) do
version
end
# Creates a random configuration based on version.
@spec roll_dice(version) :: [tile]
defp roll_dice(version) do
@dice_sets[version]
|> Enum.map(&Enum.random/1)
|> Enum.shuffle()
end
@doc """
Verifies board is a valid Boggle game. Accepts `%Board{}`, string, or list of
strings.
"""
@spec valid_board?(t | String.t | [tile]) :: boolean
def valid_board?(board = %Board{}) do
board
|> to_list()
|> valid_board?()
end
def valid_board?(string) when is_binary(string) do
string
|> Utilities.chunk_string_on_uppercase()
|> valid_board?()
end
def valid_board?(list) when is_list(list) do
length = length(list)
if length not in [16, 25, 36] do
false
else
version =
case length do
16 -> :boggle
25 -> :big_boggle
36 -> :super_big_boggle
end
dice =
@dice_sets[version]
|> Enum.map(&MapSet.new/1)
verify_configuration(list, dice, length)
end
end
# Verifies configuration is valid by determining whether there are any
# mismatches between configuration and dice.
@spec verify_configuration([tile], [MapSet.t], integer) :: boolean
defp verify_configuration(configuration, dice, length) do
# Runs matching problem in a separate process to avoid manually handling
# garbage collection.
{matches, _flow_details} =
Task.async(fn -> match_faces_to_dice(configuration, dice, length) end)
|> Task.await()
# Configuration is only valid if there are no mismatches.
matches == length
end
# Generates list of maximum matches of faces and dice. This problem can be
# structured as a bipartite graph and solved by finding the maximum flow.
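# The graph is source -> face vertices -> compatible die vertices -> sink; a
# maximum flow equal to the board length means every face can be assigned a
# distinct die that shows it.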
@spec match_faces_to_dice([tile], [MapSet.t], integer) :: :graph_lib.flow | (error :: {charlist, charlist})
defp match_faces_to_dice(faces, dice, length) do
graph = build_base_graph(length)
# Adds an edge to graph for each face/die combination where face is on die.
for {face, face_index} <- Enum.with_index(faces),
{die, die_index} <- Enum.with_index(dice),
MapSet.member?(die, face) do
face_vertex = "f" <> Utilities.integer_to_string_with_padding(face_index + 1, 2)
die_vertex = "d" <> Utilities.integer_to_string_with_padding(die_index + 1, 2)
:graph.add_edge(graph, face_vertex, die_vertex)
end
# Solves for maximum flow. The `:dfs` mode uses the Ford-Fulkerson algorithm.
:edmonds_karp.run(graph, "source", "sink", :dfs)
end
# Builds base graph specified by length.
@spec build_base_graph(integer) :: :graph.graph
defp build_base_graph(length) do
# Creates empty graph, adds source vertex, and adds sink vertex
graph = :graph.empty(:directed, :d)
:graph.add_vertex(graph, "source")
:graph.add_vertex(graph, "sink")
# Adds face vertices, die vertices, source to face edges, and die to sink
# edges.
for index <- 1..length do
formatted_index = Utilities.integer_to_string_with_padding(index, 2)
face_vertex = "f" <> formatted_index
die_vertex = "d" <> formatted_index
:graph.add_vertex(graph, face_vertex)
:graph.add_vertex(graph, die_vertex)
:graph.add_edge(graph, "source", face_vertex)
:graph.add_edge(graph, die_vertex, "sink")
end
graph
end
end
lib/boggle_engine/board.ex
defmodule Coxir.Storage.Default do
@moduledoc """
Stores models in ETS.
"""
use Coxir.Storage
use GenServer
@server __MODULE__
@table __MODULE__
def start_link(state) do
GenServer.start_link(__MODULE__, state, name: @server)
end
def init(state) do
:ets.new(@table, [:named_table, :protected, {:read_concurrency, true}])
{:ok, state}
end
def handle_call({:create_table, model}, _from, state) do
table =
with nil <- lookup_table(model) do
table = :ets.new(model, [:public])
:ets.insert(@table, {model, table})
table
end
{:reply, table, state}
end
def put(%model{} = struct) do
table = get_table(model)
key = get_key(struct)
struct =
case :ets.lookup(table, key) do
[record] ->
stored = from_record(model, record)
merge(stored, struct)
_none ->
struct
end
record = to_record(struct)
:ets.insert(table, record)
struct
end
def all(model) do
model
|> get_table()
|> :ets.tab2list()
|> Enum.map(&from_record(model, &1))
end
def all_by(model, clauses) do
pattern = get_pattern(model, clauses)
model
|> get_table()
|> :ets.match_object(pattern)
|> Enum.map(&from_record(model, &1))
end
def get(model, key) do
record =
model
|> get_table()
|> :ets.lookup(key)
|> List.first()
if record do
from_record(model, record)
end
end
def get_by(model, clauses) do
table = get_table(model)
pattern = get_pattern(model, clauses)
case :ets.match_object(table, pattern, 1) do
{[record], _continuation} ->
from_record(model, record)
_other ->
nil
end
end
def delete(model, key) do
model
|> get_table()
|> :ets.delete(key)
:ok
end
def delete_by(model, clauses) do
pattern = get_pattern(model, clauses)
model
|> get_table()
|> :ets.match_delete(pattern)
:ok
end
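  # Builds an :ets.match_object/2 pattern: the key position is a wildcard and
  # each field is either the value required by `clauses` or the :_ wildcard.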
defp get_pattern(model, clauses) do
fields = get_fields(model)
pattern =
Enum.map(
fields,
fn name ->
Keyword.get(clauses, name, :_)
end
)
List.to_tuple([:_ | pattern])
end
defp to_record(struct) do
key = get_key(struct)
values = get_values(struct)
List.to_tuple([key | values])
end
defp from_record(model, record) do
[_key | values] = Tuple.to_list(record)
fields = get_fields(model)
params = Enum.zip(fields, values)
struct(model, params)
end
defp get_table(model) do
with nil <- lookup_table(model) do
GenServer.call(@server, {:create_table, model})
end
end
defp lookup_table(model) do
case :ets.lookup(@table, model) do
[{_model, table}] ->
table
_none ->
nil
end
end
end
lib/coxir/adapters/storage/default.ex
defmodule PartitionSupervisor do
@moduledoc """
A supervisor that starts multiple partitions of the same child.
Certain processes may become bottlenecks in large systems.
If those processes can have their state trivially partitioned,
in a way there is no dependency between them, then they can use
the `PartitionSupervisor` to create multiple isolated and
independent partitions.
Once the `PartitionSupervisor` starts, you can dispatch to its
children using `{:via, PartitionSupervisor, {name, key}}`, where
`name` is the name of the `PartitionSupervisor` and key is used
for routing.
## Example
The `DynamicSupervisor` is a single process responsible for starting
other processes. In some applications, the `DynamicSupervisor` may
become a bottleneck. To address this, you can start multiple instances
of the `DynamicSupervisor` through a `PartitionSupervisor`, and then
pick a "random" instance to start the child on.
Instead of starting a single `DynamicSupervisor`:
children = [
{DynamicSupervisor, name: MyApp.DynamicSupervisor}
]
Supervisor.start_link(children, strategy: :one_for_one)
and starting children on that dynamic supervisor directly:
DynamicSupervisor.start_child(MyApp.DynamicSupervisor, {Agent, fn -> %{} end})
You can instead start the dynamic supervisors under a `PartitionSupervisor`:
children = [
{PartitionSupervisor,
child_spec: DynamicSupervisor,
name: MyApp.DynamicSupervisors}
]
Supervisor.start_link(children, strategy: :one_for_one)
and then:
DynamicSupervisor.start_child(
{:via, PartitionSupervisor, {MyApp.DynamicSupervisors, self()}},
{Agent, fn -> %{} end}
)
In the code above, we start a partition supervisor that will by default
start a dynamic supervisor for each core in your machine. Then, instead
of calling the `DynamicSupervisor` by name, you call it through the
partition supervisor using the `{:via, PartitionSupervisor, {name, key}}`
format. We picked `self()` as the routing key, which means each process
will be assigned one of the existing dynamic supervisors. See `start_link/1`
to see all options supported by the `PartitionSupervisor`.
## Implementation notes
The `PartitionSupervisor` uses either an ETS table or a `Registry` to
manage all of the partitions. Under the hood, the `PartitionSupervisor`
generates a child spec for each partition and then acts as a regular
supervisor. The ID of each child spec is the partition number.
For routing, two strategies are used. If `key` is an integer, it is routed
using `rem(abs(key), partitions)` where `partitions` is the number of
partitions. Otherwise it uses `:erlang.phash2(key, partitions)`.
The particular routing may change in the future, and therefore must not
be relied on. If you want to retrieve a particular PID for a certain key,
you can use `GenServer.whereis({:via, PartitionSupervisor, {name, key}})`.
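For example, with 8 partitions, an integer key of `13` is routed to partition
`rem(abs(13), 8) = 5`, while a non-integer key such as `self()` is routed
through `:erlang.phash2/2`.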
"""
@behaviour Supervisor
@registry PartitionSupervisor.Registry
@typedoc """
The name of the `PartitionSupervisor`.
"""
@type name :: atom() | {:via, module(), term()}
@doc false
def child_spec(opts) when is_list(opts) do
id =
case Keyword.get(opts, :name, DynamicSupervisor) do
name when is_atom(name) -> name
{:via, _module, name} -> name
end
%{
id: id,
start: {PartitionSupervisor, :start_link, [opts]},
type: :supervisor
}
end
@doc """
Starts a partition supervisor with the given options.
This function is typically not invoked directly, instead it is invoked
when using a `PartitionSupervisor` as a child of another supervisor:
children = [
{PartitionSupervisor, child_spec: SomeChild, name: MyPartitionSupervisor}
]
If the supervisor is successfully spawned, this function returns
`{:ok, pid}`, where `pid` is the PID of the supervisor. If the given name
for the partition supervisor is already assigned to a process,
the function returns `{:error, {:already_started, pid}}`, where `pid`
is the PID of that process.
Note that a supervisor started with this function is linked to the parent
process and exits not only on crashes but also if the parent process exits
with `:normal` reason.
## Options
* `:name` - an atom or via tuple representing the name of the partition
supervisor (see `t:name/0`).
* `:partitions` - a positive integer with the number of partitions.
Defaults to `System.schedulers_online()` (typically the number of cores).
* `:strategy` - the restart strategy option, defaults to `:one_for_one`.
You can learn more about strategies in the `Supervisor` module docs.
* `:max_restarts` - the maximum number of restarts allowed in
a time frame. Defaults to `3`.
* `:max_seconds` - the time frame in which `:max_restarts` applies.
Defaults to `5`.
* `:with_arguments` - a two-argument anonymous function that allows
the partition to be given to the child starting function. See the
`:with_arguments` section below.
## `:with_arguments`
Sometimes you want each partition to know its assigned partition number.
This can be done with the `:with_arguments` option. This function receives
the list of arguments of the child specification and the partition. It
must return a new list of arguments that will be used to start the child of
the given partition.
For example, most processes are started by calling `start_link(opts)`,
where `opts` is a keyword list. You could inject the partition into the
options given to the child:
with_arguments: fn [opts], partition ->
[Keyword.put(opts, :partition, partition)]
end
"""
@doc since: "1.14.0"
@spec start_link(keyword) :: Supervisor.on_start()
def start_link(opts) when is_list(opts) do
name = opts[:name]
unless name do
raise ArgumentError, "the :name option must be given to PartitionSupervisor"
end
{child_spec, opts} = Keyword.pop(opts, :child_spec)
unless child_spec do
raise ArgumentError, "the :child_spec option must be given to PartitionSupervisor"
end
{partitions, opts} = Keyword.pop(opts, :partitions, System.schedulers_online())
unless is_integer(partitions) and partitions >= 1 do
raise ArgumentError,
"the :partitions option must be a positive integer, got: #{inspect(partitions)}"
end
{with_arguments, opts} = Keyword.pop(opts, :with_arguments, fn args, _partition -> args end)
unless is_function(with_arguments, 2) do
raise ArgumentError,
"the :with_arguments option must be a function that receives two arguments, " <>
"the current call arguments and the partition, got: #{inspect(with_arguments)}"
end
%{start: {mod, fun, args}} = map = Supervisor.child_spec(child_spec, [])
modules = map[:modules] || [mod]
children =
for partition <- 0..(partitions - 1) do
args = with_arguments.(args, partition)
unless is_list(args) do
raise "the call to the function in :with_arguments must return a list, got: #{inspect(args)}"
end
start = {__MODULE__, :start_child, [mod, fun, args, name, partition]}
Map.merge(map, %{id: partition, start: start, modules: modules})
end
{init_opts, start_opts} = Keyword.split(opts, [:strategy, :max_seconds, :max_restarts])
Supervisor.start_link(__MODULE__, {name, partitions, children, init_opts}, start_opts)
end
@doc false
def start_child(mod, fun, args, name, partition) do
case apply(mod, fun, args) do
{:ok, pid} ->
register_child(name, partition, pid)
{:ok, pid}
{:ok, pid, info} ->
register_child(name, partition, pid)
{:ok, pid, info}
other ->
other
end
end
defp register_child(name, partition, pid) when is_atom(name) do
:ets.insert(name, {partition, pid})
end
defp register_child({:via, _, _}, partition, pid) do
Registry.register(@registry, {self(), partition}, pid)
end
@impl true
def init({name, partitions, children, init_opts}) do
init_partitions(name, partitions)
Supervisor.init(children, Keyword.put_new(init_opts, :strategy, :one_for_one))
end
defp init_partitions(name, partitions) when is_atom(name) do
:ets.new(name, [:set, :named_table, :protected, read_concurrency: true])
:ets.insert(name, {:partitions, partitions})
end
defp init_partitions({:via, _, _}, partitions) do
child_spec = {Registry, keys: :unique, name: @registry}
unless Process.whereis(@registry) do
Supervisor.start_child(:elixir_sup, child_spec)
end
Registry.register(@registry, self(), partitions)
end
@doc """
Returns the number of partitions for the partition supervisor.
"""
@doc since: "1.14.0"
@spec partitions(name()) :: pos_integer()
def partitions(name) do
{_name, partitions} = name_partitions(name)
partitions
end
# For whereis_name, we want to lookup on GenServer.whereis/1
# just once, so we lookup the name and partitions together.
defp name_partitions(name) when is_atom(name) do
try do
{name, :ets.lookup_element(name, :partitions, 2)}
rescue
_ -> exit({:noproc, {__MODULE__, :partitions, [name]}})
end
end
defp name_partitions(name) when is_tuple(name) do
with pid when is_pid(pid) <- GenServer.whereis(name),
[name_partitions] <- Registry.lookup(@registry, pid) do
name_partitions
else
_ -> exit({:noproc, {__MODULE__, :partitions, [name]}})
end
end
@doc """
Returns a list with information about all children.
This function returns a list of tuples containing:
* `id` - the partition number
* `child` - the PID of the corresponding child process or the
atom `:restarting` if the process is about to be restarted
* `type` - `:worker` or `:supervisor` as defined in the child
specification
* `modules` - as defined in the child specification
"""
@doc since: "1.14.0"
@spec which_children(name()) :: [
# Inlining [module()] | :dynamic here because :supervisor.modules() is not exported
{:undefined, pid | :restarting, :worker | :supervisor, [module()] | :dynamic}
]
def which_children(name) when is_atom(name) or elem(name, 0) == :via do
Supervisor.which_children(name)
end
@doc """
Returns a map containing count values for the supervisor.
The map contains the following keys:
* `:specs` - the number of partitions (children processes)
* `:active` - the count of all actively running child processes managed by
this supervisor
* `:supervisors` - the count of all supervisors whether or not the child
process is still alive
* `:workers` - the count of all workers, whether or not the child process
is still alive
"""
@doc since: "1.14.0"
@spec count_children(name()) :: %{
specs: non_neg_integer,
active: non_neg_integer,
supervisors: non_neg_integer,
workers: non_neg_integer
}
def count_children(supervisor) when is_atom(supervisor) do
Supervisor.count_children(supervisor)
end
@doc """
Synchronously stops the given partition supervisor with the given `reason`.
It returns `:ok` if the supervisor terminates with the given
reason. If it terminates with another reason, the call exits.
This function keeps OTP semantics regarding error reporting.
If the reason is any other than `:normal`, `:shutdown` or
`{:shutdown, _}`, an error report is logged.
"""
@doc since: "1.14.0"
@spec stop(name(), reason :: term, timeout) :: :ok
def stop(supervisor, reason \\ :normal, timeout \\ :infinity) when is_atom(supervisor) do
Supervisor.stop(supervisor, reason, timeout)
end
## Via callbacks
@doc false
def whereis_name({name, key}) when is_atom(name) or is_tuple(name) do
{name, partitions} = name_partitions(name)
partition =
if is_integer(key), do: rem(abs(key), partitions), else: :erlang.phash2(key, partitions)
whereis_name(name, partition)
end
defp whereis_name(name, partition) when is_atom(name) do
:ets.lookup_element(name, partition, 2)
end
defp whereis_name(name, partition) when is_pid(name) do
@registry
|> Registry.values({name, partition}, name)
|> List.first(:undefined)
end
@doc false
def send(name_key, msg) do
Kernel.send(whereis_name(name_key), msg)
end
@doc false
def register_name(_, _) do
raise "{:via, PartitionSupervisor, _} cannot be given on registration"
end
@doc false
def unregister_name(_, _) do
raise "{:via, PartitionSupervisor, _} cannot be given on unregistration"
end
end
lib/elixir/lib/partition_supervisor.ex
defmodule Numeracy.Precision do
@moduledoc """
Algorithms for computing different measures of precision
"""
import Numeracy.BasicMath
@scales [1.25, 2, 2.5, 4, 5, 7.5, 8, 10]
@semi_integer_scales [2, 2.5, 4, 5, 7.5, 8, 10]
@integer_scales [2, 4, 5, 8, 10]
@doc """
Radix - the base that your computer is in
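On binary floating-point hardware (IEEE 754) this returns `2`.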
"""
@spec radix :: integer
def radix do
a = 1.0
a = compute_radix_helper(a, 0.0)
b = 1.0
compute_radix(a, b, 0)
end
@spec compute_radix_helper(float, float) :: float
defp compute_radix_helper(a, tmp2) when tmp2 - 1.0 == 0.0, do: a
defp compute_radix_helper(a, _) do
a = a + a
tmp1 = a + 1.0
tmp2 = tmp1 - a
compute_radix_helper(a, tmp2)
end
@spec compute_radix(float, float, integer) :: integer
defp compute_radix(_, _, radix) when radix != 0, do: radix
defp compute_radix(a, b, _) do
b = b + b
tmp = a + b
compute_radix(a, b, round(tmp - a))
end
@doc """
Machine precision - the smallest increment between two successive floating point numbers
"""
@spec machine_precision(integer) :: float
def machine_precision(radix) do
inverse_radix = 1.0 / radix
machine_precision = 1.0
acc = 1.0 + machine_precision
compute_machine_precision(machine_precision, inverse_radix, acc)
end
@spec machine_precision :: float
def machine_precision, do: machine_precision(radix())
@spec compute_machine_precision(float, float, float) :: float
defp compute_machine_precision(machine_precision, _, acc) when acc - 1.0 == 0.0, do: machine_precision
defp compute_machine_precision(machine_precision, inverse_radix, _) do
machine_precision = machine_precision * inverse_radix
acc = 1.0 + machine_precision
compute_machine_precision(machine_precision, inverse_radix, acc)
end
@doc """
Negative machine precision - the smallest decrement between two successive floating point numbers
"""
@spec negative_machine_precision(integer) :: float
def negative_machine_precision(radix) do
inverse_radix = 1.0 / radix
machine_precision = 1.0
acc = 1.0 - machine_precision
compute_negative_machine_precision(machine_precision, inverse_radix, acc)
end
@spec compute_negative_machine_precision(float, float, float) :: float
defp compute_negative_machine_precision(machine_precision, _, acc) when acc - 1.0 == 0.0, do: machine_precision
defp compute_negative_machine_precision(machine_precision, inverse_radix, _) do
  machine_precision = machine_precision * inverse_radix
  acc = 1.0 - machine_precision
  compute_negative_machine_precision(machine_precision, inverse_radix, acc)
end
end
@doc """
Smallest number - the smallest representable positive number
"""
@spec smallest_number(integer, float) :: float
def smallest_number(radix, negative_machine_precision) do
inverse_radix = 1.0 / radix
full_mantissa_number = 1.0 - radix * negative_machine_precision
compute_smallest_number(full_mantissa_number, inverse_radix)
end
@spec compute_smallest_number(float, float) :: float
defp compute_smallest_number(full_mantissa_number, inverse_radix) when full_mantissa_number * inverse_radix == 0.0 do
full_mantissa_number
end
defp compute_smallest_number(full_mantissa_number, inverse_radix) do
compute_smallest_number(full_mantissa_number * inverse_radix, inverse_radix)
end
@doc """
Largest number - the largest representable positive number
"""
@spec largest_number(integer, float) :: float
def largest_number(radix, negative_machine_precision) do
full_mantissa_number = 1.0 - radix * negative_machine_precision
compute_largest_number(full_mantissa_number, radix)
end
@spec largest_number :: float
def largest_number do
radix = radix()
largest_number(radix, negative_machine_precision(radix))
end
@spec compute_largest_number(float, integer) :: float
defp compute_largest_number(full_mantissa_number, radix) do
try do
compute_largest_number(full_mantissa_number * radix, radix)
rescue
ArithmeticError ->
full_mantissa_number
end
end
@doc """
Largest exponential argument - raising a number > 1 to a power greater than this will exceed bounds
"""
@spec largest_exponential_argument(float) :: float
def largest_exponential_argument(largest_number), do: :math.log(largest_number)
@spec largest_exponential_argument :: float
def largest_exponential_argument, do: largest_exponential_argument(largest_number())
@doc """
Default precision - the relative precision that can be expected of a generic mathematical computation
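For IEEE 754 doubles the machine precision is on the order of 1.0e-16, so the
default precision is on the order of 1.0e-8.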
"""
@spec default_precision(float) :: float
def default_precision(machine_precision), do: :math.sqrt(machine_precision)
@spec default_precision :: float
def default_precision, do: default_precision(machine_precision())
@doc """
Determine if numbers are within a given precision
"""
@spec equal(number, number, float) :: boolean
def equal(a, b, precision) do
norm = max(abs(a), abs(b))
norm < precision || abs(a - b) < precision * norm
end
@doc """
Determine if numbers are within the default precision
"""
@spec equal(number, number) :: boolean
def equal(a, b) do
precision = default_precision()
equal(a, b, precision)
end
@doc """
Perform float rounding to a specified precision
"""
@spec round_to(number, float) :: float
def round_to(value, scale) when scale > 0 and scale < 1 do
round(value / scale) * scale
end
end
lib/numeracy/precision.ex
defmodule StateServer do
@moduledoc """
The StateServer is a module you can use in your modules to implement a simple GenServer
without the need of writing all that GenServer boilerplate.
### Example
defmodule MyModule do
# make this module a GenServer and import some macros to define calls and casts.
use StateServer
# define the init block of the GenServer.
definit do
  # this will become the state of our GenServer
  fn(params) ->
    # cast or prepare the initial state, based on the given params
    Map.new(params)
  end
end
# define the function `set_value(pid, params)` and the corresponding
# `handle_cast` function.
defcast set_value do
fn(state, [key, value] = _params) ->
# modify the state, based on params and return the new state
Map.put(state, key, value)
end
end
# define the function `get_value(pid, params)` and the corresponding
# `handle_call` function
defcall get_value do
fn(state, [key, default]) ->
Map.get(state,key) || default
end
end
end
{:ok, person_pid} = MyModule.start_link(name: "Alex", born: 1997)
MyModule.get_value(person_pid, [:name, nil])
# "Alex"
MyModule.set_value(person_pid, [:name, "Alexis"])
MyModule.state(person_pid)
# %{name: "Alexis", born: 1997}
By `use StateServer` you get a default `start_link/1` function.
We also add a function `state/1` and a corresponding `handle_call` to return the
current state.
Use the macro `definit` to define the code for the `GenServer`'s `init/1` and the
macros `defcall` and `defcast` to define pairs of a function and the corresponding
handle_call or handle_cast.
`handle_call` never changes the state, whereas `handle_cast` only changes the
state (command-query separation).
"""
defmacro __using__(_opts \\ []) do
quote do
use GenServer
import unquote(__MODULE__)
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts)
end
def state(pid) do
GenServer.call(pid, :state)
end
def handle_call(:state, _, state), do: {:reply, state, state}
end
end
defmacro definit(_args \\ [], do: blk) do
quote do
def init(params) do
{:ok, unquote(blk).(params)}
end
end
end
defmacro defcall(fun_name, do: blk) do
quote do
def unquote(fun_name)(pid, params \\ nil) do
GenServer.call(pid, {unquote(fun_name), params})
end
def handle_call({unquote(fun_name), params}, _, state) do
{:reply, unquote(blk).(state, params), state}
end
end
end
defmacro defcast(fun_name, do: blk) do
quote do
def unquote(fun_name)(pid, params \\ nil) do
GenServer.cast(pid, {unquote(fun_name), params})
end
def handle_cast({unquote(fun_name), params}, state) do
state = unquote(blk).(state, params)
{:noreply, state}
end
end
end
end
lib/state_server.ex
defmodule Deck do
@moduledoc """
Get, shuffle and deal a deck of playing cards of any size (from 4 to 52 cards).
"""
import Deck.Guards, only: [is_valid_number_of_cards: 1]
# T = 10, J = Jack, Q = Queen, K = King, A = Ace
@ranks ~w[2 3 4 5 6 7 8 9 T J Q K A]
# Clubs, diamonds, hearts, spades
@suits ~w[c d h s]
# A full deck of cards ordered by rank
@deck for rank <- @ranks, suit <- @suits, do: "#{rank}#{suit}"
@doc """
Returns a full deck of cards (52 cards) ordered by rank.
## Examples
iex> Deck.new()
["2c", "2d", "2h", "2s", "3c", "3d", "3h", "3s", "4c", "4d", "4h", "4s",
"5c", "5d", "5h", "5s", "6c", "6d", "6h", "6s", "7c", "7d", "7h", "7s",
"8c", "8d", "8h", "8s", "9c", "9d", "9h", "9s", "Tc", "Td", "Th", "Ts",
"Jc", "Jd", "Jh", "Js", "Qc", "Qd", "Qh", "Qs", "Kc", "Kd", "Kh", "Ks",
"Ac", "Ad", "Ah", "As"]
"""
def new do
@deck
end
@doc """
Returns a deck containing the highest `n_cards` cards, ordered by rank.
`n_cards` must be less than or equal to 52 and divisible by 4.
## Examples
iex> Deck.new(12)
["Qc", "Qd", "Qh", "Qs", "Kc", "Kd", "Kh", "Ks", "Ac", "Ad", "Ah", "As"]
"""
def new(n_cards) when is_valid_number_of_cards(n_cards) do
new() |> Enum.take(-n_cards)
end
@doc """
Returns a deck containing the lowest `n_cards` cards, ordered by rank.
`n_cards` must be less than or equal to 52 and divisible by 4.
## Examples
iex> Deck.new_low(12)
["2c", "2d", "2h", "2s", "3c", "3d", "3h", "3s", "4c", "4d", "4h", "4s"]
"""
def new_low(n_cards) when is_valid_number_of_cards(n_cards) do
new() |> Enum.take(n_cards)
end
@doc """
Returns a full shuffled deck of cards (52 cards).
## Examples
iex> Deck.shuffled() != Deck.new()
true
"""
def shuffled do
new() |> shuffle
end
@doc """
Returns a shuffled deck containing the highest `n_cards` cards.
`n_cards` must be less than or equal to 52 and divisible by 4.
## Examples
iex> Deck.shuffled(36) != Deck.new(36)
true
"""
def shuffled(n_cards) when is_valid_number_of_cards(n_cards) do
n_cards |> new |> shuffle
end
@doc """
Returns a shuffled deck containing the lowest `n_cards` cards.
`n_cards` must be less than or equal to 52 and divisible by 4.
## Examples
iex> Deck.shuffled_low(36) != Deck.new_low(36)
true
"""
def shuffled_low(n_cards) when is_valid_number_of_cards(n_cards) do
n_cards |> new_low |> shuffle
end
@doc """
Shuffles the `deck` using the modern version of the Fisher–Yates shuffle algorithm.
## Examples
iex> deck = Deck.new()
...> shuffled_deck = Deck.shuffle(deck)
...> shuffled_deck != deck
true
iex> Deck.shuffle(shuffled_deck) != Deck.shuffle(shuffled_deck)
true
"""
def shuffle(deck) when is_list(deck) do
do_shuffle(deck, length(deck), [])
end
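# One Fisher-Yates step per call: pick a random card among those still in the
# working deck, move it to the accumulator, and fill its slot with the current
# last card before shrinking the working deck.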
defp do_shuffle([last_card | _empty_deck], 1, shuffled_deck), do: [last_card | shuffled_deck]
defp do_shuffle(deck, cards_left, shuffled_deck) do
random_card_index = :rand.uniform(cards_left) - 1
random_card = Enum.at(deck, random_card_index)
{current_last_card, deck_leftovers} = List.pop_at(deck, -1)
deck_leftovers
|> List.replace_at(random_card_index, current_last_card)
|> do_shuffle(cards_left - 1, [random_card | shuffled_deck])
end
@doc """
Returns `n_cards` from the `deck` and the rest of the `deck`.
## Example
iex> Deck.deal(Deck.new(8), 2)
{["Kc", "Kd"], ["Kh", "Ks", "Ac", "Ad", "Ah", "As"]}
"""
def deal(deck, n_cards) when is_list(deck) and is_integer(n_cards) and n_cards > 0 do
deck |> Enum.split(n_cards)
end
@doc """
"Burns" `n_cards` in the `deck` and returns the rest of the `deck`.
## Example
iex> Deck.burn(Deck.new(8), 6)
["Ah", "As"]
"""
def burn(deck, n_cards) when is_list(deck) and is_integer(n_cards) and n_cards > 0 do
deck |> Enum.drop(n_cards)
end
@doc """
Returns number of cards left in the `deck`.
## Example
iex> Deck.size(Deck.new(12))
12
"""
def size(deck) when is_list(deck) do
deck |> Enum.count()
end
end
lib/deck.ex
defmodule TrainLoc.Vehicles.Validator do
@moduledoc """
Intended to validate the expected data ranges and
expected values that a vehicle is allowed to have.
"""
alias TrainLoc.Vehicles.Vehicle
@default_error {:error, :invalid_vehicle}
@block_trip_min_length 3
@doc """
Validates a vehicles to ensure expected values.
Any discrepancy between expected values and actual values results
in an error tuple `{:error, reason}` where `reason` is an atom.
A valid vehicle will result in an `:ok`.
"""
def validate(%Vehicle{} = veh) do
with :ok <- must_be_non_neg_int(veh, :vehicle_id),
:ok <- must_be_datetime(veh, :timestamp),
:ok <- must_be_string(veh, :block),
:ok <- must_not_be_blank(veh, :block),
:ok <- must_have_min_length(veh, :block),
:ok <- must_be_string(veh, :trip),
:ok <- must_not_be_blank(veh, :trip),
:ok <- must_have_min_length(veh, :trip),
:ok <- must_have_valid_latitude(veh),
:ok <- must_have_valid_longitude(veh),
:ok <- must_be_in_range(veh, :heading, 0..359),
:ok <- must_be_non_neg_int(veh, :speed) do
:ok
end
end
def validate(_other) do
{:error, :not_a_vehicle}
end
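# Applies a one-argument predicate to the given field; any failure collapses
# into the generic @default_error tuple.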
defp run_validation(veh, field, bool_func) when is_function(bool_func, 1) do
if veh |> Map.get(field) |> bool_func.() do
:ok
else
@default_error
end
end
def is_non_neg_int?(x) do
is_integer(x) and x >= 0
end
def is_blank?(""), do: true
def is_blank?(nil), do: true
def is_blank?(_), do: false
def is_not_blank?(x), do: !is_blank?(x)
def is_datetime?(%DateTime{}), do: true
def is_datetime?(_), do: false
def is_long_enough?(string)
when is_binary(string) and byte_size(string) >= @block_trip_min_length,
do: true
def is_long_enough?(_), do: false
def must_be_non_neg_int(veh, field) do
run_validation(veh, field, &is_non_neg_int?/1)
end
def must_be_string(veh, field) do
run_validation(veh, field, &is_binary/1)
end
def must_not_be_blank(veh, field) do
run_validation(veh, field, &is_not_blank?/1)
end
def must_have_min_length(veh, field) do
run_validation(veh, field, &is_long_enough?/1)
end
def must_be_datetime(veh, field) do
run_validation(veh, field, &is_datetime?/1)
end
def must_be_float(veh, field) do
run_validation(veh, field, &is_float/1)
end
def must_be_in_range(veh, field, _.._ = range) do
in_range? = fn x -> x in range end
run_validation(veh, field, in_range?)
end
@doc """
Validates the type and range value of a Vehicles latitude.
The southernmost station is Wickford Junction (41.5).
The northernmost station is Newburyport (42.8).
`nil` is also acceptable if we don't know where the vehicle is.
"""
def must_have_valid_latitude(%Vehicle{latitude: nil}) do
:ok
end
def must_have_valid_latitude(%Vehicle{latitude: lat})
when is_float(lat) and lat >= 41.5 and lat <= 42.8 do
:ok
end
def must_have_valid_latitude(_) do
@default_error
end
@doc """
Validates the type and range value of a Vehicles longitude.
The westernmost station is Wachusett (-72).
The easternmost station depends on whether the summer
CapeFLYER trains use vehicles that appear in this
feed - either Rockport (-70.6) or Hyannis (-70.25). In this
case, Hyannis (-70.25) was chosen because it is more permissive.
`nil` is also acceptable if we don't know where the vehicle is.
"""
def must_have_valid_longitude(%Vehicle{longitude: nil}) do
:ok
end
def must_have_valid_longitude(%Vehicle{longitude: long})
when is_float(long) and long >= -72.0 and long <= -70.25 do
:ok
end
def must_have_valid_longitude(_) do
@default_error
end
end
apps/train_loc/lib/train_loc/vehicles/validator.ex
defmodule Paasaa do
@moduledoc """
Detects language by analyzing symbol patterns and trigram occurrence in a string.
"""
@script_expressions Paasaa.Data.fetch_script_expressions!()
@trigrams Paasaa.Data.fetch_trigrams!()
@languages Paasaa.Data.fetch_languages!()
@max_difference 300
@type options :: [
min_length: non_neg_integer(),
max_length: non_neg_integer(),
only: [String.t()],
ignore: [String.t()]
]
@doc """
Detects a language. Returns a `Paasaa.Language` struct.
## Options
- `:min_length` - If the text is shorter than `:min_length` it will return `:error`. Default: `10`.
- `:max_length` - Maximum length to analyze. Default: `2048`.
- `:only` - Use only this ISO-639-3 language codes for recognition. Default: `[]`.
- `:ignore` - Skip ISO-639-3 language codes from recognition. Default: `[]`.
## Examples
Detect a string:
iex> Paasaa.detect("Detect this!")
{:ok, %Paasaa.Language{
iso6393: "eng",
name: "English",
iso6391: "en",
iso6392B: "eng",
iso6392T: "eng",
scope: "individual",
type: "living"
}}
With the `:ignore` option:
iex> Paasaa.detect("Detect this!", ignore: ["eng"])
{:ok, %Paasaa.Language{
iso6391: nil,
iso6393: "sco",
name: "Scots",
iso6392B: "sco",
iso6392T: "sco",
scope: "individual",
type: "living"
}}
With the `:min_length` option:
iex> Paasaa.detect("Привет", min_length: 6)
{:ok, %Paasaa.Language{
iso6393: "rus",
name: "Russian",
iso6391: "ru",
iso6392B: "rus",
iso6392T: "rus",
scope: "individual",
type: "living"
}}
It returns `:error` for undetermined language:
iex> Paasaa.detect("1234567890")
:error
"""
@spec detect(string :: String.t(), options :: options()) :: {:ok, Paasaa.Language.t()} | :error
def detect(string, options \\ []) do
case list_language_probabilities(string, options) do
[{iso6393, _weight} | _] ->
{:ok, Map.fetch!(@languages, iso6393)}
[] ->
:error
end
end
@doc """
Detects a language. Returns a list of ISO-639-3 language codes and probability for each language.
Returns an empty list if the language cannot be detected.
For list of available options see `detect/2` for details.
## Examples
Detect language and limit results to 5:
iex> Paasaa.list_language_probabilities("Detect this!") |> Enum.take(5)
[
{"eng", 1.0},
{"sco", 0.8668304668304668},
{"nob", 0.6054054054054054},
{"swe", 0.5921375921375922},
{"nno", 0.5518427518427518}
]
iex> Paasaa.list_language_probabilities(nil)
[]
iex> Paasaa.list_language_probabilities("")
[]
"""
@spec list_language_probabilities(string :: String.t(), options()) :: [
{language_iso6393_code :: String.t(), weight :: number()}
]
def list_language_probabilities(string, options \\ [])
def list_language_probabilities("", _), do: []
def list_language_probabilities(nil, _), do: []
def list_language_probabilities(string, options) do
min_length = Keyword.get(options, :min_length, 10)
max_length = Keyword.get(options, :max_length, 2048)
if String.length(string) < min_length do
[]
else
string = String.slice(string, 0, max_length)
language_probabilities(string, options)
end
end
@spec language_probabilities(string :: String.t(), options()) :: [
{language_iso6393_code :: String.t(), weight :: number()}
]
defp language_probabilities(string, options) do
{script, weight} = get_most_probable_script(string)
cond do
weight == 0 ->
[]
trigrams = Map.get(@trigrams, script) ->
string
|> get_clean_trigrams()
|> get_distances(trigrams, options)
|> normalize(string)
true ->
[{script, 1}]
end
end
@spec get_most_probable_script(string :: String.t()) ::
{script_or_language_iso6393_code :: String.t(), number()}
defp get_most_probable_script(string) do
string_length = String.length(string)
@script_expressions
|> Enum.map(fn {name, regexp} -> {name, get_occurrence(string, regexp, string_length)} end)
|> Enum.max_by(fn {_name, weight} -> weight end)
end
@spec get_occurrence(
string :: String.t(),
regexp :: Regex.t(),
string_length :: non_neg_integer()
) ::
float()
defp get_occurrence(string, regexp, string_length) do
Enum.count(Regex.scan(regexp, string)) / string_length
end
@spec get_distances([String.t()], Enumerable.t(), options()) :: [
{language_iso6393_code :: String.t(), weight :: number()}
]
defp get_distances(trigrams, languages, options) do
languages
|> filter_languages(options)
|> Enum.map(fn {language, model} -> {language, get_distance(trigrams, model)} end)
|> Enum.sort(&(elem(&1, 1) < elem(&2, 1)))
end
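# Sums, over the input trigrams, the rank difference against the language
# model; trigrams missing from the model cost @max_difference. Lower totals
# mean a closer match.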
@spec get_distance([String.t()], Enumerable.t()) :: number
defp get_distance(trigrams, model) do
Enum.reduce(trigrams, 0, fn {name, val}, distance ->
distance +
if Map.has_key?(model, name) do
abs(val - model[name] - 1)
else
@max_difference
end
end)
end
@spec filter_languages([String.t()], Enumerable.t()) :: Enumerable.t()
defp filter_languages(languages, options) do
allow = Keyword.get(options, :only, [])
ignore = Keyword.get(options, :ignore, [])
if allow == [] and ignore == [] do
languages
else
Enum.filter(languages, fn {language, _weight} ->
language_allowed?(language, allow, ignore)
end)
end
end
defp language_allowed?(_language_or_script, [], []) do
true
end
defp language_allowed?(language_or_script, [], ignore) do
language_or_script not in ignore
end
defp language_allowed?(language_or_script, only, ignore) do
onlyed? = language_or_script in only
ignored? = language_or_script in ignore
onlyed? and not ignored?
end
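# Rescales raw distances into [0, 1] weights: the smallest distance maps to
# 1.0 and the largest possible distance maps to 0.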
@spec normalize([{script_or_language_iso6393_code :: String.t(), number()}], String.t()) :: [
{script_or_language_iso6393_code :: String.t(), number()}
]
defp normalize([], _str), do: []
defp normalize(distances, string) do
min = distances |> List.first() |> elem(1)
max = String.length(string) * @max_difference - min
Enum.map(distances, fn {lang, dist} ->
dist =
if max == 0 do
0
else
1 - (dist - min) / max
end
{lang, dist}
end)
end
@spec get_clean_trigrams(String.t()) :: [
{script_or_language_iso6393_code :: String.t(), number()}
]
defp get_clean_trigrams(string) do
string
|> clean()
|> pad()
|> n_grams()
|> Enum.reduce(%{}, fn trigram, acc ->
weight = (acc[trigram] && acc[trigram] + 1) || 1
Map.put(acc, trigram, weight)
end)
|> Map.to_list()
end
@spec clean(string :: String.t()) :: String.t()
defp clean(string) do
string
|> String.replace(~r/[\x{0021}-\x{0040}]+/u, " ", global: true)
|> String.replace(~r/\s+/u, " ", global: true)
|> String.trim()
|> String.downcase()
end
defp pad(string), do: " #{string} "
@spec n_grams(string :: String.t(), n :: number) :: [String.t()]
defp n_grams(string, n \\ 3) do
string
|> String.graphemes()
|> Enum.chunk_every(n, 1, :discard)
|> Enum.map(&Enum.join/1)
end
end
lib/paasaa.ex
defmodule ClusterYASD.Strategy do
@moduledoc """
YASD Strategy for libcluster.
Use this module as the strategy in your libcluster topologies.
```elixir
topologies = [
my_yasd: [
strategy: ClusterYASD.Strategy,
config: [
base_url: "http://yaasd:4001",
application_name: :my_app,
polling_interval: 10, # seconds
register_interval: 30, # seconds
immidiate_register: true
]
]
]
# All configuration options other than `base_url` are optional.
```
And finally add it to your supervision tree.
```elixir
{Cluster.Supervisor, [topologies, [name: MyApp.ClusterSupervisor]]}
```
"""
use Cluster.Strategy
use GenServer
require Logger
@default_polling_interval 10
@default_register_interval 30
def start_link([state]) do
Keyword.fetch!(state.config, :base_url)
GenServer.start_link(__MODULE__, state)
end
@impl true
def init(%Cluster.Strategy.State{config: config} = state) do
[app_name, ip] = get_node_info()
app_name = Keyword.get(config, :application_name, app_name)
schedule_next_poll(state)
if Keyword.get(config, :immidiate_register, true) do
send(self(), :register)
end
{:ok, Map.put(state, :config, Keyword.merge(config, application_name: app_name, ip: ip))}
end
@impl true
def handle_info(:load, state) do
with {:ok, nodes} <- get_nodes(state),
:ok <-
Cluster.Strategy.connect_nodes(state.topology, state.connect, state.list_nodes, nodes) do
:ok
else
{:error, bad_nodes} ->
Logger.error("yasd cannot connect: #{inspect(bad_nodes)}")
end
schedule_next_poll(state)
{:noreply, state}
end
def handle_info(:register, state) do
register(state)
schedule_next_register(state)
{:noreply, state}
end
defp register(%{config: config}) do
base_url = Keyword.fetch!(config, :base_url)
app_name = Keyword.fetch!(config, :application_name)
ip = Keyword.fetch!(config, :ip)
url = Path.join(base_url, "/api/v1/service/#{app_name}/register?ip=#{ip}")
case :httpc.request(:put, {to_charlist(url), []}, [], []) do
{:ok, {{_v, s, _}, _headers, _body}} when s >= 200 and s < 300 ->
:ok
{:ok, {{_v, status, _}, _headers, body}} ->
Logger.error("yasd register error: #{status} -> #{body}")
{:error, :yasd_error}
error ->
Logger.error("yasd register httpc error: #{inspect(error)}")
{:error, :httpc_error}
end
end
defp get_nodes(%{config: config}) do
base_url = Keyword.fetch!(config, :base_url)
app_name = Keyword.fetch!(config, :application_name)
url = Path.join(base_url, "/api/v1/service/#{app_name}/nodes")
case :httpc.request(:get, {to_charlist(url), []}, [], []) do
{:ok, {{_v, s, _}, _headers, body}} when s >= 200 and s < 300 ->
nodes =
body
|> to_string
|> Jason.decode!()
|> parse_response(app_name)
{:ok, nodes}
{:ok, {{_v, status, _}, _headers, body}} ->
Logger.error("yasd error: #{status} -> #{body}")
{:error, :yasd_error}
error ->
Logger.error("yasd httpc error: #{inspect(error)}")
{:error, :httpc_error}
end
end
defp schedule_next_poll(state) do
Process.send_after(
self(),
:load,
Keyword.get(state.config, :polling_interval, @default_polling_interval) * 1000
)
end
defp schedule_next_register(state) do
Process.send_after(
self(),
:register,
Keyword.get(state.config, :register_interval, @default_register_interval) * 1000
)
end
defp parse_response(response, app_name) do
response
|> Enum.map(&"#{app_name}@#{&1}")
|> Enum.map(&String.to_atom(&1))
end
defp get_node_info do
Node.self()
|> to_string()
|> String.split("@")
end
end
lib/cluster_yasd/strategy.ex
defmodule DSMR.Combinators do
@moduledoc false
import NimbleParsec
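  # Parser combinators for DSMR telegrams: for example, a COSEM line such as
  # "1-0:1.8.1(000581.161*kWh)" parses into an OBIS code plus a float value
  # tagged with its "kWh" unit.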
@separator [?\r, ?\n]
def separator do
empty()
|> utf8_char([Enum.at(@separator, 0)])
|> utf8_char([Enum.at(@separator, 1)])
|> label("separator")
end
@left_parens [?(]
def left_paren do
utf8_char(@left_parens)
|> label("left parenthesis")
end
@right_parens [?)]
def right_paren do
utf8_char(@right_parens)
|> label("right parenthesis")
end
@decimal_places [?.]
def decimal_place do
utf8_char(@decimal_places)
|> label("decimal place character")
end
@unit_places [?*]
def unit_place do
utf8_char(@unit_places)
|> label("unit place character")
end
@digits [?0..?9]
def digits do
ascii_char(@digits)
|> label("digits")
end
@letters [?a..?z, ?A..?Z]
def letters do
ascii_char(@letters)
|> label("letters")
end
def obis_digit(combinator \\ empty()) do
combinator
|> concat(
digits()
|> times(min: 1)
|> reduce({List, :to_integer, []})
)
end
def obis do
obis_digit()
|> ignore(utf8_char([?-]))
|> obis_digit()
|> ignore(utf8_char([?:]))
|> obis_digit()
|> ignore(utf8_char([?.]))
|> obis_digit()
|> ignore(utf8_char([?.]))
|> obis_digit()
|> tag(:obis)
|> label("obis")
end
@invalid_chars @separator ++
@decimal_places ++
@unit_places ++
@left_parens ++
@right_parens
@unit Enum.map(@invalid_chars, fn s -> {:not, s} end)
def unit do
ignore(unit_place())
|> times(utf8_char(@unit), min: 1)
|> reduce({List, :to_string, []})
|> label("unit")
end
def int do
times(digits(), min: 1)
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:integer)
end
def float do
times(digits(), min: 1)
|> concat(decimal_place() |> times(digits(), min: 1))
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:float)
end
def number do
choice([float(), int()])
|> lookahead_not(letters())
|> label("number")
|> optional(unit() |> unwrap_and_tag(:unit))
end
def text do
choice([digits(), letters()])
|> repeat()
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:text)
|> label("text")
end
def timestamp do
digits()
|> times(12)
|> utf8_char([?S, ?W])
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:timestamp)
|> label("timestamp")
end
def value do
ignore(left_paren())
|> choice([timestamp(), obis(), number(), text()])
|> ignore(right_paren())
|> map(:format_value)
|> label("value")
end
def cosem do
optional(obis())
|> times(
value()
|> optional(separator() |> ignore())
|> tag(:value),
min: 1
)
|> tag(:cosem)
end
@manufacturer Enum.map(@separator ++ [?5], fn s -> {:not, s} end)
def manufacturer do
utf8_char(@manufacturer)
|> times(min: 1)
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:manufacturer)
end
@model Enum.map(@separator, fn s -> {:not, s} end)
def model do
utf8_char(@model)
|> times(min: 1)
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:model)
end
def header do
ignore(utf8_char([?/]))
|> concat(manufacturer())
|> ignore(utf8_char([?5]))
|> concat(model())
|> tag(:header)
|> label("header")
end
@footer Enum.map(@separator, fn s -> {:not, s} end)
def footer do
ignore(utf8_char([?!]))
|> repeat(utf8_char(@footer))
|> reduce({List, :to_string, []})
|> unwrap_and_tag(:footer)
|> label("footer")
end
def lines do
header()
|> ignore(separator())
|> ignore(separator())
|> times(
cosem()
|> optional(separator() |> ignore()),
min: 1
)
|> concat(footer())
|> optional(separator() |> ignore())
|> eos()
end
end
lib/dsmr/combinators.ex
defmodule Riak.Object do
@moduledoc """
The Data wrapper makes it convenient to work with Riak data in Elixir
"""
@doc """
Struct representing a Riak Object. Attributes:
* `type`: String; Bucket Type with a unique name within the cluster namespace
* `bucket`: String; Bucket with a unique name within the bucket type namespace
* `key`: String; Not required; Key with a unique name within the bucket namespace
* `data`: Any; Value to be stored under the key
* `metadata`: Orddict; User specified metadata
* `vclock`: String; Dotted Version Vector / Causal Context for object
* `content_type`: String; Content Type for object
"""
defstruct [bucket: nil, type: nil, key: :undefined, data: nil, metadata: nil, vclock: nil, content_type: "application/json"]
@doc """
Get all metadata entries
"""
def get_metadata(obj, key) do
case :riakc_obj.get_user_metadata_entry(obj.metadata, key) do
:notfound -> nil
val -> val
end
end
def get_all_metadata(obj), do: :riakc_obj.get_user_metadata_entries(obj.metadata)
def delete_metadata(obj, key) do
%{obj | metadata: :riakc_obj.delete_user_metadata_entry(obj.metadata, key)}
end
def delete_all_metadata(obj) do
%{obj | metadata: :riakc_obj.clear_user_metadata_entries(obj.metadata)}
end
def put_metadata(obj, {key, value}) do
%{obj | metadata: :riakc_obj.set_user_metadata_entry(obj.metadata, {key, value})}
end
# Secondary Index
def index_id({:binary_index, name}), do: "#{name}_bin"
def index_id({:integer_index, name}), do: "#{name}_int"
def get_index(obj, {type, name}) do
case :riakc_obj.get_secondary_index(obj.metadata, {type, name}) do
:notfound -> nil
val -> val
end
end
def get_all_indexes(obj) do :riakc_obj.get_secondary_indexes(obj.metadata) end
def delete_index(obj, {type, name}) do
%{obj | metadata: :riakc_obj.delete_secondary_index(obj.metadata, {type, name})}
end
def delete_all_indexes(obj) do
%{obj | metadata: :riakc_obj.clear_secondary_indexes(obj.metadata)}
end
def put_index(obj, {type, name}, values) do
%{obj | metadata: :riakc_obj.add_secondary_index(obj.metadata, [{{type, name}, values}])}
end
# Links
def get_link(obj, tag) do
case :riakc_obj.get_links(obj.metadata, tag) do
:notfound -> nil
val -> val
end
end
def get_all_links(obj) do
:riakc_obj.get_all_links(obj.metadata)
end
def delete_link(obj, tag) do
%{obj | metadata: :riakc_obj.delete_links(obj.metadata, tag)}
end
def delete_all_links(obj) do
%{obj | metadata: :riakc_obj.clear_links(obj.metadata)}
end
def put_link(obj, tag, bucket, key) do
%{obj | metadata: :riakc_obj.add_link(obj.metadata, [{tag, [{bucket,key}]}])}
end
def from_robj(robj) do
%Riak.Object{bucket: :riakc_obj.bucket(robj),
type: :riakc_obj.bucket_type(robj),
key: :riakc_obj.key(robj),
data: :riakc_obj.get_update_value(robj),
metadata: :riakc_obj.get_update_metadata(robj),
vclock: :riakc_obj.vclock(robj),
content_type: :riakc_obj.get_update_content_type(robj)}
end
def to_robj(obj) do
robj = :riakc_obj.new(obj.bucket,
obj.key,
obj.data,
obj.content_type)
robj = if obj.vclock, do: :riakc_obj.set_vclock(robj, obj.vclock), else: robj
robj = if obj.metadata, do: :riakc_obj.update_metadata(robj, obj.metadata), else: robj
robj
end
def create, do: %Riak.Object{}
def create(args) do
obj = struct(Riak.Object, args)
from_robj(to_robj(obj)) # FIXME
end
end
lib/riak/object.ex
defmodule Phone do
@digits Enum.map(?2..?9, &<<&1>>)
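  # Guard: the country code (when present) must be "1", and the leading digit
  # of both the area code and the exchange must be 2-9.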
defguard is_digits(country_code \\ "1", area_code, phone_number)
when country_code == "1" and area_code in @digits and phone_number in @digits
@doc """
Remove formatting from a phone number.
Returns "0000000000" if phone number is not valid
(10 digits or "1" followed by 10 digits)
## Examples
iex> Phone.number("212-555-0100")
"2125550100"
iex> Phone.number("+1 (212) 555-0100")
"2125550100"
iex> Phone.number("+1 (212) 055-0100")
"0000000000"
iex> Phone.number("(212) 555-0100")
"2125550100"
iex> Phone.number("867.5309")
"0000000000"
"""
@spec number(String.t()) :: String.t()
def number(
<<"+", country_code::binary-size(1), " (", area_code::binary-size(1),
area_code_digs::binary-size(2), ") ", phone_number::binary-size(1),
first_digs::binary-size(2), "-", last_digs::binary-size(4)>>
)
when is_digits(country_code, area_code, phone_number) do
area_code <> area_code_digs <> phone_number <> first_digs <> last_digs
end
def number(
<<"(", area_code::binary-size(1), area_code_digs::binary-size(2), ") ",
phone_number::binary-size(1), first_digs::binary-size(2), "-",
last_digs::binary-size(4)>>
)
when is_digits(area_code, phone_number) do
area_code <> area_code_digs <> phone_number <> first_digs <> last_digs
end
def number(
      <<area_code::binary-size(1), area_code_digs::binary-size(2), "-",
        phone_number::binary-size(1), first_digs::binary-size(2), "-",
        last_digs::binary-size(4)>>
    )
    when is_digits(area_code, phone_number) do
  area_code <> area_code_digs <> phone_number <> first_digs <> last_digs
end
def number(
      <<area_code::binary-size(1), area_code_digs::binary-size(2), ".",
        phone_number::binary-size(1), first_digs::binary-size(2), ".",
        last_digs::binary-size(4)>>
    )
    when is_digits(area_code, phone_number) do
  area_code <> area_code_digs <> phone_number <> first_digs <> last_digs
end
def number(
<<country_code::binary-size(1), area_code::binary-size(1), area_code_digs::binary-size(2),
phone_number::binary-size(1), first_digs::binary-size(2), last_digs::binary-size(4)>>
)
when is_digits(country_code, area_code, phone_number) do
area_code <> area_code_digs <> phone_number <> first_digs <> last_digs
end
def number(
<<area_code::binary-size(1), area_code_digs::binary-size(2), phone_number::binary-size(1),
first_digs::binary-size(2), last_digs::binary-size(4)>>
)
when is_digits(area_code, phone_number) do
area_code <> area_code_digs <> phone_number <> first_digs <> last_digs
end
def number(_raw), do: "0000000000"
@doc """
Extract the area code from a phone number
Returns the first three digits from a phone number,
ignoring long distance indicator
## Examples
iex> Phone.area_code("212-555-0100")
"212"
iex> Phone.area_code("+1 (212) 555-0100")
"212"
iex> Phone.area_code("+1 (012) 555-0100")
"000"
iex> Phone.area_code("867.5309")
"000"
"""
@spec area_code(String.t()) :: String.t()
def area_code(raw), do: String.slice(number(raw), 0, 3)
@doc """
Pretty print a phone number
Wraps the area code in parentheses and separates
exchange and subscriber number with a dash.
## Examples
iex> Phone.pretty("212-555-0100")
"(212) 555-0100"
iex> Phone.pretty("212-155-0100")
"(000) 000-0000"
iex> Phone.pretty("+1 (303) 555-1212")
"(303) 555-1212"
iex> Phone.pretty("867.5309")
"(000) 000-0000"
"""
@spec pretty(String.t()) :: String.t()
def pretty(raw) do
raw
|> number
|> format
end
defp format(
<<area_code::binary-size(1), area_code_digs::binary-size(2),
phone_number::binary-size(1), first_digs::binary-size(2), last_digs::binary-size(4)>>
) do
"(#{area_code}#{area_code_digs}) #{phone_number}#{first_digs}-#{last_digs}"
end
defp format(_raw), do: "(000) 000-0000"
end
elixir/phone-number/lib/phone.ex
defmodule Cognixir.ComputerVision.AnalyzeOptions do
@moduledoc """
Options for function analyze_image. See official api doc for supported options.
## Keys
- visualFeatures: comma-separated string of visual features
- details: comma-separated string of detail models
- language: which language to return
"""
defstruct visualFeatures: "", details: "", language: "en"
end
defmodule Cognixir.ComputerVision.OCROptions do
@moduledoc """
Options for function recognize_character.
## Keys
- detectOrientation: toggles orientation detection
- language: language of the text, "unk" for auto detection
"""
defstruct detectOrientation: false, language: "en"
end
defmodule Cognixir.ComputerVision do
@moduledoc """
Provides functions for image analytics, including OCR, image descriptions, tagging and face detection.
"""
alias Cognixir.ComputerVision
defp api_base do
"https://api.projectoxford.ai/vision/v1.0/"
end
defp api_key do
Application.get_env(:cognixir, :cv_api_key)
end
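  # A valid UTF-8 string is assumed to be an image URL and wrapped in a JSON
  # body; any other binary is sent as-is as raw file content.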
defp encode_body(image) do
if String.valid?(image), do: %{"url" => image}, else: image
end
@doc """
Analyzes an image specified by an image URL or binary file content. You can set AnalyzeOptions to configure the metadata extraction.
Consult the official API doc for allowed options.
## Parameters
- image: A string containing valid image url or binary file content of an image
- options: AnalyzeOptions with additional parameters (optional)
## Examples
iex> ComputerVision.analyze_image("http://example.com/images/test.jpg", %ComputerVision.AnalyzeOptions{language: "en", visualFeatures: "Faces,Color", details: "Celebrities"})
{ :ok, response_map }
"""
def analyze_image(image, options \\ %ComputerVision.AnalyzeOptions{}) do
Cognixir.post(
encode_body(image),
api_base() <> "analyze",
api_key(),
Map.from_struct(options)
)
end
@doc """
Describes an image in English sentences. You can set the maximum number of descriptions.
## Parameters
- image: A string containing valid image url or binary file content of an image
- max_candidates: An integer larger than 0
## Examples
iex> ComputerVision.describe_image("http://example.com/images/test.jpg", 3)
{ :ok, response_map }
"""
def describe_image(image, max_candidates \\ 1) do
Cognixir.post(encode_body(image), api_base() <> "describe", api_key(), %{
maxCandidates: max_candidates
})
end
@doc """
Runs OCR on a specified image. You can set OCROptions to auto-detect the image orientation.
## Parameters
- image: A string containing valid image url or binary file content of an image
- options: OCROptions with additional parameters (optional)
## Examples
iex> ComputerVision.recognize_character("http://example.com/images/test.jpg", %ComputerVision.OCROptions{detectOrientation: true})
{ :ok, response_map }
"""
def recognize_character(image, options \\ %ComputerVision.OCROptions{}) do
Cognixir.post(encode_body(image), api_base() <> "ocr", api_key(), Map.from_struct(options))
end
@doc """
Get tags for a specified image.
## Parameters
- image: A string containing valid image url or binary file content of an image
## Examples
iex> ComputerVision.tag_image("http://example.com/images/test.jpg")
{ :ok, response_map }
"""
def tag_image(image) do
Cognixir.post(encode_body(image), api_base() <> "tag", api_key())
end
end
lib/computer_vision.ex
defmodule PhoenixIntegration.Form.TreeEdit do
@moduledoc false
# Once a tree of `Tag` structures has been created, the values contained
# within it can be overridden by leaves of a different tree provided by
# the test.
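#
# Illustrative sketch: if the form tree holds a %Tag{values: ["old"]} under
# ["user", "name"] and a change targets that path with the value "new", the
# resulting tree holds %Tag{values: ["new"]} (or an error tuple if the path
# or arity does not line up).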
alias PhoenixIntegration.Form.{Change, Tag, Common}
defstruct valid?: :true, tree: %{}, errors: []
def apply_edits(tree, edit_tree) do
changes = Change.changes(edit_tree)
reducer = fn change, acc ->
case apply_change(acc.tree, change) do
{:ok, new_tree} ->
Common.put_tree(acc, new_tree)
{:error, message_atom, message_context} ->
Common.put_error(acc, message_atom, message_context)
end
end
case Enum.reduce(changes, %__MODULE__{tree: tree}, reducer) do
%{valid?: true, tree: tree} -> {:ok, tree}
%{errors: errors} -> {:error, errors}
end
end
def apply_change!(tree, %Change{} = change) do
{:ok, new_tree} = apply_change(tree, change)
new_tree
end
def apply_change(tree, %Change{} = change) do
try do
{:ok, apply_change(tree, change.path, change)}
catch
{description, context} ->
handle_oddity(description, context, tree, change)
end
end
def handle_oddity(:no_such_name_in_form, %{why: :possible_typo}, tree,
%{ignore_if_missing_from_form: true}),
do: {:ok, tree}
def handle_oddity(description, context, _tree, _change),
do: {:error, description, context}
defp apply_change(tree, [last], %Change{} = change) do
case Map.get(tree, last) do
%Tag{} = tag ->
Map.put(tree, last, combine(tag, change))
nil ->
throw no_such_name_in_form(:possible_typo, tree, last, change)
_ ->
throw no_such_name_in_form(:path_too_short, tree, last, change)
end
end
defp apply_change(tree, [next | rest], %Change{} = change) do
case Map.get(tree, next) do
%Tag{} ->
throw no_such_name_in_form(:path_too_long, tree, next, change)
nil ->
throw no_such_name_in_form(:possible_typo, tree, next, change)
_ ->
Map.update!(tree, next, &(apply_change &1, rest, change))
end
end
defp no_such_name_in_form(why, tree, key, change) do
{:no_such_name_in_form,
%{why: why, tree: tree, last_tried: key, change: change}
}
end
def combine(%Tag{} = tag, %Change{} = change) do
case {is_list(change.value), tag.has_list_value} do
{true, true} ->
%Tag{ tag | values: change.value}
{false, false} ->
%Tag{ tag | values: [change.value]}
_ ->
throw {:arity_clash, %{existing: tag, change: change}}
end
end
end
lib/phoenix_integration/form/tree_edit.ex
defmodule ArtemisWeb.ViewHelper.Numbers do
import Phoenix.HTML.Tag
@doc """
Returns the sign of a given number. Returns:
:positive
:zero
:negative
"""
def number_sign(value) when is_number(value) do
cond do
value > 0 -> :positive
value < 0 -> :negative
true -> :zero
end
end
def number_sign(value = %Decimal{}) do
value
|> Decimal.to_float()
|> number_sign()
end
def number_sign(value) when is_bitstring(value) do
value
|> Integer.parse()
|> elem(0)
|> number_sign()
end
def number_sign(value) when is_atom(value) do
value
|> Atom.to_string()
|> number_sign()
end
@doc """
Returns a bitstring symbol for a given number's sign.
"""
def number_sign_symbol(value, options \\ []) do
case number_sign(value) do
:positive -> "+"
:negative -> "-"
:zero -> Keyword.get(options, :zero, "")
end
end
@doc """
Returns a bitstring of the number and its sign symbol.
Options:
:pretty_print (default: false) Boolean - include comma delimiters in numbers
:precision (default: 0) Integer - number of decimal places to include
:symbol (default: nil) String - prefix result with a symbol, e.g. "$" would return `-$404`
:zero_sign (default: nil) String - symbol to be shown for zero values, e.g. "+" would return `+0.00`
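Illustrative examples (not doctests; output assumes the default
`Number.Delimit` settings):

    number_and_sign_symbol(-404, symbol: "$")
    #=> "-$404"

    number_and_sign_symbol(0, zero_sign: "+", precision: 2, pretty_print: true)
    #=> "+0.00"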
"""
def number_and_sign_symbol(value, options \\ [])
def number_and_sign_symbol(value = %Decimal{}, options) do
value
|> Decimal.to_float()
|> number_and_sign_symbol(options)
end
def number_and_sign_symbol(value, options) do
default_options = [
pretty_print: false,
precision: 0,
symbol: nil,
zero_sign: nil
]
options = Keyword.merge(default_options, options)
zero_sign = Keyword.get(options, :zero_sign)
number_as_string =
value
|> abs()
|> maybe_pretty_print(options)
|> maybe_add_symbol(options)
case number_sign(value) do
:positive -> "+#{number_as_string}"
:negative -> "-#{number_as_string}"
:zero -> "#{zero_sign}#{number_as_string}"
end
end
@doc """
Returns a Semantic UI compatible caret icon class for a given number's sign.
"""
def number_sign_icon_class(value, options \\ []) do
icon = Keyword.get(options, :icon, "caret")
case number_sign(value) do
:positive -> "#{icon} up"
:negative -> "#{icon} down"
:zero -> ""
end
end
@doc """
Returns a Semantic UI compatible caret icon tag for a given number's sign.
"""
def number_sign_icon_tag(value, options \\ []) do
sign = number_sign(value)
class = "ui icon " <> number_sign_icon_class(value)
color =
cond do
Keyword.get(options, :color) == false -> ""
sign == :positive -> "green"
sign == :negative -> "red"
true -> ""
end
html_options =
options
|> Keyword.put(:class, class)
|> Keyword.update!(:class, &"#{&1} #{color}")
case sign do
:zero -> nil
_signed -> content_tag(:i, "", html_options)
end
end
@doc """
Returns a sign icon tag followed by the number's absolute value.
"""
def number_sign_icon_tag_and_value(value, options \\ []) do
icon = number_sign_icon_tag(value, options)
number =
value
|> abs()
|> Integer.to_string()
case icon do
nil -> number
_ -> content_tag(:span, [icon, number], class: "no-wrap")
end
end
@doc """
Pretty prints number with commas
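An illustrative example (assuming the default `Number.Delimit` delimiter):

    pretty_print_number(1234567.891, precision: 2)
    #=> "1,234,567.89"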
"""
def pretty_print_number(number, options \\ [])
def pretty_print_number(number = %Decimal{}, options) do
number
|> Decimal.to_float()
|> pretty_print_number(options)
end
def pretty_print_number(number, options) do
default_options = [
absolute_value: false,
precision: 0
]
merged_options = Keyword.merge(default_options, options)
number
|> maybe_absolute_value(merged_options)
|> Number.Delimit.number_to_delimited(merged_options)
end
# Helpers
defp maybe_absolute_value(number, options) do
case Keyword.get(options, :absolute_value) do
true -> abs(number)
_ -> number
end
end
defp maybe_add_symbol(value, options) do
case Keyword.get(options, :symbol) do
nil -> "#{value}"
symbol -> "#{symbol}#{value}"
end
end
defp maybe_pretty_print(number, options) do
case Keyword.get(options, :pretty_print) do
true -> pretty_print_number(number, options)
_ -> number
end
end
end
|
apps/artemis_web/lib/artemis_web/view_helpers/numbers.ex
| 0.844361 | 0.650724 |
numbers.ex
|
starcoder
|
defmodule Mix.Tasks.Docs do
use Mix.Task
@shortdoc "Generate documentation for the project"
@moduledoc ~S"""
Uses ExDoc to generate a static web page from the project documentation.
## Command line options
* `--canonical`, `-n` - Indicate the preferred URL with
rel="canonical" link element, defaults to no canonical path
* `--formatter`, `-f` - Which formatters to use, "html" or
"epub". This option can be given more than once. By default,
both html and epub are generated.
* `--output`, `-o` - Output directory for the generated
docs, default: `"doc"`
* `--language` - Specifies the language to annotate the
EPUB output in; must be a valid [BCP 47](https://tools.ietf.org/html/bcp47) language tag
The command line options have higher precedence than the options
specified in your `mix.exs` file below.
## Configuration
ExDoc will automatically pull in information from your project,
like the application and version. However, you may want to set
`:name`, `:source_url` and `:homepage_url` to have a nicer output
from ExDoc, for example:
def project do
[app: :my_app,
version: "0.1.0-dev",
deps: deps(),
# Docs
name: "My App",
source_url: "https://github.com/USER/PROJECT",
homepage_url: "http://YOUR_PROJECT_HOMEPAGE",
docs: [main: "MyApp", # The main page in the docs
logo: "path/to/logo.png",
extras: ["README.md"]]]
end
ExDoc also allows configuration specific to the documentation to
be set. The following options should be put under the `:docs` key
in your project's main configuration. The `:docs` options should
be a keyword list or a function returning a keyword list that will
be lazily executed.
* `:api_reference` - Whether to generate `api-reference.html`; default: `true`.
If this is set to false, `:main` must also be set.
* `:assets` - Path to a directory that will be copied as is to the "assets"
directory in the output path. Its entries may be referenced in your docs
under "assets/ASSET.EXTENSION"; defaults to no assets directory.
* `:before_closing_body_tag` - a function that takes as argument an atom specifying
the formatter being used (`:html` or `:epub`) and returns a literal HTML string
to be included just before the closing body tag (`</body>`).
The atom given as argument can be used to include different content in both formats.
Useful to inject custom assets, such as Javascript.
* `:before_closing_head_tag` - a function that takes as argument an atom specifying
the formatter being used (`:html` or `:epub`) and returns a literal HTML string
to be included just before the closing head tag (`</head>`).
The atom given as argument can be used to include different content in both formats.
Useful to inject custom assets, such as CSS stylesheets.
* `:canonical` - String that defines the preferred URL with the rel="canonical"
element; defaults to no canonical path.
* `:deps` - A keyword list of application names and their documentation URLs.
ExDoc will by default include all dependencies and assume they are hosted on
HexDocs. This can be overridden by your own values. Example: `[plug: "https://myserver/plug/"]`
* `:extra_section` - String that defines the section title of the additional
Markdown and plain text pages; default: "PAGES". Example: "GUIDES"
* `:extras` - List of keywords, each key must indicate the path to additional
Markdown or plain text pages, the value for each keyword (optional) gives you more control
about the PATH and the title of the output files; default: `[]`. Example:
`["README.md", "LICENSE", "CONTRIBUTING.md": [filename: "contributing", title: "Contributing"]]`
* `:filter_prefix` - Include only modules that match the given prefix in
the generated documentation. Example: "MyApp.Core". If you set this option,
remember to also set the `:main` option to a module that will be included,
for example `main: "MyApp.Core.Inner"`.
* `:formatters` - Formatters to use; default: ["html", "epub"], options: "html", "epub".
* `:groups_for_extras`, `:groups_for_modules`, `:groups_for_functions` - See the "Groups" section
* `:javascript_config_path` - Path of an additional JavaScript file to be included on all pages
to provide up-to-date data for features like the version dropdown - See the "Additional
JavaScript config" section. Example: `"../versions.js"`
* `:nest_modules_by_prefix` - See the "Nesting" section
* `:language` - Identify the primary language of the documents, its value must be
a valid [BCP 47](https://tools.ietf.org/html/bcp47) language tag; default: "en"
* `:logo` - Path to the image logo of the project (only PNG or JPEG accepted)
The image size will be 64x64. When specified, the logo will be placed under
the "assets" directory in the output path under the name "logo" and the
appropriate extension.
* `:cover` - Path to the epub cover image (only PNG or JPEG accepted)
The image size should be around 1600x2400. When specified, the cover will be placed under
the "assets" directory in the output path under the name "cover" and the
appropriate extension. This option has no effect when using the "html" formatter.
* `:authors` - List of authors for the generated docs or epub.
* `:main` - Main page of the documentation. It may be a module or a
generated page, like "Plug" or "api-reference"; default: "api-reference".
* `:markdown_processor` - The markdown processor to use;
* `:markdown_processor_options` - Configuration options for the markdown processor;
* `:source_beam` - Path to the beam directory; default: mix's compile path.
* `:source_ref` - The branch/commit/tag used for source link inference;
default: "master".
* `:source_url_pattern` - Public URL of the project for source links. This is derived
automatically from the project's `:source_url` and `:source_ref` when using one of
the supported public hosting services (currently GitHub, GitLab, or Bitbucket). If
you are using one of those services with their default public hostname, you do not
need to set this configuration.
However, if using a different solution, or self-hosting, you will need to set this
configuration variable to a pattern for source code links. The value must be a string
of the full URI to use for links with the following variables available for interpolation:
* `%{path}`: the path of a file in the repo
* `%{line}`: the line number in the file
For GitLab/GitHub:
```text
https://mydomain.org/user_or_team/repo_name/blob/master/%{path}#L%{line}
```
For Bitbucket:
```text
https://mydomain.org/user_or_team/repo_name/src/master/%{path}#cl-%{line}
```
* `:output` - Output directory for the generated docs; default: "doc".
May be overridden by command line argument.
* `:ignore_apps` - Apps to be ignored when generating documentation in an umbrella project.
Receives a list of atoms. Example: `[:first_app, :second_app]`.
* `:skip_undefined_reference_warnings_on` - ExDoc warns when it can't create a `Mod.fun/arity`
reference in the current project docs e.g. because of a typo. This list controls where to
skip the warnings, for a given module/function/callback/type (e.g.: `["Foo", "Bar.baz/0"]`)
or on a given file (e.g.: `["pages/deprecations.md"]`); default: `[]`.
## Groups
ExDoc content can be organized in groups. This is done via the `:groups_for_extras`
and `:groups_for_modules`. For example, imagine you are storing extra guides in
your documentation which are organized per directory. In the extras section you
have:
extras: [
"guides/introduction/foo.md",
"guides/introduction/bar.md",
...
"guides/advanced/baz.md",
"guides/advanced/bat.md"
]
You can have those grouped as follows:
groups_for_extras: [
"Introduction": Path.wildcard("guides/introduction/*.md"),
"Advanced": Path.wildcard("guides/advanced/*.md")
]
Or via a regex:
groups_for_extras: [
"Introduction": ~r"/introduction/",
"Advanced": ~r"/advanced/"
]
Similar can be done for modules:
groups_for_modules: [
"Data types": [Atom, Regex, URI],
"Collections": [Enum, MapSet, Stream]
]
A regex or the string name of the module is also supported.
### Grouping functions
Functions inside a module can also be organized in groups. This is done via
the `:groups_for_functions` configuration which is a keyword list of group
titles and filtering functions that receive the documentation metadata of
functions as argument.
For example, imagine that you have an API client library with a large surface
area for all the API endpoints you need to support. It would be helpful to
group the functions with similar responsibilities together. In this case in
your module you might have:
defmodule APIClient do
@doc section: :auth
def refresh_token(params \\ [])
@doc subject: :object
def update_status(id, new_status)
@doc permission: :grant
def grant_privilege(resource, privilege)
end
And then in the configuration you can group these with:
groups_for_functions: [
Authentication: & &1[:section] == :auth,
Resource: & &1[:subject] == :object,
Admin: & &1[:permission] in [:grant, :write]
]
A function can belong to a single group only. If multiple group filters match,
the first will take precedence. Functions that don't have a custom group will
be listed under the default "Functions" group.
## Additional JavaScript config
Since version `0.20.0` ExDoc includes a way to enrich the documentation
with new information without having to re-generate it, through a JavaScript
file that can be shared across documentation for multiple versions of the
package. If `:javascript_config_path` is set when building the documentation,
this script will be referenced in each page's `<head>` using a `<script>` tag.
The script should define data in global JavaScript variables that will be
interpreted by `ex_doc` when viewing the documentation.
Currently supported variables:
### `versionNodes`
This global JavaScript variable should be providing an array of objects that
define all versions of this Mix package which should appear in the package
versions dropdown in the documentation sidebar. The versions dropdown allows
for switching between package versions' documentation.
Example:
```javascript
var versionNodes = [
{
version: "v0.0.0", // version number or name (required)
url: "https://hexdocs.pm/ex_doc/0.19.3/" // documentation URL (required)
}
]
```
## Nesting
ExDoc also allows module names in the sidebar to appear nested under a given
prefix. The `:nest_modules_by_prefix` expects a list of module names, such as
`[Foo.Bar, Bar.Baz]`. In this case, a module named `Foo.Bar.Baz` will appear
nested within `Foo.Bar` and only the name `Baz` will be shown in the sidebar.
Note the `Foo.Bar` module itself is not affected.
This option is mainly intended to improve the display of long module names in
the sidebar, particularly when they are too long for the sidebar or when many
modules share a long prefix. If you mean to group modules logically or call
attention to them in the docs, you should probably use `:groups_for_modules`
(which can be used in conjunction with `:nest_modules_by_prefix`).
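For example (illustrative module names):

    nest_modules_by_prefix: [MyApp.Accounts, MyApp.Billing]

With this configuration, a module named `MyApp.Accounts.User` appears in the
sidebar as `User`, nested under `MyApp.Accounts`.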
## Umbrella project
ExDoc can be used in an umbrella project and generates a single documentation
for all child apps. You can use the `:ignore_apps` configuration to exclude
certain projects in the umbrella from documentation.
Generating documentation for each child app separately can be achieved by running:
mix cmd mix docs
See `mix help cmd` for more information.
"""
@switches [
canonical: :string,
formatter: :keep,
language: :string,
output: :string
]
@aliases [n: :canonical, f: :formatter, o: :output]
@doc false
def run(args, config \\ Mix.Project.config(), generator \\ &ExDoc.generate_docs/3) do
Mix.Task.run("compile")
{:ok, _} = Application.ensure_all_started(:ex_doc)
unless Code.ensure_loaded?(ExDoc.Config) do
Mix.raise(
"Could not load ExDoc configuration. Please make sure you are running the " <>
"docs task in the same Mix environment it is listed in your deps"
)
end
{cli_opts, args, _} = OptionParser.parse(args, aliases: @aliases, switches: @switches)
if args != [] do
Mix.raise("Extraneous arguments on the command line")
end
project =
to_string(
config[:name] || config[:app] ||
raise("expected :name or :app to be found in the project definition in mix.exs")
)
version = config[:version] || "dev"
options =
config
|> get_docs_opts()
|> Keyword.merge(cli_opts)
# accepted at root level config
|> normalize_source_url(config)
# accepted at root level config
|> normalize_homepage_url(config)
|> normalize_source_beam(config)
|> normalize_apps(config)
|> normalize_main()
|> normalize_deps()
Mix.shell().info("Generating docs...")
for formatter <- get_formatters(options) do
index = generator.(project, version, Keyword.put(options, :formatter, formatter))
Mix.shell().info([:green, "View #{inspect(formatter)} docs at #{inspect(index)}"])
index
end
end
defp get_formatters(options) do
case Keyword.get_values(options, :formatter) do
[] -> options[:formatters] || ["html", "epub"]
values -> values
end
end
defp get_docs_opts(config) do
docs = config[:docs]
cond do
is_function(docs, 0) -> docs.()
is_nil(docs) -> []
true -> docs
end
end
defp normalize_source_url(options, config) do
if source_url = config[:source_url] do
Keyword.put(options, :source_url, source_url)
else
options
end
end
defp normalize_homepage_url(options, config) do
if homepage_url = config[:homepage_url] do
Keyword.put(options, :homepage_url, homepage_url)
else
options
end
end
defp normalize_source_beam(options, config) do
compile_path =
if Mix.Project.umbrella?(config) do
umbrella_compile_paths(Keyword.get(options, :ignore_apps, []))
else
Mix.Project.compile_path()
end
Keyword.put_new(options, :source_beam, compile_path)
end
defp umbrella_compile_paths(ignored_apps) do
build = Mix.Project.build_path()
for {app, _} <- Mix.Project.apps_paths(),
app not in ignored_apps do
Path.join([build, "lib", Atom.to_string(app), "ebin"])
end
end
defp normalize_apps(options, config) do
if Mix.Project.umbrella?(config) do
ignore = Keyword.get(options, :ignore_apps, [])
apps =
for {app, _} <- Mix.Project.apps_paths(), app not in ignore do
app
end
Keyword.put(options, :apps, apps)
else
Keyword.put(options, :apps, List.wrap(config[:app]))
end
end
defp normalize_main(options) do
main = options[:main]
cond do
is_nil(main) ->
Keyword.delete(options, :main)
is_atom(main) ->
Keyword.put(options, :main, inspect(main))
is_binary(main) ->
options
end
end
defp normalize_deps(options) do
user_deps = Keyword.get(options, :deps, [])
deps =
for {app, doc} <- Keyword.merge(get_deps(), user_deps),
lib_dir = :code.lib_dir(app),
is_list(lib_dir),
do: {app, doc}
Keyword.put(options, :deps, deps)
end
defp get_deps do
for {key, _} <- Mix.Project.deps_paths(),
_ = Application.load(key),
vsn = Application.spec(key, :vsn) do
{key, "https://hexdocs.pm/#{key}/#{vsn}/"}
end
end
end
|
lib/mix/tasks/docs.ex
| 0.889373 | 0.715772 |
docs.ex
|
starcoder
|
defmodule Timex.Parse.DateTime.Helpers do
@moduledoc false
import Combine.Parsers.Base
import Combine.Parsers.Text, except: [integer: 0, integer: 1]
alias Combine.Parsers.Text
use Timex.Constants
def months, do: @month_names
def to_month(month) when is_integer(month), do: [month: month]
def to_month_num(m) when m in ["January", "Jan"], do: to_month(1)
def to_month_num(m) when m in ["February", "Feb"], do: to_month(2)
def to_month_num(m) when m in ["March", "Mar"], do: to_month(3)
def to_month_num(m) when m in ["April", "Apr"], do: to_month(4)
def to_month_num(m) when m in ["May", "May"], do: to_month(5)
def to_month_num(m) when m in ["June", "Jun"], do: to_month(6)
def to_month_num(m) when m in ["July", "Jul"], do: to_month(7)
def to_month_num(m) when m in ["August", "Aug"], do: to_month(8)
def to_month_num(m) when m in ["September", "Sep"], do: to_month(9)
def to_month_num(m) when m in ["October", "Oct"], do: to_month(10)
def to_month_num(m) when m in ["November", "Nov"], do: to_month(11)
def to_month_num(m) when m in ["December", "Dec"], do: to_month(12)
def is_weekday(name) do
n = String.downcase(name)
cond do
n in @weekday_abbrs_lower -> true
n in @weekday_names_lower -> true
true -> false
end
end
def to_weekday(name) do
n = String.downcase(name)
case n do
n when n in ["mon", "monday"] -> 1
n when n in ["tue", "tuesday"] -> 2
n when n in ["wed", "wednesday"] -> 3
n when n in ["thu", "thursday"] -> 4
n when n in ["fri", "friday"] -> 5
n when n in ["sat", "saturday"] -> 6
n when n in ["sun", "sunday"] -> 7
end
end
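# Converts a fractional-seconds string to `[sec_fractional: {microseconds, precision}]`,
# e.g. (illustrative) to_sec_ms("45") returns [sec_fractional: {450_000, 2}].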
def to_sec_ms(fraction) do
precision = byte_size(fraction)
n = String.to_integer(fraction)
n = n * div(1_000_000, trunc(:math.pow(10, precision)))
case n do
0 -> [sec_fractional: {0,0}]
_ -> [sec_fractional: {n, precision}]
end
end
def parse_milliseconds(ms) do
n = ms |> String.trim_leading("0")
n = if n == "", do: 0, else: String.to_integer(n)
n = n * 1_000
[sec_fractional: Timex.DateTime.Helpers.construct_microseconds(n, -1)]
end
def parse_microseconds(us) do
n_width = byte_size(us)
trailing = n_width - byte_size(String.trim_trailing(us, "0"))
cond do
n_width == trailing ->
[sec_fractional: {0, n_width}]
:else ->
p = n_width - trailing
p = if p > 6, do: 6, else: p
n = us |> String.trim("0") |> String.to_integer
[sec_fractional: {n * trunc(:math.pow(10, 6-p)), p}]
end
end
def to_ampm("am"), do: [am: "am"]
def to_ampm("AM"), do: [AM: "AM"]
def to_ampm("pm"), do: [am: "pm"]
def to_ampm("PM"), do: [AM: "PM"]
def integer(opts \\ []) do
min_width =
case Keyword.get(opts, :padding) do
:none ->
1
_ ->
get_in(opts, [:min]) || 1
end
max_width = get_in(opts, [:max])
padding = get_in(opts, [:padding])
case {padding, min_width, max_width} do
{:zeroes, _, nil} -> Text.integer
{:zeroes, min, max} -> choice(Enum.map(max..min, &(fixed_integer(&1))))
{:spaces, -1, nil} -> skip(spaces()) |> Text.integer
{:spaces, min, nil} -> skip(spaces()) |> fixed_integer(min)
{:spaces, _, max} -> skip(spaces()) |> choice(Enum.map(max..1, &(fixed_integer(&1))))
{_, -1, nil} -> Text.integer
{_, min, nil} -> fixed_integer(min)
{_, min, max} -> choice(Enum.map(max..min, &(fixed_integer(&1))))
end
end
end
|
lib/parse/datetime/helpers.ex
| 0.550124 | 0.503235 |
helpers.ex
|
starcoder
|
defmodule Dispenser.MonitoredBuffer do
@moduledoc """
A `MonitoredBuffer` contains most of the logic required to implement a `GenServer` that wraps a `Buffer`.
`MonitoredBuffer` combines a `Buffer` to track events and a `SubscriptionManager` to track subscribers.
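A minimal usage sketch (illustrative; `buffer` comes from your `Buffer`
implementation and `subscriber_pid` is any subscriber process):

    state = MonitoredBuffer.new(buffer)
    {state, _dropped} = MonitoredBuffer.append(state, [event_a, event_b])
    state = MonitoredBuffer.ask(state, subscriber_pid, 2)
    {state, assignments} = MonitoredBuffer.assign_events(state)
    # assignments is a list of {subscriber_pid, [event]} tuples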
"""
alias Dispenser.Buffer
alias Dispenser.SubscriptionManager
@typedoc """
Various statistics exposed by the `MonitoredBuffer` for use by debugging and metrics.
See `stats/1`
"""
@type stats() :: %{
buffered: non_neg_integer(),
subscribed: non_neg_integer(),
demand: non_neg_integer()
}
@typedoc """
The opaque internal state of the `MonitoredBuffer`.
"""
@opaque t(event) :: %__MODULE__{
subscription_manager: SubscriptionManager.t(),
buffer: Buffer.t(event, pid())
}
@enforce_keys [:subscription_manager, :buffer]
defstruct [:subscription_manager, :buffer]
@doc """
Create a new `MonitoredBuffer` that wraps the given `Buffer`.
"""
@spec new(Buffer.t(event, pid())) :: t(event)
when event: any()
def new(buffer) do
%__MODULE__{buffer: buffer, subscription_manager: SubscriptionManager.new()}
end
@doc """
Add events to the `MonitoredBuffer`.
If the `MonitoredBuffer` reaches its capacity, events will be dropped.
"""
@spec append(t(event), [event]) :: {t(event), dropped :: non_neg_integer()}
when event: any()
def append(%__MODULE__{} = state, events) do
{buffer, dropped} = Buffer.append(state.buffer, events)
state = %__MODULE__{state | buffer: buffer}
{state, dropped}
end
@doc """
Ask for events from the `Buffer`.
These demands are met by calls to `assign_events/1`.
"""
@spec ask(t(event), subscriber :: pid(), demand :: non_neg_integer()) :: t(event)
when event: any()
def ask(%__MODULE__{} = state, subscriber, 0) when is_pid(subscriber) do
state
end
def ask(%__MODULE__{} = state, subscriber, demand) when is_pid(subscriber) and demand > 0 do
buffer = Buffer.ask(state.buffer, subscriber, demand)
subscription_manager = SubscriptionManager.monitor(state.subscription_manager, subscriber)
%__MODULE__{state | buffer: buffer, subscription_manager: subscription_manager}
end
@doc """
Given the current events and demands, returns the events to send to each subscriber.
"""
@spec assign_events(t(event)) :: {t(event), [{subscriber :: pid(), [event]}]}
when event: any()
def assign_events(%__MODULE__{} = state) do
{buffer, assignments} = Buffer.assign_events(state.buffer)
state = %__MODULE__{state | buffer: buffer}
{state, assignments}
end
@doc """
Handle the down signal from a monitored subscriber.
"""
@spec down(t(event), subscriber :: pid(), reference()) ::
{:ok, t(event)} | {:error, :wrong_ref} | {:error, :not_subscribed}
when event: any()
def down(%__MODULE__{} = state, subscriber, ref) do
case SubscriptionManager.down(state.subscription_manager, subscriber, ref) do
{:ok, subscription_manager} ->
buffer = Buffer.delete(state.buffer, subscriber)
state = %__MODULE__{state | buffer: buffer, subscription_manager: subscription_manager}
{:ok, state}
error ->
error
end
end
@doc """
Stop monitoring and remove all demand from the given subscriber.
"""
@spec delete(t(event), subscriber :: pid()) :: {:ok, t(event)} | {:error, :not_subscribed}
when event: any()
def delete(%__MODULE__{} = state, subscriber) do
case SubscriptionManager.demonitor(state.subscription_manager, subscriber) do
{:ok, subscription_manager} ->
buffer = Buffer.delete(state.buffer, subscriber)
state = %__MODULE__{state | buffer: buffer, subscription_manager: subscription_manager}
{:ok, state}
{:error, :not_subscribed} ->
{:error, :not_subscribed}
end
end
@doc """
Get the number of events in the `MonitoredBuffer`.
"""
@spec size(t(event)) :: non_neg_integer() when event: any()
def size(%__MODULE__{} = state) do
Buffer.size(state.buffer)
end
@doc """
Get various statistics about the `MonitoredBuffer` for use when debugging and generating metrics.
"""
@spec stats(t(event)) :: stats() when event: any()
def stats(%__MODULE__{} = state) do
buffer_stats = Buffer.stats(state.buffer)
%{
buffered: buffer_stats.buffered,
demand: buffer_stats.demand,
subscribed: SubscriptionManager.size(state.subscription_manager)
}
end
end
|
lib/dispenser/monitored_buffer.ex
| 0.869216 | 0.583322 |
monitored_buffer.ex
|
starcoder
|
defmodule Ueberauth.Strategy.Quickbooks do
@moduledoc """
Implements an Ueberauth strategy for authentication with quickbooks.com.
When configuring the strategy in the Üeberauth providers, you can specify some defaults.
* `oauth2_module` - The OAuth2 module to use. Default Ueberauth.Strategy.Quickbooks.OAuth
```elixir
config :ueberauth, Ueberauth,
  providers: [
    quickbooks: { Ueberauth.Strategy.Quickbooks }
  ]
```
"""
@oauth2_module Ueberauth.Strategy.Quickbooks.OAuth
use Ueberauth.Strategy,
default_scope: "com.intuit.quickbooks.accounting",
oauth2_module: @oauth2_module
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
# When handling the request just redirect to Quickbooks
@doc false
def handle_request!(conn) do
scope = conn.params["scope"] || option(conn, :default_scope)
opts = [
redirect_uri: callback_url(conn),
scope: scope,
state: random_string(32)
]
opts =
if conn.params["state"], do: Keyword.put(opts, :state, conn.params["state"]), else: opts
redirect!(conn, apply(@oauth2_module, :authorize_url!, [opts]))
end
# When handling the callback, if there was no errors we need to
# make two calls. The first, to fetch the Quickbooks auth is so that we can get hold of
# the user id so we can make a query to fetch the user info.
# So that it is available later to build the auth struct, we put it in the private section of the conn.
@doc false
def handle_callback!(%Plug.Conn{params: %{"code" => code, "realmId" => realm_id}} = conn) do
params = [
code: code,
redirect_uri: callback_url(conn)
]
token = apply(@oauth2_module, :get_token!, [params])
if token.access_token == nil do
set_errors!(conn, [
error(token.other_params["error"], token.other_params["error_description"])
])
else
conn
|> store_realm_id(realm_id)
|> store_token(token)
end
end
# If we don't match code, then we have an issue
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
defp store_realm_id(conn, realm_id) do
put_private(conn, :quickbooks_realm_id, realm_id)
end
# We store the token for use later when fetching the Quickbooks auth and user and constructing the auth struct.
defp store_token(conn, token) do
put_private(conn, :quickbooks_token, token)
end
# Remove the temporary storage in the conn for our data. Run after the auth struct has been built.
@doc false
def handle_cleanup!(conn) do
conn
|> put_private(:quickbooks_realm_id, nil)
|> put_private(:quickbooks_token, nil)
end
# The structure of the requests is such that it is difficult to provide customization for the uid field.
# Instead, we allow selecting any field from the info struct.
@doc false
def uid(conn) do
conn.private[:quickbooks_realm_id]
end
@doc false
def credentials(conn) do
token = conn.private.quickbooks_token
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at,
scopes: []
}
end
@doc false
def info(_conn) do
%Info{}
end
@doc false
def extra(conn) do
token = conn.private.quickbooks_token
%Extra{
raw_info: %{
refresh_token_expires_in: token.other_params["x_refresh_token_expires_in"]
}
}
end
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
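# Generates a URL-safe random string of the given length, used above as the
# OAuth2 `state` parameter.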
def random_string(length) do
:crypto.strong_rand_bytes(length)
|> Base.url_encode64()
|> binary_part(0, length)
end
end
|
lib/ueberauth/strategy/quickbooks.ex
| 0.740268 | 0.483039 |
quickbooks.ex
|
starcoder
|
defmodule Surface.Components.Markdown do
@moduledoc """
A simple macro component that converts **markdown** into **HTML** at compile-time.
## Global configuration (optional)
A set of global options you can set in `config.exs`. Available options are:
* `default_class` - The default CSS class for the wrapping `<div>`. It
can be overridden using property `class`.
* `default_opts` - The default set of options to be passed down to `Earmark.as_html/2`.
It can be overridden using property `opts`.
## CSS Styling
Some CSS libs define their own styles for tags like `<p>`, `<ul>`, `<ol>`, `<strong>`,
`<h1>` to `<h6>`, etc. This can make the rendered HTML look different from what you
expect. One way to fix that is to customize the CSS class on the outer `<div>` of the
generated code.
For instance, in `Bulma`, you can use the class `content` to handle WYSIWYG content
like the HTML generated by the Markdown component.
You can have a default class applied globally using the `default_class` config:
```
config :surface, :components, [
{Surface.Components.Markdown, default_class: "content"}
]
```
Or you can set/override it individually for each `<#Markdown>` instance using
the `class` property.
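An illustrative usage sketch (Surface template syntax assumed):

```
<#Markdown class="content">
# Hello

A paragraph with **bold** text.
</#Markdown>
```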
"""
use Surface.MacroComponent
alias Surface.MacroComponent
alias Surface.Translator.IOHelper
@doc "The CSS class for the wrapping `<div>`"
property class, :string
@doc "Removes the wrapping `<div>`, if `true`"
property unwrap, :boolean, default: false
@doc """
Keyword list with options to be passed down to `Earmark.as_html/2`.
For a full list of available options, please refer to the
[Earmark.as_html/2](https://hexdocs.pm/earmark/Earmark.html#as_html/2)
documentation.
"""
property opts, :keyword, default: []
@doc "The markdown text to be translated to HTML"
slot default
@impl true
def translate({_, attributes, children, %{line: tag_line}}, caller) do
props = MacroComponent.eval_static_props!(__MODULE__, attributes, caller)
class = props[:class] || MacroComponent.get_config(__MODULE__, :default_class)
unwrap = props[:unwrap] || false
config_opts = MacroComponent.get_config(__MODULE__, :default_opts, [])
opts = Keyword.merge(config_opts, props[:opts] || [])
html =
children
|> IO.iodata_to_binary()
|> trim_leading_space()
|> markdown_as_html!(caller, tag_line, opts)
class_attr = if class, do: ~s( class="#{class}"), else: ""
{open_div, close_div} =
if unwrap do
{[], []}
else
{"<div#{class_attr}>\n", "</div>"}
end
open = [
"<% require(#{inspect(__MODULE__)}) %>",
open_div
]
close = [close_div]
{open, html, close}
end
defp trim_leading_space(markdown) do
lines =
markdown
|> String.split("\n")
|> Enum.drop_while(fn str -> String.trim(str) == "" end)
case lines do
[first | _] ->
[space] = Regex.run(~r/^\s*/, first)
lines
|> Enum.map(fn line -> String.replace_prefix(line, space, "") end)
|> Enum.join("\n")
_ ->
""
end
end
defp markdown_as_html!(markdown, caller, tag_line, opts) do
markdown
|> Earmark.as_html(struct(Earmark.Options, opts))
|> handle_result!(caller, tag_line)
end
defp handle_result!({_, html, messages}, caller, tag_line) do
{errors, warnings_and_deprecations} =
Enum.split_with(messages, fn {type, _line, _message} -> type == :error end)
Enum.each(warnings_and_deprecations, fn {_type, line, message} ->
actual_line = caller.line + tag_line + line
IOHelper.warn(message, caller, fn _ -> actual_line end)
end)
if errors != [] do
[{_type, line, message} | _] = errors
actual_line = caller.line + tag_line + line
IOHelper.compile_error(message, caller.file, actual_line)
end
html
end
end
|
lib/surface/components/markdown.ex
| 0.892868 | 0.767994 |
markdown.ex
|
starcoder
|
import Kernel, except: [round: 1]
defmodule Float do
@moduledoc """
Functions for working with floating point numbers.
"""
import Bitwise
@power_of_2_to_52 4503599627370496
@doc """
Parses a binary into a float.
If successful, returns a tuple in the form of `{float, remainder_of_binary}`;
when the binary cannot be coerced into a valid float, the atom `:error` is
returned.
If the size of float exceeds the maximum size of `1.7976931348623157e+308`,
the `ArgumentError` exception is raised.
If you want to convert a string-formatted float directly to a float,
`String.to_float/1` can be used instead.
## Examples
iex> Float.parse("34")
{34.0, ""}
iex> Float.parse("34.25")
{34.25, ""}
iex> Float.parse("56.5xyz")
{56.5, "xyz"}
iex> Float.parse("pi")
:error
"""
@spec parse(binary) :: {float, binary} | :error
def parse("-" <> binary) do
case parse_unsigned(binary) do
:error -> :error
{number, remainder} -> {-number, remainder}
end
end
def parse("+" <> binary) do
parse_unsigned(binary)
end
def parse(binary) do
parse_unsigned(binary)
end
defp parse_unsigned(<<digit, rest::binary>>) when digit in ?0..?9, do:
parse_unsigned(rest, false, false, <<digit>>)
defp parse_unsigned(binary) when is_binary(binary), do:
:error
defp parse_unsigned(<<digit, rest::binary>>, dot?, e?, acc) when digit in ?0..?9, do:
parse_unsigned(rest, dot?, e?, <<acc::binary, digit>>)
defp parse_unsigned(<<?., digit, rest::binary>>, false, false, acc) when digit in ?0..?9, do:
parse_unsigned(rest, true, false, <<acc::binary, ?., digit>>)
defp parse_unsigned(<<exp_marker, digit, rest::binary>>, dot?, false, acc) when exp_marker in 'eE' and digit in ?0..?9, do:
parse_unsigned(rest, true, true, <<add_dot(acc, dot?)::binary, ?e, digit>>)
defp parse_unsigned(<<exp_marker, sign, digit, rest::binary>>, dot?, false, acc) when exp_marker in 'eE' and sign in '-+' and digit in ?0..?9, do:
parse_unsigned(rest, true, true, <<add_dot(acc, dot?)::binary, ?e, sign, digit>>)
defp parse_unsigned(rest, dot?, _e?, acc), do:
{:erlang.binary_to_float(add_dot(acc, dot?)), rest}
defp add_dot(acc, true), do: acc
defp add_dot(acc, false), do: acc <> ".0"
@doc """
Rounds a float to the largest integer less than or equal to `num`.
`floor/2` also accepts a precision to round a floating point value down
to an arbitrary number of fractional digits (between 0 and 15).
The operation is performed on the binary floating point, without a
conversion to decimal.
The behaviour of `floor/2` for floats can be surprising. For example:
iex> Float.floor(12.52, 2)
12.51
One may have expected it to floor to 12.52. This is not a bug.
Most decimal fractions cannot be represented as a binary floating point
and therefore the number above is internally represented as 12.51999999,
which explains the behaviour above.
This function always returns a float. `Kernel.trunc/1` may be used instead to
truncate the result to an integer afterwards.
## Examples
iex> Float.floor(34.25)
34.0
iex> Float.floor(-56.5)
-57.0
iex> Float.floor(34.259, 2)
34.25
"""
@spec floor(float, 0..15) :: float
def floor(number, precision \\ 0) when is_float(number) and precision in 0..15 do
round(number, precision, :floor)
end
@doc """
Rounds a float to the smallest integer greater than or equal to `num`.
`ceil/2` also accepts a precision to round a floating point value down
to an arbitrary number of fractional digits (between 0 and 15).
The operation is performed on the binary floating point, without a
conversion to decimal.
The behaviour of `ceil/2` for floats can be surprising. For example:
iex> Float.ceil(-12.52, 2)
-12.51
One may have expected it to ceil to -12.52. This is not a bug.
Most decimal fractions cannot be represented as a binary floating point
and therefore the number above is internally represented as -12.51999999,
which explains the behaviour above.
This function always returns floats. `Kernel.trunc/1` may be used instead to
truncate the result to an integer afterwards.
## Examples
iex> Float.ceil(34.25)
35.0
iex> Float.ceil(-56.5)
-56.0
iex> Float.ceil(34.251, 2)
34.26
"""
@spec ceil(float, 0..15) :: float
def ceil(number, precision \\ 0) when is_float(number) and precision in 0..15 do
round(number, precision, :ceil)
end
@doc """
Rounds a floating point value to an arbitrary number of fractional
digits (between 0 and 15).
The rounding direction always ties to half up. The operation is
performed on the binary floating point, without a conversion to decimal.
This function only accepts floats and always returns a float. Use
`Kernel.round/1` if you want a function that accepts both floats
and integers and always returns an integer.
The behaviour of `round/2` for floats can be surprising. For example:
iex> Float.round(5.5675, 3)
5.567
One may have expected it to round to the half up 5.568. This is not a bug.
Most decimal fractions cannot be represented as a binary floating point
and therefore the number above is internally represented as 5.567499999,
which explains the behaviour above. If you want exact rounding for decimals,
you must use a decimal library. The behaviour above is also in accordance
to reference implementations, such as "Correctly Rounded Binary-Decimal and
Decimal-Binary Conversions" by <NAME>.
## Examples
iex> Float.round(12.5)
13.0
iex> Float.round(5.5674, 3)
5.567
iex> Float.round(5.5675, 3)
5.567
iex> Float.round(-5.5674, 3)
-5.567
iex> Float.round(-5.5675)
-6.0
iex> Float.round(12.341444444444441, 15)
12.341444444444441
"""
@spec round(float, 0..15) :: float
# This implementation is slow since it relies on big integers.
# Faster implementations are available on more recent papers
# and could be implemented in the future.
def round(float, precision \\ 0) when is_float(float) and precision in 0..15 do
round(float, precision, :half_up)
end
defp round(float, precision, rounding) do
<<sign::1, exp::11, significant::52-bitstring>> = <<float::float>>
{num, count, _} = decompose(significant)
count = count - exp + 1023
cond do
count <= 0 or # There is no decimal precision
(0 == exp and <<0::52>> == significant) -> # zero or minus zero
float
count >= 104 -> # Precision beyond 15 digits
case rounding do
:ceil when sign === 0 -> 1 / power_of_10(precision)
:floor when sign === 1 -> -1 / power_of_10(precision)
_ -> 0.0
end
count <= precision -> # We are asking more precision than we have
float
true ->
# Difference in precision between float and asked precision
# We subtract 1 because we need to calculate the remainder too
diff = count - precision - 1
# Get up to latest so we calculate the remainder
power_of_10 = power_of_10(diff)
# Convert the numerand to decimal base
num = num * power_of_5(count)
# Move to the given precision - 1
num = div(num, power_of_10)
div = div(num, 10)
num = rounding(rounding, sign, num, div)
# Convert back to float without loss
# http://www.exploringbinary.com/correct-decimal-to-floating-point-using-big-integers/
den = power_of_10(precision)
boundary = den <<< 52
cond do
num == 0 ->
0.0
num >= boundary ->
{den, exp} = scale_down(num, boundary, 52)
decimal_to_float(sign, num, den, exp)
true ->
{num, exp} = scale_up(num, boundary, 52)
decimal_to_float(sign, num, den, exp)
end
end
end
defp scale_up(num, boundary, exp) when num >= boundary, do: {num, exp}
defp scale_up(num, boundary, exp), do: scale_up(num <<< 1, boundary, exp - 1)
defp scale_down(num, den, exp) do
new_den = den <<< 1
if num < new_den do
{den >>> 52, exp}
else
scale_down(num, new_den, exp + 1)
end
end
defp decimal_to_float(sign, num, den, exp) do
quo = div(num, den)
rem = num - quo * den
tmp =
case den >>> 1 do
den when rem > den -> quo + 1
den when rem < den -> quo
_ when (quo &&& 1) === 1 -> quo + 1
_ -> quo
end
tmp = tmp - @power_of_2_to_52
<<tmp::float>> = <<sign::1, (exp + 1023)::11, tmp::52>>
tmp
end
defp rounding(:floor, 1, _num, div), do: div + 1
defp rounding(:ceil, 0, _num, div), do: div + 1
defp rounding(:half_up, _sign, num, div) do
case rem(num, 10) do
rem when rem < 5 -> div
rem when rem >= 5 -> div + 1
end
end
defp rounding(_, _, _, div), do: div
Enum.reduce 0..104, 1, fn x, acc ->
defp power_of_10(unquote(x)), do: unquote(acc)
acc * 10
end
Enum.reduce 0..104, 1, fn x, acc ->
defp power_of_5(unquote(x)), do: unquote(acc)
acc * 5
end
@doc """
Returns a pair of integers whose ratio is exactly equal
to the original float and with a positive denominator.
## Examples
iex> Float.ratio(3.14)
{7070651414971679, 2251799813685248}
iex> Float.ratio(-3.14)
{-7070651414971679, 2251799813685248}
iex> Float.ratio(1.5)
{3, 2}
iex> Float.ratio(-1.5)
{-3, 2}
iex> Float.ratio(16.0)
{16, 1}
iex> Float.ratio(-16.0)
{-16, 1}
"""
def ratio(float) when is_float(float) do
<<sign::1, exp::11, significant::52-bitstring>> = <<float::float>>
{num, _, den} = decompose(significant)
num = sign(sign, num)
case exp - 1023 do
exp when exp > 0 ->
{den, exp} = shift_right(den, exp)
{shift_left(num, exp), den}
exp when exp < 0 ->
{num, shift_left(den, -exp)}
0 ->
{num, den}
end
end
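# Decomposes the 52-bit significand into `{num, count, den}`: `count` is the
# position of the least-significant set bit, `den == 2^count`, and `num/den`
# equals the significand including its implicit leading 1 bit.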
defp decompose(significant) do
decompose(significant, 1, 0, 2, 1, 1)
end
defp decompose(<<1::1, bits::bitstring>>, count, last_count, power, _last_power, acc) do
decompose(bits, count + 1, count, power <<< 1, power, shift_left(acc, count - last_count) + 1)
end
defp decompose(<<0::1, bits::bitstring>>, count, last_count, power, last_power, acc) do
decompose(bits, count + 1, last_count, power <<< 1, last_power, acc)
end
defp decompose(<<>>, _count, last_count, _power, last_power, acc) do
{acc, last_count, last_power}
end
defp sign(0, num), do: num
defp sign(1, num), do: -num
defp shift_left(num, 0), do: num
defp shift_left(num, times), do: shift_left(num <<< 1, times - 1)
defp shift_right(num, 0), do: {num, 0}
defp shift_right(1, times), do: {1, times}
defp shift_right(num, times), do: shift_right(num >>> 1, times - 1)
@doc """
Returns a charlist which corresponds to the text representation
of the given float.
It uses the shortest representation according to algorithm described
in "Printing Floating-Point Numbers Quickly and Accurately" in
Proceedings of the SIGPLAN '96 Conference on Programming Language
Design and Implementation.
## Examples
iex> Float.to_charlist(7.0)
'7.0'
"""
@spec to_charlist(float) :: charlist
def to_charlist(float) when is_float(float) do
:io_lib_format.fwrite_g(float)
end
@doc """
Returns a binary which corresponds to the text representation
of the given float.
It uses the shortest representation according to algorithm described
in "Printing Floating-Point Numbers Quickly and Accurately" in
Proceedings of the SIGPLAN '96 Conference on Programming Language
Design and Implementation.
## Examples
iex> Float.to_string(7.0)
"7.0"
"""
@spec to_string(float) :: String.t
def to_string(float) when is_float(float) do
IO.iodata_to_binary(:io_lib_format.fwrite_g(float))
end
# TODO: Remove by 2.0
# (hard-deprecated in elixir_dispatch)
@doc false
def to_char_list(float), do: Float.to_charlist(float)
@doc false
# TODO: Remove by 2.0
# (hard-deprecated in elixir_dispatch)
def to_char_list(float, options) do
:erlang.float_to_list(float, expand_compact(options))
end
@doc false
# TODO: Remove by 2.0
# (hard-deprecated in elixir_dispatch)
def to_string(float, options) do
:erlang.float_to_binary(float, expand_compact(options))
end
defp expand_compact([{:compact, false} | t]), do: expand_compact(t)
defp expand_compact([{:compact, true} | t]), do: [:compact | expand_compact(t)]
defp expand_compact([h | t]), do: [h | expand_compact(t)]
defp expand_compact([]), do: []
end
|
lib/elixir/lib/float.ex
| 0.927571 | 0.645825 |
float.ex
|
starcoder
|
defmodule Phoenix.Slack do
@moduledoc """
The main feature provided by this module is the ability to set the text
body of a message by rendering templates.
It has been designed to integrate with Phoenix view, template and layout system.
"""
import Phoenix.Slack.ChannelMessage
defmacro __using__(opts) do
unless view = Keyword.get(opts, :view) do
raise ArgumentError, "no view was set, " <>
"you can set one with `use Phoenix.Slack, view: MyApp.SlackView`"
end
layout = Keyword.get(opts, :layout)
quote bind_quoted: [view: view, layout: layout] do
alias Phoenix.Slack.ChannelMessage
import Phoenix.Slack.ChannelMessage
import Phoenix.Slack, except: [render_body: 3]
@view view
@layout layout || false
def render_body(message, template, assigns \\ %{}) do
message
|> put_new_layout(@layout)
|> put_new_view(@view)
|> Phoenix.Slack.render_body(template, assigns)
end
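# For a module named `Sample.UserSlack`, this returns "user_slack.text".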
def local_text_template(), do: local_module_name() <> ".text"
defp local_module_name() do
__MODULE__
|> Module.split
|> List.last
|> Macro.underscore
end
end
end
@doc """
Renders the given `template` and `assigns` based on the `message`.
Once the template is rendered the resulting string is stored on the message field `text_body`
## Arguments
* `message` - the `Phoenix.Slack.ChannelMessage` struct
* `template` - may be an atom or a string. If an atom, like `:welcome`, it
will render the text template and store the resulting string on
the message. If the template is a string it must contain the extension too,
like `welcome.text`.
* `assigns` - a dictionary with the assigns to be used in the view. Those
assigns are merged and have higher precedence than the message assigns.
(`message.assigns`)
## Example
defmodule Sample.UserSlack do
use Phoenix.Slack, view: Sample.SlackView
def welcome(user) do
%ChannelMessage{}
|> subject("Hello, Avengers!")
|> render_body("welcome.text", %{username: user.message})
end
end
The example above renders a template `welcome.text` from `Sample.SlackView` and
stores the resulting string onto the text_body field of the message.
(`message.text_body`)
## Layouts
Templates are often rendered inside layouts. If you wish to do so you will have
to specify which layout you want to use when using the `Phoenix.Slack` module.
defmodule Sample.UserSlack do
use Phoenix.Slack, view: Sample.SlackView, layout: {Sample.LayoutView, :message}
def welcome(user) do
%ChannelMessage{}
|> from("<EMAIL>")
|> to(user.message)
|> subject("Hello, Avengers!")
|> render_body("welcome.text", %{username: user.message})
end
end
The example above will render the `welcome.text` template inside an
`message.text` template specified in `Sample.LayoutView`. `put_layout/2` can be
used to change the layout, similar to how `put_view/2` can be used to change
the view.
"""
def render_body(message, template, assigns) when is_atom(template) do
message
|> do_render_body(template_name(template, "text"), "text", assigns)
end
def render_body(message, template, assigns) when is_binary(template) do
case Path.extname(template) do
"." <> format ->
do_render_body(message, template, format, assigns)
"" ->
raise "cannot render template #{inspect template} without format. Use an atom if you " <>
"want to set both the text and text body."
end
end
defp do_render_body(message, template, format, assigns) do
assigns = Enum.into(assigns, %{})
message =
message
|> put_private(:phoenix_template, template)
|> prepare_assigns(assigns, format)
view = Map.get(message.private, :phoenix_view) ||
raise "a view module was not specified, set one with put_view/2"
content = Phoenix.View.render_to_string(view, template, Map.put(message.assigns, :message, message))
Map.put(message, :"#{format}_body", content)
end
@doc """
Stores the layout for rendering.
The layout must be a tuple, specifying the layout view and the layout
name, or false. In case a previous layout is set, `put_layout` also
accepts the layout name to be given as a string or as an atom. If a
string, it must contain the format. Passing an atom means the layout
format will be found at rendering time, similar to the template in
`render_body/3`. It can also be set to `false`. In this case, no
layout would be used.
## Examples
iex> layout(message)
false
iex> message = put_layout message, {LayoutView, "message.text"}
iex> layout(message)
{LayoutView, "message.text"}
iex> message = put_layout message, "message.text"
iex> layout(message)
{LayoutView, "message.text"}
iex> message = put_layout message, :message
iex> layout(message)
{AppView, :message}
"""
def put_layout(message, layout) do
do_put_layout(message, layout)
end
defp do_put_layout(message, false) do
put_private(message, :phoenix_layout, false)
end
defp do_put_layout(message, {mod, layout}) when is_atom(mod) do
put_private(message, :phoenix_layout, {mod, layout})
end
defp do_put_layout(message, layout) when is_binary(layout) or is_atom(layout) do
update_in message.private, fn private ->
case Map.get(private, :phoenix_layout, false) do
{mod, _} -> Map.put(private, :phoenix_layout, {mod, layout})
false -> raise "cannot use put_layout/2 with atom/binary when layout is false, use a tuple instead"
end
end
end
@doc """
Stores the layout for rendering if one was not stored yet.
"""
def put_new_layout(message, layout)
when (is_tuple(layout) and tuple_size(layout) == 2) or layout == false do
update_in message.private, &Map.put_new(&1, :phoenix_layout, layout)
end
@doc """
Retrieves the current layout of an message.
"""
def layout(message), do: message.private |> Map.get(:phoenix_layout, false)
@doc """
Stores the view for rendering.
"""
def put_view(message, module) do
put_private(message, :phoenix_view, module)
end
@doc """
Stores the view for rendering if one was not stored yet.
"""
def put_new_view(message, module) do
update_in message.private, &Map.put_new(&1, :phoenix_view, module)
end
defp prepare_assigns(message, assigns, format) do
layout =
case layout(message, assigns, format) do
{mod, layout} -> {mod, template_name(layout, format)}
false -> false
end
update_in message.assigns,
& &1 |> Map.merge(assigns) |> Map.put(:layout, layout)
end
defp layout(message, assigns, format) do
if format in ["text"] do
case Map.fetch(assigns, :layout) do
{:ok, layout} -> layout
:error -> layout(message)
end
else
false
end
end
defp template_name(name, format) when is_atom(name), do:
Atom.to_string(name) <> "." <> format
defp template_name(name, _format) when is_binary(name), do:
name
end
|
lib/phoenix_slack.ex
| 0.91442 | 0.439988 |
phoenix_slack.ex
|
starcoder
|
defmodule Livebook.Notebook.Explore do
@moduledoc false
defmodule NotFoundError do
@moduledoc false
defexception [:slug, plug_status: 404]
def message(%{slug: slug}) do
"could not find an example notebook matching #{inspect(slug)}"
end
end
@type notebook_info :: %{
ref: atom() | nil,
slug: String.t(),
livemd: String.t(),
title: String.t(),
images: images(),
details: details() | nil
}
@type images :: %{String.t() => binary()}
@type details :: %{
description: String.t(),
cover_url: String.t()
}
@type group_info :: %{
title: String.t(),
description: String.t(),
cover_url: String.t(),
notebook_infos: list(notebook_info())
}
images_dir = Path.expand("explore/images", __DIR__)
welcome_config = %{
path: Path.join(__DIR__, "explore/intro_to_livebook.livemd"),
details: %{
description: "Get to know Livebook, see how it works and explore its features.",
cover_url: "/images/logo.png"
}
}
other_configs = [
%{
path: Path.join(__DIR__, "explore/distributed_portals_with_elixir.livemd"),
image_paths: [
Path.join(images_dir, "portal-drop.jpeg"),
Path.join(images_dir, "portal-list.jpeg")
],
details: %{
description:
"A fast-paced introduction to the Elixir language by building distributed data-transfer portals.",
cover_url: "/images/elixir-portal.jpeg"
}
},
%{
path: Path.join(__DIR__, "explore/elixir_and_livebook.livemd"),
details: %{
description: "Learn how to use some of Elixir and Livebook's unique features together.",
cover_url: "/images/elixir.png"
}
},
%{
path: Path.join(__DIR__, "explore/intro_to_vega_lite.livemd"),
details: %{
description: "Learn how to quickly create numerous plots for your data.",
cover_url: "/images/vega_lite.png"
}
},
%{
path: Path.join(__DIR__, "explore/intro_to_nx.livemd"),
details: %{
description:
"Enter Numerical Elixir, experience the power of multi-dimensional arrays of numbers.",
cover_url: "/images/nx.png"
}
},
# %{
# path: Path.join(__DIR__, "explore/intro_to_axon.livemd"),
# details: %{
# description: "Build Neural Networks in Elixir using a high-level, composable API.",
# cover_url: "/images/axon.png"
# }
# },
%{
ref: :kino_intro,
path: Path.join(__DIR__, "explore/kino/intro_to_kino.livemd")
},
%{
ref: :kino_vm_introspection,
path: Path.join(__DIR__, "explore/kino/vm_introspection.livemd")
},
%{
ref: :kino_chat_app,
path: Path.join(__DIR__, "explore/kino/chat_app.livemd")
},
%{
ref: :kino_pong,
path: Path.join(__DIR__, "explore/kino/pong.livemd")
},
%{
ref: :kino_custom_kinos,
path: Path.join(__DIR__, "explore/kino/custom_kinos.livemd")
}
]
user_configs = Application.compile_env(:livebook, :explore_notebooks, [])
notebook_configs = [welcome_config] ++ user_configs ++ other_configs
notebook_infos =
for config <- notebook_configs do
path =
config[:path] ||
raise "missing required :path attribute in notebook configuration: #{inspect(config)}"
@external_resource path
markdown = File.read!(path)
# Parse the file to ensure no warnings and read the title.
# However, in the info we keep just the file contents to save on memory.
{notebook, warnings} = Livebook.LiveMarkdown.Import.notebook_from_markdown(markdown)
if warnings != [] do
items = Enum.map(warnings, &("- " <> &1))
raise "found warnings while importing #{path}:\n\n" <> Enum.join(items, "\n")
end
images =
config
|> Map.get(:image_paths, [])
|> Map.new(fn image_path ->
image_name = Path.basename(image_path)
content = File.read!(image_path)
{image_name, content}
end)
slug =
config[:slug] || path |> Path.basename() |> Path.rootname() |> String.replace("_", "-")
%{
ref: config[:ref],
slug: slug,
livemd: markdown,
title: notebook.name,
images: images,
details:
if config_details = config[:details] do
description =
config_details[:description] ||
raise "missing required :description attribute in notebook details: #{inspect(config_details)}"
cover_url =
config_details[:cover_url] ||
(config_details[:cover_path] &&
Livebook.Utils.read_as_data_url!(config_details.cover_path)) ||
raise "expected either :cover_path or :cover_url in notebooks details: #{inspect(config_details)}"
%{description: description, cover_url: cover_url}
end
}
end
@doc """
Returns a list of example notebooks with metadata.
"""
@spec notebook_infos() :: list(notebook_info())
def notebook_infos(), do: unquote(Macro.escape(notebook_infos))
@doc """
Same as `notebook_infos/0`, but returns only notebooks that have
additional details.
"""
@spec visible_notebook_infos() :: list(notebook_info())
def visible_notebook_infos() do
notebook_infos() |> Enum.filter(& &1.details)
end
@doc """
Finds an explore notebook by slug and returns the parsed data structure.
Returns the notebook along with the images it uses as preloaded binaries.
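For example (illustrative; slugs are derived from the notebook file name):

    {notebook, images} = notebook_by_slug!("intro-to-livebook")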
"""
@spec notebook_by_slug!(String.t()) :: {Livebook.Notebook.t(), images()}
def notebook_by_slug!(slug) do
notebook_infos()
|> Enum.find(&(&1.slug == slug))
|> case do
nil ->
raise NotFoundError, slug: slug
notebook_info ->
{notebook, []} = Livebook.LiveMarkdown.Import.notebook_from_markdown(notebook_info.livemd)
{notebook, notebook_info.images}
end
end
@group_configs [
%{
title: "Interactions with Kino",
description:
"Kino is an Elixir package for displaying and controlling rich, interactive widgets in Livebook. Learn how to make your notebooks more engaging with inputs, plots, tables, and much more!",
cover_url: "/images/kino.png",
notebook_refs: [
:kino_intro,
:kino_vm_introspection,
:kino_chat_app,
:kino_pong,
:kino_custom_kinos
]
}
]
@doc """
Returns a list of all defined notebook groups.
"""
@spec group_infos() :: list(group_info())
def group_infos() do
notebook_infos = notebook_infos()
for config <- @group_configs do
%{
title: config.title,
description: config.description,
cover_url: config.cover_url,
notebook_infos:
for(
ref <- config.notebook_refs,
info = Enum.find(notebook_infos, &(&1[:ref] == ref)),
do: info
)
}
end
end
end
|
lib/livebook/notebook/explore.ex
| 0.798226 | 0.516535 |
explore.ex
|
starcoder
|
defmodule Port do
@moduledoc ~S"""
Functions for interacting with the external world through ports.
Ports provide a mechanism to start operating system processes external
to the Erlang VM and communicate with them via message passing.
## Example
iex> port = Port.open({:spawn, "cat"}, [:binary])
iex> send(port, {self(), {:command, "hello"}})
iex> send(port, {self(), {:command, "world"}})
iex> flush()
{#Port<0.1444>, {:data, "hello"}}
{#Port<0.1444>, {:data, "world"}}
iex> send(port, {self(), :close})
:ok
iex> flush()
{#Port<0.1464>, :closed}
:ok
In the example above, we have created a new port that executes the
program `cat`. `cat` is a program available on UNIX systems that
receives data from multiple inputs and concatenates them in the output.
After the port was created, we sent it two commands in the form of
messages using `Kernel.send/2`. The first command has the binary payload
of "hello" and the second has "world".
After sending those two messages, we invoked the IEx helper `flush()`,
which printed all messages received from the port, in this case we got
"hello" and "world" back. Notice the messages are in binary because we
passed the `:binary` option when opening the port in `Port.open/2`. Without
such option, it would have yielded a list of bytes.
Once everything was done, we closed the port.
Elixir provides many conveniences for working with ports and some drawbacks.
We will explore those below.
## Message and function APIs
There are two APIs for working with ports. They can be used either
asynchronously via message passing, as in the example above, or by calling
the functions in this module.
The messages supported by ports and their counterpart function APIs are
listed below:
* `{pid, {:command, binary}}` - sends the given data to the port.
See `command/3`.
* `{pid, :close}` - closes the port. Unless the port is already closed,
the port will reply with `{port, :closed}` message once it has flushed
its buffers and effectively closed. See `close/1`.
* `{pid, {:connect, new_pid}}` - sets the `new_pid` as the new owner of
the port. Once a port is opened, the port is linked and connected to the
caller process and communication to the port only happens through the
connected process. This message makes `new_pid` the new connected process.
Unless the port is dead, the port will reply to the old owner with
`{port, :connected}`. See `connect/2`.
In turn, the port will send the connected process the following messages:
* `{port, {:data, data}}` - data sent by the port
* `{port, :closed}` - reply to the `{pid, :close}` message
* `{port, :connected}` - reply to the `{pid, {:connect, new_pid}}` message
* `{:EXIT, port, reason}` - exit signals in case the port crashes. If reason
is not `:normal`, this message will only be received if the owner process
is trapping exits
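For example, transferring port ownership can be done with either API (a
sketch; `port` is assumed to be an open port and `new_pid` a live process):
    # Message API
    send(port, {self(), {:connect, new_pid}})
    # Function API equivalent
    Port.connect(port, new_pid)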
## Open mechanisms
The port can be opened through four main mechanisms.
As a short summary, prefer to use the `:spawn` and `:spawn_executable`
options mentioned below. The other two options, `:spawn_driver` and `:fd`,
are for advanced usage within the VM. Also consider using `System.cmd/3`
if all you want is to execute a program and retrieve its return value.
### spawn
The `:spawn` tuple receives a binary that is going to be executed as a
full invocation. For example, we can use it to invoke "echo hello" directly:
iex> port = Port.open({:spawn, "echo hello"}, [:binary])
iex> flush()
{#Port<0.1444>, {:data, "hello\n"}}
`:spawn` will retrieve the program name from the argument and traverse your
OS `$PATH` environment variable looking for a matching program.
Although the above is handy, it means it is impossible to invoke an executable
that has whitespace in its name or in any of its arguments. For those reasons,
it is most often preferable to use `:spawn_executable`.
### spawn_executable
Spawn executable is a more restricted and explicit version of spawn. It expects
full file paths to the executable you want to execute. If they are in your `$PATH`,
they can be retrieved by calling `System.find_executable/1`:
iex> path = System.find_executable("echo")
iex> port = Port.open({:spawn_executable, path}, [:binary, args: ["hello world"]])
iex> flush()
{#Port<0.1380>, {:data, "hello world\n"}}
When using `:spawn_executable`, the list of arguments can be passed via
the `:args` option as done above. For the full list of options, see the
documentation for the Erlang function `:erlang.open_port/2`.
### fd
The `:fd` name option allows developers to access `in` and `out` file
descriptors used by the Erlang VM. You would use those only if you are
reimplementing core parts of the runtime system, such as the `:user` and
`:shell` processes.
## Zombie OS processes
A port can be closed via the `close/1` function or by sending a `{pid, :close}`
message. However, if the VM crashes, a long-running program started by the port
will have its stdin and stdout channels closed but **it won't be automatically
terminated**.
While most UNIX command line tools will exit once their communication channels
are closed, not all command line applications will do so. While we encourage
graceful termination by detecting if stdin/stdout has been closed, we do not
always have control over how 3rd party software terminates. In those cases,
you can wrap the application in a script that checks for stdin. Here is such
script in bash:
#!/bin/sh
"$@" &
pid=$!
while read line ; do
:
done
kill -KILL $pid
Now instead of:
Port.open({:spawn_executable, "/path/to/program"},
[args: ["a", "b", "c"]])
You may invoke:
Port.open({:spawn_executable, "/path/to/wrapper"},
[args: ["/path/to/program", "a", "b", "c"]])
"""
@type name ::
{:spawn, charlist | binary}
| {:spawn_driver, charlist | binary}
| {:spawn_executable, charlist | atom}
| {:fd, non_neg_integer, non_neg_integer}
@doc """
Opens a port given a tuple `name` and a list of `options`.
The module documentation above contains documentation and examples
for the supported `name` values, summarized below:
* `{:spawn, command}` - runs an external program. `command` must contain
the program name and optionally a list of arguments separated by space.
If passing programs or arguments with space in their name, use the next option.
* `{:spawn_executable, filename}` - runs the executable given by the absolute
file name `filename`. Arguments can be passed via the `:args` option.
* `{:spawn_driver, command}` - spawns so-called port drivers.
* `{:fd, fd_in, fd_out}` - accesses file descriptors, `fd_in` and `fd_out`
opened by the VM.
For more information and the list of options, see `:erlang.open_port/2`.
Inlined by the compiler.
"""
@spec open(name, list) :: port
def open(name, options) do
:erlang.open_port(name, options)
end
@doc """
Closes the `port`.
For more information, see `:erlang.port_close/1`.
Inlined by the compiler.
"""
@spec close(port) :: true
def close(port) do
:erlang.port_close(port)
end
@doc """
Sends `data` to the port driver `port`.
For more information, see `:erlang.port_command/2`.
Inlined by the compiler.
"""
@spec command(port, iodata, [:force | :nosuspend]) :: boolean
def command(port, data, options \\ []) do
:erlang.port_command(port, data, options)
end
@doc """
Associates the `port` identifier with a `pid`.
For more information, see `:erlang.port_connect/2`.
Inlined by the compiler.
"""
@spec connect(port, pid) :: true
def connect(port, pid) do
:erlang.port_connect(port, pid)
end
@doc """
Returns information about the `port` or `nil` if the port is closed.
For more information, see `:erlang.port_info/1`.
"""
def info(port) do
nillify(:erlang.port_info(port))
end
@doc """
Returns information about the `port` or `nil` if the port is closed.
For more information, see `:erlang.port_info/2`.
"""
@spec info(port, atom) :: {atom, term} | nil
def info(port, spec)
def info(port, :registered_name) do
case :erlang.port_info(port, :registered_name) do
:undefined -> nil
[] -> {:registered_name, []}
other -> other
end
end
def info(port, item) do
nillify(:erlang.port_info(port, item))
end
@doc """
Starts monitoring the given `port` from the calling process.
Once the monitored port process dies, a message is delivered to the
monitoring process in the shape of:
{:DOWN, ref, :port, object, reason}
where:
* `ref` is a monitor reference returned by this function;
* `object` is either the `port` being monitored (when monitoring by port id)
or `{name, node}` (when monitoring by a port name);
* `reason` is the exit reason.
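A minimal usage sketch (assuming `port` is an open port):
    ref = Port.monitor(port)
    receive do
      {:DOWN, ^ref, :port, ^port, reason} -> IO.inspect(reason)
    end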
See `:erlang.monitor/2` for more info.
Inlined by the compiler.
"""
@doc since: "1.6.0"
@spec monitor(port | {name :: atom, node :: atom} | name :: atom) :: reference
def monitor(port) do
:erlang.monitor(:port, port)
end
@doc """
Demonitors the monitor identified by the given `reference`.
If `monitor_ref` is a reference which the calling process
obtained by calling `monitor/1`, that monitoring is turned off.
If the monitoring is already turned off, nothing happens.
See `:erlang.demonitor/2` for more info.
Inlined by the compiler.
"""
@doc since: "1.6.0"
@spec demonitor(reference, options :: [:flush | :info]) :: boolean
defdelegate demonitor(monitor_ref, options \\ []), to: :erlang
@doc """
Returns a list of all ports in the current node.
Inlined by the compiler.
"""
@spec list :: [port]
def list do
:erlang.ports()
end
@compile {:inline, nillify: 1}
defp nillify(:undefined), do: nil
defp nillify(other), do: other
end
|
lib/elixir/lib/port.ex
| 0.902008 | 0.603494 |
port.ex
|
starcoder
|
defmodule Membrane.Core.Element.DemandHandler do
@moduledoc false
# Module handling demands requested on output pads.
use Bunch
alias Membrane.Core.InputBuffer
alias Membrane.Core.Child.PadModel
alias Membrane.Core.Element.{
BufferController,
CapsController,
DemandController,
EventController,
State
}
alias Membrane.Pad
require Membrane.Core.Child.PadModel
require Membrane.Core.Message
require Membrane.Logger
@doc """
Updates demand on the given input pad that should be supplied by future calls
to `supply_demand/2` or `check_and_supply_demands/2`.
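For example (an illustrative sketch, assuming a valid `pad_ref` and `state`):
    {:ok, state} = update_demand(pad_ref, 10, state)        # set demand to 10
    {:ok, state} = update_demand(pad_ref, &(&1 + 5), state) # bump demand by 5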
"""
@spec update_demand(
Pad.ref_t(),
pos_integer,
State.t()
) :: State.stateful_try_t()
def update_demand(pad_ref, size, state) when is_integer(size) do
state = PadModel.set_data!(state, pad_ref, :demand, size)
{:ok, state}
end
def update_demand(pad_ref, size_fun, state) when is_function(size_fun) do
PadModel.update_data(
state,
pad_ref,
:demand,
fn demand ->
new_demand = size_fun.(demand)
if new_demand < 0 do
{:error, :negative_demand}
else
{:ok, new_demand}
end
end
)
end
@doc """
Delays executing redemand until all current processing is finished.
Works similarly to `delay_supply/3`, but only `:sync` mode is supported. See
doc for `delay_supply/3` for more info.
"""
@spec delay_redemand(Pad.ref_t(), State.t()) :: State.t()
def delay_redemand(pad_ref, state) do
state
|> Map.update!(:delayed_demands, &MapSet.put(&1, {pad_ref, :redemand}))
end
@spec handle_delayed_demands(State.t()) :: State.stateful_try_t()
def handle_delayed_demands(%State{delayed_demands: del_dem} = state)
when del_dem == %MapSet{} do
{:ok, state}
end
def handle_delayed_demands(%State{delayed_demands: del_dem} = state) do
# Taking random element of `:delayed_demands` is done to keep data flow
# balanced among pads, i.e. to prevent situation where demands requested by
# one pad are supplied right away while another one is waiting for buffers
# potentially for a long time.
[{pad_ref, action}] = del_dem |> Enum.take_random(1)
state = %State{state | delayed_demands: del_dem |> MapSet.delete({pad_ref, action})}
res =
case action do
:supply ->
do_supply_demand(pad_ref, state)
:redemand ->
handle_redemand(pad_ref, state)
end
with {:ok, state} <- res do
handle_delayed_demands(state)
end
end
@doc """
Called when the `:redemand` action is returned.
* If the element is currently supplying demand, the redemand is delayed and
handled via `handle_delayed_demands` once the current supply finishes.
* If the element isn't supplying demand at the moment, `handle_demand` is
invoked right away. It will invoke the `handle_demand` callback, which will
likely return `:redemand` and `:buffers` actions, so the source supplies
demand synchronously.
"""
@spec handle_redemand(Pad.ref_t(), State.t()) :: {:ok, State.t()}
def handle_redemand(pad_ref, %State{supplying_demand?: true} = state) do
state =
state
|> Map.update!(:delayed_demands, &MapSet.put(&1, {pad_ref, :redemand}))
{:ok, state}
end
def handle_redemand(pad_ref, state) do
DemandController.handle_demand(pad_ref, 0, state)
end
@doc """
If the element is not currently supplying demand, this function supplies
demand right away by taking buffers from the InputBuffer of the given pad
and passing them to the proper controllers.
If the element is currently supplying demand, it delays supplying demand until
all current processing is finished.
This is necessary to handle the case where a demand action is requested while
a previous demand is still being supplied. Otherwise, buffers could be taken
from the InputBuffer and passed to callbacks before the buffers currently
being supplied have been processed, changing the order of buffers.
"""
@spec supply_demand(
Pad.ref_t(),
State.t()
) :: {:ok, State.t()} | {{:error, any()}, State.t()}
def supply_demand(pad_ref, %State{supplying_demand?: true} = state) do
state =
state
|> Map.update!(:delayed_demands, &MapSet.put(&1, {pad_ref, :supply}))
{:ok, state}
end
def supply_demand(pad_ref, state) do
with {:ok, state} <- do_supply_demand(pad_ref, state) do
handle_delayed_demands(state)
end
end
defp do_supply_demand(pad_ref, state) do
# Mark in the state that demand supply has started (note it is set back to false when finished)
state = %State{state | supplying_demand?: true}
pad_data = state |> PadModel.get_data!(pad_ref)
{{_buffer_status, data}, new_input_buf} =
InputBuffer.take_and_demand(
pad_data.input_buf,
pad_data.demand,
pad_data.pid,
pad_data.other_ref
)
state = PadModel.set_data!(state, pad_ref, :input_buf, new_input_buf)
with {:ok, state} <- handle_input_buf_output(pad_ref, data, state) do
{:ok, %State{state | supplying_demand?: false}}
else
{{:error, reason}, state} ->
Membrane.Logger.error("""
Error while supplying demand on pad #{inspect(pad_ref)} of size #{inspect(pad_data.demand)}
""")
{{:error, {:supply_demand, reason}}, %State{state | supplying_demand?: false}}
end
end
@spec handle_input_buf_output(
Pad.ref_t(),
[InputBuffer.output_value_t()],
State.t()
) :: State.stateful_try_t()
defp handle_input_buf_output(pad_ref, data, state) do
data
|> Bunch.Enum.try_reduce(state, fn v, state ->
do_handle_input_buf_output(pad_ref, v, state)
end)
end
@spec do_handle_input_buf_output(
Pad.ref_t(),
InputBuffer.output_value_t(),
State.t()
) :: State.stateful_try_t()
defp do_handle_input_buf_output(pad_ref, {:event, e}, state),
do: EventController.exec_handle_event(pad_ref, e, state)
defp do_handle_input_buf_output(pad_ref, {:caps, c}, state),
do: CapsController.exec_handle_caps(pad_ref, c, state)
defp do_handle_input_buf_output(
pad_ref,
{:buffers, buffers, size},
state
) do
state = PadModel.update_data!(state, pad_ref, :demand, &(&1 - size))
BufferController.exec_buffer_handler(pad_ref, buffers, state)
end
end
|
lib/membrane/core/element/demand_handler.ex
| 0.837753 | 0.522811 |
demand_handler.ex
|
starcoder
|
if Code.ensure_loaded?(Plug.Router) do
defmodule PromEx.Plugins.PlugRouter do
@moduledoc """
This plugin captures HTTP request metrics emitted by `Plug.Router` and `Plug.Telemetry`.
This plugin is heavily inspired by the `Plug.Cowboy` plugin, and exposes the following metric group:
- `:plug_router_http_event_metrics`
## Plugin options
- `routers`: **Required** This is a list with the full module names of your routers (e.g. MyAppWeb.Router).
Metrics produced by routers not in this list will be discarded.
- `event_prefix`: **Required**, allows you to set the event prefix defined in your `Plug.Telemetry` configuration:
```
defmodule WebApp.Router do
use Plug.Router
plug PromEx.Plug, prom_ex_module: WebApp.PromEx, path: "/metrics"
plug Plug.Telemetry, event_prefix: [:webapp, :router]
...
end
```
With the above configuration, this plugin will subscribe to `[:webapp, :router, :stop]` telemetry events
produced by `Plug.Telemetry`. These events will be fired **before** the response is actually sent, therefore this
plugin will be able to export response body size metrics, since the `Plug.Conn` struct in the metadata of the
telemetry measurement still contains the response body.
However, `Plug.Telemetry` does not use `:telemetry.span/3`, which means the `:stop` event might not always be fired
(e.g. if the process handling the request crashes). For this reason, this PromEx plugin also subscribes to
`[:plug, :router_dispatch, :exception]` telemetry events fired by `Plug.Router`, which are fired within a
`:telemetry.span/3` call.
Unfortunately, we cannot safely rely on `[:plug, :router_dispatch, :stop]` events produced by `Plug.Router` since
these are fired **after** the response is sent (as opposed to `Plug.Telemetry` `:stop` events). As a consequence,
the response body is no longer available in the `Plug.Conn` struct attached to the telemetry measurement metadata.
- `metric_prefix`: This option is OPTIONAL and is used to override the default metric prefix of
`[otp_app, :prom_ex, :plug_router]`. If this changes you will also want to set `plug_router_metric_prefix`
in your `dashboard_assigns` to the snakecase version of your prefix, the default
`plug_router_metric_prefix` is `{otp_app}_prom_ex_plug_router`.
- `ignore_routes`: This option is OPTIONAL and is used to ignore certain paths.
To use plugin in your application, add the following to your PromEx module:
```
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
alias PromEx.Plugins
@impl true
def plugins do
[
...
{Plugins.PlugRouter,
event_prefix: [:webapp, :router], metric_prefix: [:prom_ex, :router], routers: [WebApp.Router]}
]
end
@impl true
def dashboard_assigns do
[
datasource_id: "...",
plug_router_metric_prefix: "prom_ex_router"
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "plug_router.json"}
]
end
end
```
To ignore certain paths, pass a list of routes using the `:ignore_routes` option
```
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
{PromEx.Plugins.PlugRouter,
event_prefix: [:webapp, :router], metric_prefix: [:prom_ex, :router], routers: [WebApp.Router],
ignore_routes: ["/metrics"]}
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "plug_router.json"}
]
end
end
```
"""
use PromEx.Plugin
require Logger
alias Plug.Conn
@stop_event [:prom_ex, :router, :stop]
@default_event_prefix [:plug, :router_dispatch]
@impl true
def event_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :plug_router))
event_prefix = Keyword.fetch!(opts, :event_prefix)
set_up_telemetry_proxy(event_prefix)
[
http_events(metric_prefix, opts)
]
end
defp set_up_telemetry_proxy(event_prefix) do
:telemetry.attach(
{__MODULE__, :stop},
event_prefix ++ [:stop],
&__MODULE__.handle_proxy_router_event/4,
%{}
)
:telemetry.attach(
{__MODULE__, :exception},
@default_event_prefix ++ [:exception],
&__MODULE__.handle_proxy_router_event/4,
%{}
)
end
@doc false
def handle_proxy_router_event(_, measurements, meta, _) do
:telemetry.execute(@stop_event, measurements, meta)
end
defp http_events(metric_prefix, opts) do
http_metrics_tags = [:status, :method, :path]
routers =
opts
|> Keyword.fetch!(:routers)
|> MapSet.new()
ignore_routes =
opts
|> Keyword.get(:ignore_routes, [])
|> MapSet.new()
Event.build(
:plug_router_http_event_metrics,
[
# Capture request duration information
distribution(
metric_prefix ++ [:http, :request, :duration, :milliseconds],
event_name: @stop_event,
measurement: :duration,
description: "The time it takes for the application to process HTTP requests.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
drop: drop_ignored(ignore_routes, routers),
tag_values: &get_tags(&1),
tags: http_metrics_tags,
unit: {:native, :millisecond}
),
distribution(
metric_prefix ++ [:http, :response, :size, :bytes],
event_name: @stop_event,
description: "The size of the HTTP response payload.",
reporter_options: [
buckets: exponential!(1, 4, 12)
],
measurement: &resp_body_size/2,
drop: drop_ignored(ignore_routes, routers),
tag_values: &get_tags(&1),
tags: http_metrics_tags,
unit: :byte
),
counter(
metric_prefix ++ [:http, :requests, :total],
event_name: @stop_event,
description: "The number of requests that have been serviced.",
drop: drop_ignored(ignore_routes, routers),
tag_values: &get_tags(&1),
tags: http_metrics_tags
)
]
)
end
defp resp_body_size(_, metadata) do
case metadata.conn.resp_body do
nil -> 0
_ -> :erlang.iolist_size(metadata.conn.resp_body)
end
end
defp route(%Plug.Conn{private: %{plug_route: {route, _}}}) do
route
end
defp route(_conn) do
"Unknown"
end
defp route_or_path(conn) do
case Map.get(conn.private, :plug_route) do
{route, _} ->
route
nil ->
conn.request_path
end
end
defp get_tags(%{conn: conn = %Conn{}}) do
%{
status: conn.status || 500,
method: conn.method,
path: route(conn)
}
end
defp drop_ignored(ignored_routes, routers) do
fn
%{conn: conn = %Conn{}, router: router} ->
value = route_or_path(conn)
disallowed_router? = !Enum.member?(routers, router)
ignored_route? = MapSet.member?(ignored_routes, value)
disallowed_router? || ignored_route?
%{conn: conn = %Conn{}} ->
value = route_or_path(conn)
ignored_route? = MapSet.member?(ignored_routes, value)
ignored_route?
_meta ->
false
end
end
end
else
defmodule PromEx.Plugins.PlugRouter do
@moduledoc false
use PromEx.Plugin
@impl true
def event_metrics(_opts) do
PromEx.Plugin.no_dep_raise(__MODULE__, "Plug.Router")
end
end
end
|
lib/prom_ex/plugins/plug_router.ex
| 0.908056 | 0.69601 |
plug_router.ex
|
starcoder
|
defmodule Nerves.Firmware.Fwup do
use GenServer
require Logger
@moduledoc """
A Port interface to stream firmware to fwup
The caller process will receive the following messages:
* `{:fwup, :ok}`: firmware upgrade finished successfully
* `{:fwup, {:progress, integer()}}`: firmware upgrade progress (percentage)
* `{:fwup, {:error, {integer(), binary()}}}`: firmware upgrade reports error
* `{:fwup, {:error, binary()}}`: firmware upgrade reports unexpected data
* `{:fwup, {:warn, {integer(), binary()}}}`: firmware upgrade reports warning
## Example
```
{:ok, pid} = Nerves.Firmware.Fwup.start_link([device: "/tmp/test.img", task: "complete"])
"/path/to/my.fw"
|> File.stream!([], 4096)
|> Enum.each(fn data ->
Nerves.Firmware.Fwup.stream_chunk(pid, data)
end)
Nerves.Firmware.Fwup.stop(pid)
```
"""
@timeout 120_000
def start_link(opts \\ []) do
opts = Keyword.put_new(opts, :callback, self())
GenServer.start_link(__MODULE__, opts)
end
def stop(pid) do
GenServer.stop(pid)
end
def stream_chunk(pid, chunk) do
Logger.debug "Sending Chunk: #{inspect chunk}"
GenServer.call(pid, {:stream_chunk, chunk}, @timeout)
end
def init(opts) do
Process.flag(:trap_exit, true)
device = opts[:device] || Application.get_env(:nerves_firmware, :device, "/dev/mmcblk0")
task = opts[:task] || "upgrade"
fwup = System.find_executable("fwup")
callback = opts[:callback]
port = Port.open({:spawn_executable, fwup},
[{:args, ["-aFU", "-d", device, "-t", task]},
{:packet, 4},
:use_stdio,
:binary,
:exit_status])
{:ok, %{
port: port,
byte_size: 0,
callback: callback
}}
end
def handle_call({:stream_chunk, chunk}, _from, s) do
Port.command(s.port, chunk)
{:reply, :ok, s}
end
def handle_info({_port, {:data, <<"OK", _code :: integer-16>>}}, s) do
send(s.callback, {:fwup, :ok})
{:noreply, s}
end
def handle_info({_port, {:data, <<"ER", code :: integer-16, message :: binary>>}}, s) do
send(s.callback, {:fwup, {:error, {code, message}}})
{:noreply, s}
end
def handle_info({_port, {:data, <<warning :: binary-2, code :: integer-16, message :: binary>>}}, s)
when warning in ["WA", "WN"] do
send(s.callback, {:fwup, {:warn, {code, message}}})
{:noreply, s}
end
def handle_info({_port, {:data, <<"PR", progress :: integer-16>>}}, s) do
send(s.callback, {:fwup, {:progress, progress}})
{:noreply, s}
end
def handle_info({_port, {:data, resp}}, s) do
send(s.callback, {:error, resp})
{:noreply, s}
end
def handle_info(_msg, s) do
{:noreply, s}
end
@doc """
Apply the firmware in `input` to the given `device`, executing `task`.
`args` is a list of arguments to be passed to fwup.
Not implemented using ports, because ports can't send EOF, so it's not possible
to stream firmware through a port. Porcelain doesn't work because `goon` isn't
easy to compile for the target in Nerves.
The simple file-based I/O allows using named pipes to solve the streaming issues.
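Example (a sketch; the paths are illustrative):
    :ok = Nerves.Firmware.Fwup.apply("/path/to/my.fw", "/dev/mmcblk0", "upgrade")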
"""
@spec apply(String.t, String.t, String.t, [binary]) :: :ok | {:error, term}
def apply(input, device, task, args \\ []) do
Logger.info "Firmware: applying #{task} to #{device}"
fwup_args =
["-aqU", "--no-eject", "-i", input, "-d", device, "-t", task] ++ args
case System.cmd("fwup", fwup_args) do
{_out, 0} ->
:ok
{error, _} ->
Logger.error error
{:error, :fwup_error}
end
end
end
|
lib/firmware/fwup.ex
| 0.821653 | 0.585101 |
fwup.ex
|
starcoder
|
defmodule Elasticfusion.Search.Lexer do
@moduledoc """
This module exposes functions for lexical scanning operations on a string.
State tracking is explicit: functions receieve a state
and return a tuple of {match, new_state}. Initial state is set up
through `initialize/1`.
All functions match the beginning of input (e.g. a matcher for "AND" matches
"AND something", but not "something AND something") and consume all
insignificant whitespace past the match.
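For example (a sketch; `match_and/1` is generated from the `@tokens` list
below, and the extra `initialize/4` arguments are unused by the matchers):
    state = initialize("AND foo", :tags, [], nil)
    {"AND", state} = match_and(state)
    state.input #=> "foo"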
"""
@tokens [and: ~w{AND ,}, or: ~w{OR |}, not: ~w{NOT -}]
@field_qualifiers ["less than", "more than", "earlier than", "later than"]
@safe_string_until ~w{AND OR , | " ( )}
@string_with_balanced_parentheses_until ~w{AND OR , |}
import String, only: [trim: 1, trim_leading: 1]
def initialize(input, keyword_field, queryable_fields, field_transform) do
%{
input: trim_leading(input),
keyword_field: keyword_field,
queryable_fields: queryable_fields,
field_transform: field_transform
}
end
for {key, token} <- @tokens do
def unquote(:"match_#{key}")(state),
do: match_pattern(state, unquote(token))
end
def match_field(%{queryable_fields: []} = state),
do: {nil, state}
def match_field(%{queryable_fields: fields} = state) do
case match_pattern(state, fields) do
{field, %{input: rest} = new_state} when is_binary(field) ->
case rest do
":" <> rest ->
{field, %{new_state | input: trim_leading(rest)}}
_ ->
{nil, state}
end
_ ->
{nil, state}
end
end
def match_field_qualifier(state),
do: match_pattern(state, @field_qualifiers)
@doc """
May contain words, numbers, spaces, dashes, and underscores.
"""
def safe_string(state),
do: match_until(state, @safe_string_until)
def string_with_balanced_parentheses(%{} = state) do
case match_until(state, @string_with_balanced_parentheses_until) do
{nil, _state} = no_match ->
no_match
{match, %{input: rest}} ->
opening_parens =
length(String.split(match, "(")) - 1
balanced =
match
|> String.split(")")
|> Enum.slice(0..opening_parens)
|> Enum.join(")")
balanced_len = byte_size(balanced)
<<balanced::binary-size(balanced_len), cutoff::binary>> = match
{trim(balanced), %{state | input: trim_leading(cutoff) <> rest}}
end
end
def quoted_string(%{input: input} = state) do
case Regex.run(~r/"((?:\\.|[^"])*)"/, input, return: :index, capture: :all_but_first) do
[{1, len}] ->
<<quotemark::binary-size(1),
quoted::binary-size(len),
quotemark::binary-size(1),
rest::binary>> = input
quoted =
quoted
|> String.replace(~r/\\"/, "\"")
|> String.replace(~r/\\\\/, "\\")
{quoted, %{state | input: trim_leading(rest)}}
_ ->
{nil, state}
end
end
def left_parentheses(%{input: input} = state) do
case Regex.run(~r/^(\(\s*)+/, input, capture: :first) do
[match] ->
match_len = byte_size(match)
<<_::binary-size(match_len), rest::binary>> = input
count =
match
|> String.graphemes
|> Enum.count(&Kernel.==(&1, "("))
{count, %{state | input: rest}}
_ ->
{nil, state}
end
end
def right_parentheses(%{input: input} = state, count) do
case Regex.run(~r/^(\)\s*){#{count}}/, input, capture: :first) do
[match] ->
match_len = byte_size(match)
<<_::binary-size(match_len), rest::binary>> = input
{count, %{state | input: rest}}
_ ->
{nil, state}
end
end
# Internal
def match_pattern(%{input: input} = state, pattern) do
case :binary.match(input, pattern) do
{0, len} ->
<<match::binary-size(len), rest::binary>> = input
{match, %{state | input: trim_leading(rest)}}
_ ->
{nil, state}
end
end
defp match_until(%{input: input} = state, pattern) do
case :binary.match(input, pattern) do
{len, _} ->
<<matched::binary-size(len), rest::binary>> = input
{trim(matched), %{state | input: trim_leading(rest)}}
:nomatch ->
{input, %{state | input: ""}}
end
end
end
|
lib/elasticfusion/search/lexer.ex
| 0.802865 | 0.611498 |
lexer.ex
|
starcoder
|
defmodule Vnu do
@moduledoc "General purpose validation functions for HTML, CSS, and SVG documents."
alias Vnu.{Error, Result, Validator}
@doc ~S"""
Validates the given HTML document.
Returns `{:ok, %Vnu.Result{}}` if the validation process finished successfully, and `{:error, %Vnu.Error{}}` otherwise.
Note that the `{:ok, %Vnu.Result{}}` return value does not mean necessarily that the document is valid.
See `Vnu.valid?/1` and `Vnu.Message` for interpreting the result.
## Options
- `:server_url` - The URL of [the Checker server](https://github.com/validator/validator).
Defaults to `http://localhost:8888`.
- `:filter` - A module implementing the `Vnu.MessageFilter` behavior that will be used to exclude messages matching the filter from the result.
Defaults to `nil` (no excluded messages).
- `:http_client` - A module implementing the `Vnu.HTTPClient` behaviour that will be used to make the HTTP request to the server.
Defaults to `Vnu.HTTPClient.Hackney`.
## Examples
iex> Vnu.validate_html(~S(
...><!DOCTYPE html>
...><html>
...><head>
...> <meta charset="utf-8">
...></head>
...><body>
...></body>
...></html>
...>), server_url: System.get_env("VNU_SERVER_URL") || "http://localhost:8888")
{:ok, %Vnu.Result{messages: [
%Vnu.Message{
type: :error,
message: "Element “head” is missing a required instance of child element “title”.",
extract: "=\"utf-8\">\n</head>\n<body",
first_line: 6,
last_line: 6,
first_column: 1,
last_column: 7,
hilite_length: 7,
hilite_start: 10
},
%Vnu.Message{
type: :info,
sub_type: :warning,
message: "Consider adding a “lang” attribute to the “html” start tag to declare the language of this document.",
extract: "TYPE html>\n<html>\n<head",
first_line: 2,
last_line: 3,
first_column: 16,
last_column: 6,
hilite_length: 7,
hilite_start: 10,
}
]}}
iex> Vnu.validate_html("", server_url: "http://wrong-domain")
{:error, %Vnu.Error{
reason: :unexpected_server_response,
message: "Could not contact the server, got error: :nxdomain"
}}
"""
@spec validate_html(String.t(), Keyword.t()) :: {:ok, Result.t()} | {:error, Error.t()}
def validate_html(html, opts \\ []) when is_bitstring(html) and is_list(opts) do
Validator.validate(html, Keyword.merge(opts, format: :html))
end
@doc ~S"""
Same as `validate_html/2` but returns `%Vnu.Result{}` or raises `%Vnu.Error{}`.
"""
@spec validate_html!(String.t(), Keyword.t()) :: Result.t() | no_return()
def validate_html!(html, opts \\ []) when is_bitstring(html) and is_list(opts) do
Validator.validate!(html, Keyword.merge(opts, format: :html))
end
@doc ~S"""
Validates the given CSS document.
See `validate_html/2` for the list of options and other details.
## Examples
iex> Vnu.validate_css(".button { display: banana; }", server_url: System.get_env("VNU_SERVER_URL") || "http://localhost:8888")
{:ok, %Vnu.Result{messages: [
%Vnu.Message{
type: :error,
message: "“display”: “banana” is not a “display” value.",
extract: ".button { display: banana; }\n",
first_line: 1,
last_line: 1,
first_column: 20,
last_column: 25,
hilite_length: 6,
hilite_start: 19,
}
]}}
iex> Vnu.validate_css("", server_url: "http://wrong-domain")
{:error, %Vnu.Error{
reason: :unexpected_server_response,
message: "Could not contact the server, got error: :nxdomain"
}}
"""
@spec validate_css(String.t(), Keyword.t()) :: {:ok, Result.t()} | {:error, Error.t()}
def validate_css(css, opts \\ []) when is_bitstring(css) and is_list(opts) do
Validator.validate(css, Keyword.merge(opts, format: :css))
end
@doc ~S"""
Same as `validate_css/2` but returns `%Vnu.Result{}` or raises `%Vnu.Error{}`.
"""
@spec validate_css!(String.t(), Keyword.t()) :: Result.t() | no_return()
def validate_css!(css, opts \\ []) when is_bitstring(css) and is_list(opts) do
Validator.validate!(css, Keyword.merge(opts, format: :css))
end
@doc ~S"""
Validates the given SVG document.
See `validate_html/2` for the list of options and other details.
## Examples
iex> Vnu.validate_svg(~S(
...><svg width="5cm" height="4cm" version="1.1" xmlns="http://www.w3.org/2000/svg">
...><desc>Rectangle</desc>
...><rect x="0.5cm" y="0.5cm" height="1cm"/>
...></svg>
...> ), server_url: System.get_env("VNU_SERVER_URL") || "http://localhost:8888")
{:ok, %Vnu.Result{messages: [
%Vnu.Message{
type: :info,
message: "Using the preset for SVG 1.1 + URL + HTML + MathML 3.0 based on the root namespace."
},
%Vnu.Message{
type: :error,
message: "SVG element “rect” is missing required attribute “width”.",
extract: "le</desc>\n<rect x=\"0.5cm\" y=\"0.5cm\" height=\"1cm\"/>\n</svg",
first_line: 4,
last_line: 4,
first_column: 1,
last_column: 40,
hilite_length: 40,
hilite_start: 10,
}
]}}
iex> Vnu.validate_svg("", server_url: "http://wrong-domain")
{:error, %Vnu.Error{
reason: :unexpected_server_response,
message: "Could not contact the server, got error: :nxdomain"
}}
"""
@spec validate_svg(String.t(), Keyword.t()) :: {:ok, Result.t()} | {:error, Error.t()}
def validate_svg(svg, opts \\ []) when is_bitstring(svg) and is_list(opts) do
Validator.validate(svg, Keyword.merge(opts, format: :svg))
end
@doc ~S"""
Same as `validate_svg/2` but returns `%Vnu.Result{}` or raises `%Vnu.Error{}`.
"""
@spec validate_svg!(String.t(), Keyword.t()) :: Result.t() | no_return()
def validate_svg!(svg, opts \\ []) when is_bitstring(svg) and is_list(opts) do
Validator.validate!(svg, Keyword.merge(opts, format: :svg))
end
@doc ~S"""
Checks if the results of `Vnu.validate_html/2`, `Vnu.validate_css/2`, or `Vnu.validate_svg/2` determined the document to be valid.
## Options
- `:server_url` - The URL of [the Checker server](https://github.com/validator/validator). Defaults to `http://localhost:8888`.
- `:fail_on_warnings` - Messages of type `:info` and subtype `:warning` will be treated as if they were validation errors.
Their presence will mean the document is invalid. Defaults to `false`.
- `:http_client` - A module implementing the `Vnu.HTTPClient` behaviour that will be used to make the HTTP request to the server.
Defaults to `Vnu.HTTPClient.Hackney`.
## Examples
iex> {:ok, result} = Vnu.validate_html("", server_url: System.get_env("VNU_SERVER_URL") || "http://localhost:8888")
iex> Vnu.valid?(result)
false
iex> {:ok, result} = Vnu.validate_html(~S(
...><!DOCTYPE html>
...><html>
...><head>
...> <meta charset="utf-8">
...> <title>Hello World</title>
...></head>
...><body>
...></body>
...></html>
...>), server_url: System.get_env("VNU_SERVER_URL") || "http://localhost:8888")
iex> [message] = result.messages
iex> message.message
"Consider adding a “lang” attribute to the “html” start tag to declare the language of this document."
iex> message.sub_type
:warning
iex> Vnu.valid?(result)
true
iex> Vnu.valid?(result, fail_on_warnings: true)
false
"""
@spec valid?(Result.t(), Keyword.t()) :: boolean()
def valid?(%Result{} = result, opts \\ []) do
Validator.valid?(result, opts)
end
end
|
lib/vnu.ex
| 0.89957 | 0.485722 |
vnu.ex
|
starcoder
|
defmodule Circuits.GPIO.Chip do
@moduledoc """
Control GPIOs using the GPIO chip interface
With the character device driver for GPIOs there three concepts to learn.
First, the API is made up of chips and lines that are grouped together for
that chip. A chip is more of a grouping identifier than anything physical
property about the board.
Secondly, the API requires us to request lines from a GPIO chip. The reason
for this is the kernel can provide control over who "owns" that line and
prevent multiple programs from trying to control the same GPIO pin.
Lastly, you can listen for events on a line. These events report if the line
is high or low.
Generally speaking, the character device driver allows finer-grained control
and more reliability than the `sysfs` API.
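A typical interaction looks like this (a sketch, assuming a board that
exposes `gpiochip0` and has an LED wired to offset 17):
    {:ok, chip} = Circuits.GPIO.Chip.open("gpiochip0")
    {:ok, handle} = Circuits.GPIO.Chip.request_line(chip, 17, :output)
    :ok = Circuits.GPIO.Chip.set_value(handle, 1)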
"""
alias Circuits.GPIO.Chip.{Events, LineHandle, LineInfo, Nif}
@type t() :: %__MODULE__{
name: String.t(),
label: String.t(),
number_of_lines: non_neg_integer(),
reference: reference()
}
@typedoc """
The offset of the pin
An offset is the pin number provided. Normally these are labeled `GPIO N` or
`GPIO_N` where `N` is the pin number. For example, if you wanted to use
`GPIO 17` on a Raspberry Pi, the offset value would be `17`.
More resources:
Raspberry PI: https://pinout.xyz/
Beaglebone: https://beagleboard.org/Support/bone101
"""
@type offset() :: non_neg_integer()
@typedoc """
The value of the offset
This is either 0 for low or off, or 1 for high or on.
"""
@type offset_value() :: 0 | 1
@typedoc """
The direction of the line
With the character device you drive a line with configured offsets. These
offsets all share a direction, either `:output` or `:input`, which is called
the line direction.
The `:output` direction means you control the GPIOs by setting the value of
the GPIOs to 1 or 0. See `Circuits.GPIO.Chip.set_value/2` for more
information.
The `:input` direction means you can only read the current value of the GPIOs
on the line. See `Circuits.GPIO.Chip.read_value/1` for more information.
"""
@type line_direction() :: :input | :output
defstruct name: nil, label: nil, number_of_lines: 0, reference: nil
@doc """
Getting information about a line
"""
@spec get_line_info(t(), offset()) :: {:ok, LineInfo.t()} | {:error, atom()}
def get_line_info(%__MODULE__{} = chip, offset) do
case Nif.get_line_info_nif(chip.reference, offset) do
{:ok, name, consumer, direction, active_low} ->
{:ok,
%LineInfo{
offset: offset,
name: to_string(name),
consumer: to_string(consumer),
direction: direction_to_atom(direction),
active_low: active_low_int_to_bool(active_low)
}}
error ->
error
end
end
@doc """
Listen to line events on the line offset
```elixir
Circuits.GPIO.Chip.listen_event(mygpio_chip, 24)
# cause the offset to change value
flush
{:circuits_cdev, 24, timestamp, new_value}
```
The timestamp is in nanoseconds, so be sure to take that into account when
doing time calculations and conversions.
The `new_value` will be the value the offset value changed to either `1` or
`0`.
"""
@spec listen_event(t() | String.t(), offset()) :: :ok
def listen_event(%__MODULE__{} = chip, offset) do
Events.listen_event(chip, offset)
end
def listen_event(chip_name, offset) when is_binary(chip_name) do
case open(chip_name) do
{:ok, chip} -> listen_event(chip, offset)
end
end
@doc """
Open a GPIO Chip
```elixir
{:ok, chip} = Circuits.GPIO.Chip.open(gpiochip_device)
```
"""
@spec open(String.t()) :: {:ok, t()}
def open(chip_name) do
chip_name = Path.join("/dev", chip_name)
{:ok, ref} = Nif.chip_open_nif(to_charlist(chip_name))
{:ok, name, label, number_of_lines} = Nif.get_chip_info_nif(ref)
{:ok,
%__MODULE__{
name: to_string(name),
label: to_string(label),
number_of_lines: number_of_lines,
reference: ref
}}
end
@doc """
Read value from a line handle
This is useful when you have a line handle that contains only one GPIO
offset.
If you want to read multiple GPIOs at once see
`Circuits.GPIO.Chip.read_values/1`.
```elixir
{:ok, line_handle} = Circuits.GPIO.Chip.request_line("gpiochip0", 17, :input)
{:ok, 0} = Circuits.GPIO.Chip.read_value(line_handle)
```
"""
@spec read_value(LineHandle.t()) :: {:ok, offset_value()} | {:error, atom()}
def read_value(line_handle) do
case read_values(line_handle) do
{:ok, [value]} ->
{:ok, value}
error ->
error
end
end
@doc """
Read values for a line handle
This is useful when you have a line handle that contains multiple GPIO offsets.
```elixir
{:ok, line_handle} = Circuits.GPIO.Chip.request_lines("gpiochip0", [17, 22, 23, 24], :input)
{:ok, [0, 0, 0, 0]} = Circuits.GPIO.Chip.read_values(line_handle)
```
Note that the values in the list are returned in the order in which the
offsets were requested.
"""
@spec read_values(LineHandle.t()) :: {:ok, [offset_value()]} | {:error, atom()}
def read_values(line_handle) do
%LineHandle{handle: handle} = line_handle
Nif.read_values_nif(handle)
end
@doc """
Request a line handle for a single GPIO offset
```elixir
{:ok, line_handle} = Circuits.GPIO.Chip.request_line(my_gpio_chip, 17, :output)
```
See `Circuits.GPIO.Chip.request_lines/3` and `Circuits.GPIO.LineHandle` for
more details about line handles.
"""
@spec request_line(t() | String.t(), offset(), line_direction()) :: {:ok, LineHandle.t()}
def request_line(%__MODULE__{} = chip, offset, direction) do
request_lines(chip, [offset], direction)
end
def request_line(chip_name, offset, direction) when is_binary(chip_name) do
case open(chip_name) do
{:ok, chip} ->
request_lines(chip, [offset], direction)
end
end
@doc """
Request a line handle for multiple GPIO offsets
```elixir
{:ok, line_handle} = Circuits.GPIO.Chip.request_lines(my_gpio_chip, [17, 24], :output)
```
For the GPIO character device driver you drive GPIOs by requesting a line
handle that contains one or more GPIO offsets. The line handle is the
mechanism by which you can read and set the values of the GPIO(s). The line
handle is attached to the calling process, and the kernel will not allow
others to control the GPIO(s) that are part of that line handle. Moreover,
once the process that requested the line handle goes away, the kernel can
automatically free the system resources that were tied to that line handle.
"""
@spec request_lines(t() | String.t(), [offset()], line_direction()) :: {:ok, LineHandle.t()}
def request_lines(%__MODULE__{} = chip, offsets, direction) do
{:ok, handle} = Nif.request_lines_nif(chip.reference, offsets, direction_from_atom(direction))
{:ok, %LineHandle{chip: chip, handle: handle}}
end
def request_lines(chip_name, offsets, direction) when is_binary(chip_name) do
case open(chip_name) do
{:ok, chip} ->
request_lines(chip, offsets, direction)
end
end
@doc """
Set the value of the GPIO
```elixir
{:ok, line_handle} = Circuits.GPIO.Chip.request_line(my_gpio_chip, 17, :output)
{:ok, 0} = Circuits.GPIO.Chip.read_value(line_handle)
:ok = Circuits.GPIO.Chip.set_value(line_handle, 1)
{:ok, 1} = Circuits.GPIO.Chip.read_value(line_handle)
```
"""
@spec set_value(LineHandle.t(), offset_value()) :: :ok | {:error, atom()}
def set_value(handle, value) do
set_values(handle, [value])
end
@doc """
Set values of the GPIOs
```elixir
{:ok, line_handle} = Circuits.GPIO.Chip.request_lines(my_gpio_chip, [17, 24, 22], :output)
{:ok, [0, 0, 0]} = Circuits.GPIO.Chip.read_values(line_handle)
:ok = Circuits.GPIO.Chip.set_values(line_handle, [1, 0, 1])
{:ok, [1, 0, 1]} = Circuits.GPIO.Chip.read_values(line_handle)
```
Note that the order of the values sent matches the order in which the GPIO
offsets were requested. In the example above offset 17 was set to 1, offset
24 stayed at 0, and offset 22 was set to 1.
"""
@spec set_values(LineHandle.t(), [offset_value()]) :: :ok | {:error, atom()}
def set_values(line_handle, values) do
%LineHandle{handle: handle} = line_handle
Nif.set_values_nif(handle, values)
end
defp direction_from_atom(:input), do: 0
defp direction_from_atom(:output), do: 1
defp direction_to_atom(0), do: :input
defp direction_to_atom(1), do: :output
defp active_low_int_to_bool(0), do: false
defp active_low_int_to_bool(1), do: true
end
|
lib/chip.ex
| 0.888686 | 0.923558 |
chip.ex
|
starcoder
|
defmodule Thrift.AST do
@moduledoc """
Thrift Abstract Syntax Tree
Parsed Thrift files are represented as a tree of these structures, starting
with a `Thrift.AST.Schema` node.
"""
import Thrift.Parser.Conversions
alias Thrift.Parser.{Literals, Types}
defmodule Namespace do
@moduledoc false
@type t :: %Namespace{line: Parser.line(), name: atom, path: String.t()}
@enforce_keys [:name, :path]
defstruct line: nil, name: nil, path: nil
@spec new(charlist, charlist) :: t
def new(name, path) do
%Namespace{name: atomify(name), path: List.to_string(path)}
end
end
defmodule Include do
@moduledoc false
@type t :: %Include{line: Parser.line(), path: String.t()}
@enforce_keys [:path]
defstruct line: nil, path: nil
@spec new(charlist) :: t
def new(path) do
%Include{path: List.to_string(path)}
end
end
defmodule Constant do
@moduledoc false
@type t :: %Constant{line: Parser.line(), name: atom, value: Literals.t(), type: Types.t()}
@enforce_keys [:name, :value, :type]
defstruct line: nil, name: nil, value: nil, type: nil
@spec new(charlist, Literals.t(), Types.t()) :: t
def new(name, val, type) do
%Constant{name: atomify(name), value: cast(type, val), type: type}
end
end
defmodule TEnum do
@moduledoc false
@type enum_value :: bitstring | integer
@type t :: %TEnum{
line: Parser.line(),
annotations: Parser.annotations(),
name: atom,
values: [{atom, enum_value}]
}
@enforce_keys [:name, :values]
defstruct line: nil, annotations: %{}, name: nil, values: []
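# Values without an explicit number continue counting from the previous
# value, starting at 0. For example (illustrative):
#   new('Color', [{'RED', 2}, 'GREEN', 'BLUE'])
#   #=> %TEnum{name: :Color, values: [RED: 2, GREEN: 3, BLUE: 4]}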
@spec new(charlist, %{charlist => enum_value}) :: t
def new(name, values) do
{_, values} =
Enum.reduce(values, {0, []}, fn
{name, value}, {_index, acc} ->
{value + 1, [{atomify(name), value} | acc]}
name, {index, acc} ->
{index + 1, [{atomify(name), index} | acc]}
end)
%TEnum{name: atomify(name), values: Enum.reverse(values)}
end
end
defmodule Field do
@moduledoc false
@type printable :: String.t() | atom
@type t :: %Field{
line: Parser.line(),
annotations: Parser.annotations(),
id: integer,
name: atom,
type: Types.t(),
required: boolean,
default: Literals.t()
}
@enforce_keys [:id, :name, :type]
defstruct line: nil,
annotations: %{},
id: nil,
name: nil,
type: nil,
required: :default,
default: nil
@spec new(integer, boolean, Types.t(), charlist, Literals.t()) :: t
def new(id, required, type, name, default) do
%Field{
id: id,
type: type,
name: atomic_snake(name),
required: required,
default: cast(type, default)
}
end
@spec build_field_list(printable, [Field.t()]) :: [Field.t()]
def build_field_list(parent_name, fields) do
fields
|> assign_missing_ids
|> validate_ids(parent_name)
end
# Fields without explicit indices are automatically assigned starting from
# -1 and working their way down. Implicit field indices were deprecated by
# Apache Thrift, but we support them for greater compatibility.
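# For example (illustrative), a field list with ids [nil, 3, nil] is
# assigned ids [-1, 3, -2], in order.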
defp assign_missing_ids(fields, auto_index \\ -1)
defp assign_missing_ids([%Field{id: nil} = field | fields], auto_index) do
[%Field{field | id: auto_index} | assign_missing_ids(fields, auto_index - 1)]
end
defp assign_missing_ids([field | fields], auto_index) do
[field | assign_missing_ids(fields, auto_index)]
end
defp assign_missing_ids([], _), do: []
defp validate_ids(fields, name) do
dupes =
fields
|> Enum.group_by(& &1.id)
|> Enum.filter(fn {_, v} -> length(v) > 1 end)
unless Enum.empty?(dupes) do
{id, dupe_fields} = List.first(dupes)
names =
dupe_fields
|> Enum.map(&"#{name}.#{&1.name}")
|> Enum.sort()
|> Enum.join(", ")
raise "Error: #{names} share field number #{id}."
end
fields
end
end
defmodule Exception do
@moduledoc false
@type t :: %Exception{
line: Parser.line(),
annotations: Parser.annotations(),
name: atom,
fields: [Field.t()]
}
@enforce_keys [:name, :fields]
defstruct line: nil, annotations: %{}, fields: %{}, name: nil
@spec new(charlist, [Field.t(), ...]) :: t
def new(name, fields) do
ex_name = atomify(name)
updated_fields = Field.build_field_list(ex_name, fields)
%Exception{name: ex_name, fields: updated_fields}
end
end
defmodule Struct do
@moduledoc false
@type t :: %Struct{
line: Parser.line(),
annotations: Parser.annotations(),
name: atom,
fields: [Field.t()]
}
@enforce_keys [:name, :fields]
defstruct line: nil, annotations: %{}, name: nil, fields: %{}
@spec new(charlist, [Field.t(), ...]) :: t
def new(name, fields) do
struct_name = atomify(name)
fields = Field.build_field_list(struct_name, fields)
%Struct{name: struct_name, fields: fields}
end
end
defmodule Union do
@moduledoc false
@type t :: %Union{
line: Parser.line(),
annotations: Parser.annotations(),
name: atom,
fields: [Field.t()]
}
@enforce_keys [:name, :fields]
defstruct line: nil, annotations: %{}, name: nil, fields: %{}
@spec new(charlist, [Field.t(), ...]) :: t
def new(name, fields) do
name = atomify(name)
fields =
name
|> Field.build_field_list(fields)
|> Enum.map(fn %Field{} = field ->
# According to Thrift docs, unions have implicitly optional
# fields. See https://thrift.apache.org/docs/idl#union
%Field{field | required: false}
end)
%Union{name: name, fields: fields}
end
def validator(%Union{}, var_name) do
union_var = Macro.var(var_name, nil)
quote do
set_fields =
unquote(union_var)
|> Map.delete(:__struct__)
|> Enum.reject(fn {_, val} -> is_nil(val) end)
case set_fields do
[] ->
:ok
[_] ->
:ok
set_fields ->
field_names = Enum.map(set_fields, &elem(&1, 0))
raise %Thrift.Union.TooManyFieldsSetError{
message: "Thrift union has more than one field set",
set_fields: field_names
}
end
end
end
def validator(_, var_name) do
non_union_var = Macro.var(var_name, nil)
quote do
_ = unquote(non_union_var)
end
end
end
defmodule TypeRef do
@moduledoc false
@type t :: %TypeRef{line: Parser.line(), referenced_type: atom}
@enforce_keys [:referenced_type]
defstruct line: nil, referenced_type: nil
@spec new(charlist) :: t
def new(referenced_type) do
%TypeRef{referenced_type: atomify(referenced_type)}
end
end
defmodule ValueRef do
@moduledoc false
@type t :: %ValueRef{line: Parser.line(), referenced_value: atom}
@enforce_keys [:referenced_value]
defstruct line: nil, referenced_value: nil
@spec new(charlist) :: t
def new(referenced_value) do
%ValueRef{referenced_value: atomify(referenced_value)}
end
end
defmodule Function do
@moduledoc false
@type return :: :void | Types.t()
@type t :: %Function{
line: Parser.line(),
annotations: Parser.annotations(),
oneway: boolean,
return_type: return,
name: atom,
params: [Field.t()],
exceptions: [Exception.t()]
}
@enforce_keys [:name]
defstruct line: nil,
annotations: %{},
oneway: false,
return_type: :void,
name: nil,
params: [],
exceptions: []
@spec new(boolean, Types.t(), charlist, [Field.t(), ...], [Exception.t(), ...]) :: t
def new(oneway, return_type, name, params, exceptions) do
name = atomify(name)
params = Field.build_field_list(name, params)
%Function{
oneway: oneway,
return_type: return_type,
name: name,
params: params,
exceptions: exceptions
}
end
end
defmodule Service do
@moduledoc false
@type t :: %Service{
line: Parser.line(),
annotations: Parser.annotations(),
name: atom,
extends: atom,
functions: %{atom => Function.t()}
}
@enforce_keys [:name, :functions]
defstruct line: nil, annotations: %{}, name: nil, extends: nil, functions: %{}
@spec new(charlist, [Function.t(), ...], charlist) :: t
def new(name, functions, extends) do
fn_map = Enum.into(functions, %{}, fn f -> {f.name, f} end)
%Service{name: atomify(name), extends: atomify(extends), functions: fn_map}
end
end
defmodule Schema do
@moduledoc """
A Thrift schema.
A program represents a single parsed file in Thrift.
Many programs can be compiled together to build a Thrift service.
This is the root datastructure that the parser emits after running.
"""
@type header :: Include.t() | Namespace.t()
@type typedef :: {:typedef, Types.t(), atom}
@type definition ::
Service.t()
| TEnum.t()
| Exception.t()
| Union.t()
| Struct.t()
| Constant.t()
| typedef
@type t :: %Schema{
path: Path.t() | nil,
module: String.t(),
namespaces: %{String.t() => Namespace.t()},
structs: %{String.t() => Struct.t()},
services: %{String.t() => Service.t()},
enums: %{String.t() => TEnum.t()},
unions: %{String.t() => Union.t()},
includes: [Include.t()],
constants: %{String.t() => Literals.t()},
exceptions: %{String.t() => Exception.t()},
typedefs: %{String.t() => Types.t()},
file_group: FileGroup.t()
}
defstruct path: nil,
module: nil,
namespaces: %{},
structs: %{},
services: %{},
enums: %{},
unions: %{},
includes: [],
constants: %{},
exceptions: %{},
typedefs: %{},
file_group: nil
@doc """
Constructs a schema from header and definition lists.
"""
@spec new(Path.t() | nil, [header], [definition]) :: t
def new(path, headers, defs) do
schema = %Schema{path: path, module: module_name(path)}
(headers ++ defs)
|> Enum.reverse()
|> Enum.reduce(schema, &merge(&2, &1))
end
defp module_name(nil), do: nil
defp module_name(path) when is_bitstring(path) do
path
|> Path.basename()
|> Path.rootname()
|> String.to_atom()
end
@spec merge(t, header | definition) :: t
defp merge(schema, %Include{} = inc) do
%Schema{schema | includes: [inc | schema.includes]}
end
defp merge(schema, %Namespace{} = ns) do
%Schema{schema | namespaces: Map.put(schema.namespaces, ns.name, ns)}
end
defp merge(schema, %Constant{} = const) do
%Schema{schema | constants: put_new_strict(schema.constants, const.name, const)}
end
defp merge(schema, %TEnum{} = enum) do
%Schema{
schema
| enums:
put_new_strict(schema.enums, enum.name, add_namespace_to_name(schema.module, enum))
}
end
defp merge(schema, %Exception{} = exc) do
fixed_fields =
schema.module
|> add_namespace_to_name(exc)
|> add_namespace_to_fields()
%Schema{schema | exceptions: put_new_strict(schema.exceptions, exc.name, fixed_fields)}
end
defp merge(schema, %Struct{} = s) do
fixed_fields =
schema.module
|> add_namespace_to_name(s)
|> add_namespace_to_fields()
%Schema{schema | structs: put_new_strict(schema.structs, s.name, fixed_fields)}
end
defp merge(schema, %Union{} = union) do
fixed_fields =
schema.module
|> add_namespace_to_name(union)
|> add_namespace_to_fields()
%Schema{schema | unions: put_new_strict(schema.unions, union.name, fixed_fields)}
end
defp merge(schema, %Service{} = service) do
%Schema{
schema
| services:
put_new_strict(
schema.services,
service.name,
add_namespace_to_name(schema.module, service)
)
}
end
defp merge(schema, {:typedef, actual_type, type_alias}) do
%Schema{
schema
| typedefs:
put_new_strict(
schema.typedefs,
atomify(type_alias),
add_namespace_to_type(schema.module, actual_type)
)
}
end
defp add_namespace_to_name(nil, model) do
model
end
defp add_namespace_to_name(module, %{name: name} = model) do
%{model | name: add_namespace_to_type(module, name)}
end
defp add_namespace_to_type(module, %TypeRef{referenced_type: t} = type) do
%TypeRef{type | referenced_type: add_namespace_to_type(module, t)}
end
defp add_namespace_to_type(module, {:set, elem_type}) do
{:set, add_namespace_to_type(module, elem_type)}
end
defp add_namespace_to_type(module, {:list, elem_type}) do
{:list, add_namespace_to_type(module, elem_type)}
end
defp add_namespace_to_type(module, {:map, {key_type, val_type}}) do
{:map, {add_namespace_to_type(module, key_type), add_namespace_to_type(module, val_type)}}
end
for type <- Thrift.primitive_names() do
defp add_namespace_to_type(_, unquote(type)) do
unquote(type)
end
end
defp add_namespace_to_type(module, type_name) when is_atom(type_name) do
split_type_name =
type_name
|> Atom.to_string()
|> String.split(".")
case split_type_name do
[^module | _rest] ->
# this case accounts for types that already have the current module in them
type_name
_ ->
:"#{module}.#{type_name}"
end
end
defp add_namespace_to_fields(%{fields: fields} = model) do
%{model | fields: Enum.map(fields, &add_namespace_to_field/1)}
end
defp add_namespace_to_field(%Field{default: nil} = field) do
field
end
defp add_namespace_to_field(%Field{default: default, type: type} = field) do
%Field{field | default: add_namespace_to_defaults(type, default)}
end
defp add_namespace_to_defaults({:list, elem_type}, defaults) when is_list(defaults) do
for elem <- defaults do
add_namespace_to_defaults(elem_type, elem)
end
end
defp add_namespace_to_defaults({:set, elem_type}, %MapSet{} = defaults) do
for elem <- defaults, into: MapSet.new() do
add_namespace_to_defaults(elem_type, elem)
end
end
defp add_namespace_to_defaults({:map, {_, _}}, %ValueRef{} = val) do
val
end
defp add_namespace_to_defaults({:map, {key_type, val_type}}, defaults)
when is_map(defaults) do
for {key, val} <- defaults, into: %{} do
{add_namespace_to_defaults(key_type, key), add_namespace_to_defaults(val_type, val)}
end
end
defp add_namespace_to_defaults(
%TypeRef{referenced_type: referenced_type},
%ValueRef{referenced_value: referenced_value} = val_ref
) do
%ValueRef{val_ref | referenced_value: namespaced_module(referenced_type, referenced_value)}
end
defp add_namespace_to_defaults(%TypeRef{} = type, defaults) when is_list(defaults) do
for default_value <- defaults do
add_namespace_to_defaults(type, default_value)
end
end
defp add_namespace_to_defaults(ref, {key_type, val_type}) do
# this is used for a remote typedef that defines a map
{add_namespace_to_defaults(ref, key_type), add_namespace_to_defaults(ref, val_type)}
end
defp add_namespace_to_defaults(_t, val) do
val
end
defp namespaced_module(type, value) do
with string_val <- Atom.to_string(type),
[module, _value | _rest] <- String.split(string_val, ".") do
add_namespace_to_type(module, value)
else
_ ->
value
end
end
defp put_new_strict(map, key, value) do
case map[key] do
nil ->
Map.put(map, key, value)
_ ->
raise "Name collision: #{key}"
end
end
end
@type all ::
Namespace.t()
| Include.t()
| Constant.t()
| TEnum.t()
| Field.t()
| Exception.t()
| Struct.t()
| Union.t()
| Function.t()
| Service.t()
| Schema.t()
end
|
lib/thrift/ast.ex
| 0.840062 | 0.424949 |
ast.ex
|
starcoder
|
defmodule Mix.Tasks.PhxUp.Gen.ControllerViewTemplate do
@shortdoc "Generate a controller, view and template (for each action) and tests"
@moduledoc """
Generates a controller, a view, templates (one per action) and tests.
mix phx_up.gen.controller_view_template Post create update index find
or
mix phx_up.gen.cvt Post create update index find
The first argument is the controller module followed by the actions.
Actions can be empty.
"""
use Mix.Task
@doc false
def run(io_puts \\ true, args) do
if io_puts == true do
IO.puts("""
_ __ __ _____
___| |_ _ _ | | | _ |
| . | |_'_| | | | __|
| _|_|_|_,_|_____|_____|__|
|_| |_____| .gen.controller_view_template
""")
end
IO.puts("... Preparing controller")
context = get_context(args)
actions = get_actions(args)
create_controller(context, actions)
create_controller_test(context, actions)
IO.puts("")
Mix.Tasks.PhxUp.Gen.View.run(false, args)
for action <- actions do
Mix.Tasks.PhxUp.Gen.Template.run(false, ["#{context[:path]}/#{action}"])
end
IO.puts("")
end
defp create_controller(context, actions) do
copy_template(
"controller.eex",
"lib/#{context[:web_path]}/controllers/#{context[:path]}_controller.ex",
context: context,
actions: actions
)
end
defp create_controller_test(context, actions) do
copy_template(
"controller_test.eex",
"test/#{context[:web_path]}/controllers/#{context[:path]}_controller_test.exs",
context: context,
actions: actions
)
end
defp copy_template(name, final_path, opts) do
Path.join(:code.priv_dir(:phoenix_up), "templates/phx_up.gen.controller_view_template/#{name}")
|> Mix.Generator.copy_template(final_path, opts)
end
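# Illustrative sketch (paths assume a Phoenix project named MyApp): running
#   mix phx_up.gen.cvt Post index
# is expected to generate roughly
#   lib/my_app_web/controllers/post_controller.ex
#   test/my_app_web/controllers/post_controller_test.exs
# plus a view and one template per action via the tasks chained in run/2.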
defp get_actions([_|actions]), do: actions
defp get_context([module|_]), do: PhoenixUp.inflect(module)
defp get_context([]), do: raise(RuntimeError, "Invalid module name")
end
|
lib/mix/tasks/phx_up.gen.controller_view_template.ex
| 0.716318 | 0.415907 |
phx_up.gen.controller_view_template.ex
|
starcoder
|
defmodule ThumborClient.UrlBuilder do
@moduledoc """
Module to build url with params
"""
@doc """
Get full path image passing options.
"""
def full_path(options) do
[]
|> trim(options)
|> crop(options)
|> meta(options)
|> fit_in(options)
|> sizes(options)
|> align(options, :halign)
|> align(options, :valign)
|> smart(options)
|> filters(options)
|> image(options)
|> Enum.join("/")
end
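# Illustrative example: the segments compose in the order above, so
#   full_path(%{trim: true, width: 300, height: 200, smart: true, image: "path/img.jpg"})
# yields "trim/300x200/smart/path/img.jpg".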
@doc """
Removes surrounding space from the image.
Unless specified otherwise, trim uses the top-left pixel color and no tolerance (more on tolerance below). To use it, just add `trim: true`.
If you need to specify the orientation from which the reference pixel color is taken, use e.g. `trim: "top-left"`.
Trim also supports color tolerance: the Euclidean distance between the colors of the reference pixel and the surrounding pixels is computed, and pixels within the tolerance are trimmed. For an RGB image the tolerance is within the range 0-442. It can be specified like this: `trim: "top-left:50"`.
## Examples
iex> ThumborClient.UrlBuilder.trim(["300x200"], %{trim: "bottom-left"})
["300x200", "trim:bottom-left"]
iex> ThumborClient.UrlBuilder.trim(["300x200"], %{trim: true})
["300x200", "trim"]
"""
def trim(path, options) do
case options[:trim] do
nil -> path
true -> path ++ ["trim"]
trim -> path ++ ["trim:#{trim}"]
end
end
@doc """
Instead of returning the image, return all of its metadata as JSON.
## Examples
iex> ThumborClient.UrlBuilder.meta(["300x200"], %{meta: true})
["300x200", "meta"]
"""
def meta(path, options) do
case options[:meta] do
nil -> path
false -> path
_ -> path ++ ["meta"]
end
end
@doc """
The fit argument specifies that the image should not be auto-cropped and auto-resized to be EXACTLY the specified size,
and should be fit in an imaginary box of "E" width and "F" height, instead.
Possible params: [:fit_in, :adaptive_fit_in, :full_fit_in, :adaptive_full_fit_in]
Param in options: :fit
## Example
iex> ThumborClient.UrlBuilder.fit_in(["200x200"], %{fit: :full_fit_in})
["200x200", "full_fit_in"]
"""
def fit_in(path, options) do
options_fit = [:fit_in, :adaptive_fit_in, :full_fit_in, :adaptive_full_fit_in]
fit = Enum.find(options_fit, fn(key) ->
options[:fit] == key
end)
if fit do path ++ [Atom.to_string(fit)] else path end
end
@doc """
Adds the image size ("{width}x{height}") to the url.
## Examples
iex> ThumborClient.UrlBuilder.sizes([], %{width: 300, height: 200})
["300x200"]
"""
def sizes(path, options) do
[width, height] = sizes_transform(options)
path ++ ["#{width}x#{height}"]
end
def sizes_transform(options) do
[
size_transform(:width, options),
size_transform(:height, options)
]
end
def size_transform(size_type, options) do
flip_key = if size_type == :width, do: :flip, else: :flop
fill_size_value(options[size_type])
|> flip(options[flip_key])
end
def fill_size_value(size) do
case size do
nil -> 0
_size -> size
end
end
@doc """
Flips a size value: when the second parameter is true, the value is multiplied by -1.
iex> ThumborClient.UrlBuilder.flip(10, true)
-10
iex> ThumborClient.UrlBuilder.flip(10, false)
10
"""
def flip(size, flip \\ false) do
size * (if flip == true do -1 else 1 end)
end
@doc """
Manually specify crop window starting from top left coordinates
top left x, top left y : bottom right x, bottom right y
## Examples
iex> ThumborClient.UrlBuilder.crop([], %{crop: [11, 12, 13, 14]})
["11x12:13x14"]
"""
def crop(path, options) do
case options[:crop] do
nil -> path
[] -> path
crop -> path ++ ["#{Enum.at(crop, 0)}x#{Enum.at(crop, 1)}:#{Enum.at(crop, 2)}x#{Enum.at(crop, 3)}"]
end
end
@doc """
Uses the given orientation to align the crop.
Parameters:
path: List with the other attributes used to build the url
options: Could be: :top, :left, :center, :right, :bottom
orientation: Could be: :halign or :valign
## Examples
iex> ThumborClient.UrlBuilder.align(["300x200"], %{valign: :top}, :valign)
["300x200", "top"]
"""
def align(path, options, orientation) do
case options[orientation] do
nil -> path
false -> path
:center -> path
position -> path ++ [Atom.to_string(position)]
end
end
@doc """
Adds the url parameter "smart" for smarter cropping.
Thumbor's cropping algorithms can use facial recognition.
## Examples
iex> ThumborClient.UrlBuilder.smart(["300x200"], %{smart: true})
["300x200", "smart"]
"""
def smart(path, options) do
case options[:smart] do
nil -> path
false -> path
_ -> path ++ ["smart"]
end
end
@doc """
Adding filters to image. The option must be a List of strings.
You can see all filters in https://github.com/thumbor/thumbor/wiki/Filters
## Examples
iex> ThumborClient.UrlBuilder.filters(["300x300"], %{filters: ["rotate(30)", "brightness(40)"]})
["300x300", "filters:rotate(30):brightness(40)"]
"""
def filters(path, options) do
case options[:filters] do
nil -> path
[] -> path
_filters -> path ++ [Enum.join(["filters"] ++ options[:filters], ":")]
end
end
@doc """
Appends the image path when options[:image] is given.
## Examples
iex> ThumborClient.UrlBuilder.image([], %{image: "path/to/image.jpg"})
["path/to/image.jpg"]
"""
def image(path, options) do
if options[:image] do
path ++ [options[:image]]
else
raise "The option 'image' is required"
end
end
end
|
lib/thumbor_client_url_builder.ex
| 0.888931 | 0.401189 |
thumbor_client_url_builder.ex
|
starcoder
|
defmodule NeuralNet.Constructor do
@moduledoc "Contains the nuts and bolts for constructing the network in the process dictionary. These functions are use by the NeuralNet.Helpers module."
defp key, do: :network_defs
def put_neural_net(net), do: Process.put(key, net)
def get_neural_net(), do: Process.get(key, %NeuralNet{})
def update!(fun), do: put_neural_net(fun.(get_neural_net))
def update!(key, fun), do: update!(fn net -> Map.update!(net, key, fun) end)
def add_operation(id, data={_, inputs}) do
add_vec_grouping([id | inputs])
add_component(:operations, id, data)
end
def add_net_layer(id, data={_, inputs}) do
Enum.each [id | inputs], fn vec -> add_vec_grouping([vec]) end
add_component(:net_layers, id, data)
end
def add_special(id, data={{:tanh_given_weights, weight_vec, actual_inputs}, inputs}) do
Enum.each [id | inputs], fn vec -> add_vec_grouping([vec]) end
update! :construction_data, fn con_data ->
weight_vecs = Map.get(con_data, :weight_vecs, %{})
Map.put(con_data, :weight_vecs, Map.put(weight_vecs, weight_vec, {actual_inputs, id}))
end
add_component(:operations, id, data)
end
def add_component(type, id, data) do
update! type, fn map ->
Map.put(map, id, data)
end
id
end
def add_vec_grouping(new_group_list) do
new_group = Enum.reduce new_group_list, MapSet.new, fn vec, set -> MapSet.put(set, NeuralNet.deconstruct(vec)) end
{matching_groups, remaining_groups} = Enum.split_with Map.get(get_neural_net().construction_data, :vec_groupings, []), fn group ->
!MapSet.disjoint?(group, new_group)
end
super_group = Enum.reduce [new_group | matching_groups], MapSet.new, fn group, super_group ->
MapSet.union(super_group, group)
end
update! :construction_data, fn con_data ->
Map.put(con_data, :vec_groupings,
[super_group | remaining_groups]
)
end
end
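# Illustrative sketch: groupings merge transitively. If {:a, :b} and {:c} are
# existing groups, add_vec_grouping([:b, :c]) overlaps both, so all three
# collapse into the single group {:a, :b, :c}.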
def link(inputs, output) do
Enum.each(inputs, fn input ->
case input do
{:previous, vec} ->
append_root(input)
append_affect(vec, {:next, output})
:input ->
append_root(input)
append_affect(input, output)
_ ->
append_affect(input, output)
end
end)
end
def append_affect(id, new_affect) do
update! :affects, fn affects ->
its_affects = Map.get(affects, id, [])
Map.put(affects, id, [new_affect | its_affects])
end
end
def append_root(root) do
update! :roots, fn roots ->
MapSet.put(roots, root)
end
end
def set_special_vec_definitions() do
net = get_neural_net()
Enum.each Map.get(net.construction_data, :weight_vecs, %{}), fn {weight_vec, {inputs, output}} ->
if Map.has_key?(net.vec_defs, weight_vec), do: raise "Weight vector #{inspect(weight_vec)} had its components defined to #{inspect(Map.get(net.vec_defs, weight_vec))}.\nPlease let its component specification be set automatically."
input_components = Enum.flat_map(inputs, fn input ->
Enum.map(get_vector_def!(net, input), fn component ->
{input, component}
end)
end)
output_components = get_vector_def!(net, output)
weight_components = Enum.flat_map(input_components, fn input_comp ->
Enum.map(output_components, fn output_comp ->
{input_comp, output_comp}
end)
end)
NeuralNet.Helpers.def_vec(weight_vec, weight_components)
end
end
defp get_vector_def!(net, vec) do
if !Map.has_key?(net.vec_defs, vec), do: raise "Vector #{inspect(vec)} lacks a vector definition."
Map.fetch!(net.vec_defs, vec)
end
def confirm_groupings_defined(net) do
vec_defs = net.vec_defs
Enum.each Map.get(net.construction_data, :vec_groupings, []), fn group ->
if !Map.has_key?(vec_defs, Enum.at(group, 0)) do
raise "The following group of vectors lacks a definition for its components: #{inspect(group)}"
end
end
net
end
@doc "Returns the network with the randomly generated weight map."
def gen_random_weight_map(net, weight_gen_fun \\ &gen_random_weight/0) do
Map.put(net, :weight_map,
Enum.reduce(net.net_layers, %{}, fn {output, {{:net_layer, _, _}, inputs}}, weight_map -> #net layers are named by their output
Map.put(weight_map, output,
Enum.reduce(NeuralNet.Constructor.get_weight_ids(net, output, inputs), %{}, fn id, map ->
Map.put(map, id, weight_gen_fun.())
end)
)
end)
)
end
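# The generated weight map is shaped like (illustrative):
#   %{output_vec => %{{{input_vec, input_component}, output_component} => weight}}
# with one inner entry per id returned by get_weight_ids/3.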
def gen_random_weight do
0.2 * (:rand.uniform - 0.5)
end
def get_weight_ids(net, output) do
{{:net_layer, _, _}, inputs} = Map.fetch!(net.net_layers, output)
get_weight_ids(net, output, inputs)
end
def get_weight_ids(net, output, inputs) do
Enum.flat_map(inputs, fn input ->
Enum.flat_map(NeuralNet.get_vec_def(net, input), fn input_component ->
Enum.map(NeuralNet.get_vec_def(net, output), fn output_component ->
{{input, input_component}, output_component}
end)
end)
end)
end
end
|
lib/neural_net/constructor.ex
| 0.721351 | 0.561906 |
constructor.ex
|
starcoder
|
defmodule GatewayService do
@moduledoc """
The GatewayService is an OTP GenServer which gets initialized with
a Gateway implementation and is responsible for keeping the state of the
Gateway across the application.
"""
use GenServer
# Client API
@doc """
Start a Gateway service
#### Example:
iex> gw = %ListGateway{ entries: [:a, :b, :c] }
...> {:ok, service} = GatewayService.start_service( gw )
...> GatewayService.gateway(service)
%ListGateway{entries: [:a, :b, :c]}
"""
def start_service gateway do
GenServer.start_link(__MODULE__, %{gateway: gateway})
end
@doc """
Get the current gateway from the service
#### Example:
iex> gw = %ListGateway{}
...> {:ok, service} = GatewayService.start_service( gw )
...> GatewayService.gateway(service)
%ListGateway{entries: []}
"""
def gateway service do
GenServer.call service, :gateway
end
@doc """
Put a new entry to the gateway
#### Example:
iex> gw = %ListGateway{}
...> {:ok, service} = GatewayService.start_service( gw )
...> GatewayService.put(service, :new_entry)
...> GatewayService.gateway(service)
%ListGateway{entries: [:new_entry]}
"""
def put service, entry do
GenServer.cast service, {:put, entry}
end
@doc """
Filter by function
#### Example:
iex> gw = %ListGateway{ entries: [:a, :b, :c] }
...> {:ok, service} = GatewayService.start_service( gw )
...> GatewayService.where(service, &( &1 == :b ))
[:b]
"""
def where service, f do
GenServer.call service, {:filter, f}
end
@doc "To List"
def to_list service do
where(service, fn(_) -> true end)
end
@doc """
Count entries in collection
"""
def count service do
GenServer.call service, :count
end
@doc """
Drop the entire collection
"""
def drop service do
GenServer.cast service, :drop
end
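@doc """
Find an entry by id, delegating to `Gateway.find/2`.
"""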
def find service, id do
GenServer.call service, {:find, id}
end
# Callbacks
def handle_call :gateway, _from, service do
{:reply, service.gateway, service}
end
def handle_call {:filter, f}, _from, service do
found = Gateway.filter( service[:gateway], f )
{:reply, found, service}
end
def handle_call {:find, id}, _from, service do
entry = Gateway.find(service[:gateway], id)
{:reply, entry, service}
end
def handle_call :count, _from, service do
{:reply, Gateway.count(service[:gateway]), service}
end
def handle_cast {:put, entry}, service do
gw = Gateway.put(service[:gateway], entry)
{:noreply, %{gateway: gw}}
end
def handle_cast :drop, service do
gw = Gateway.drop(service[:gateway])
{:noreply, %{gateway: gw}}
end
end
|
lib/gateways/service.ex
| 0.807688 | 0.418786 |
service.ex
|
starcoder
|
defmodule CyberSourceSDK.Client do
@moduledoc """
This Client module handles all HTTPS requests to the CyberSource server. It
takes some parameters and converts them into HTTPS requests.
It supports the following payment methods:
* Android Pay
* Apple Pay
It supports the following requests:
* Authorization
* Capture
* Refund
"""
import SweetXml
alias CyberSourceSDK.Helper
use GenServer
def init(args) do
{:ok, args}
end
def start_link do
GenServer.start_link(__MODULE__, {}, name: :cybersource_sdk_client)
end
@doc """
Create an authorization payment
For a normal account, bill_to is mandatory. If you ask CyberSource for a
relaxed AVS check, bill_to can be optional.
## Parameters
- price: Float that represents the price to be charged to the user.
- merchant_reference_code: String that represents the order. Normally you should pass an unique identifier like `order_id`.
- card_type: String with the name of card type, like VISA, MASTERCARD, etc.
- encrypted_payment: Base64-encoded string received from the Apple/Android payment system.
- bill_to: Structure generated by `CyberSourceSDK.bill_to()`. (Optional)
- worker: Atom with the name of the configuration entry to be used. (Optional)
## Example
Without `bill_to` and `worker` parameters
```
authorize(32.0, "1234", "VISA", "oJ8IOx6SA9HNncxzpS9akm32n+DSAJH==")
```
With `bill_to` parameter
```
bill_to = CyberSourceSDK.bill_to("John", "Doe", "Marylane Street", "34", "New York", "Hong Kong", "<EMAIL>")
authorize(32.0, "1234", "VISA", "oJ8IOx6SA9HNncxzpS9akm32n+DSAJH==", bill_to)
```
"""
def authorize(
price,
merchant_reference_code,
card_type,
encrypted_payment,
bill_to \\ [],
worker \\ :merchant
)
def authorize(price, merchant_reference_code, card_type, encrypted_payment, bill_to, worker)
when is_float(price) do
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
case Helper.check_payment_type(encrypted_payment) do
{:ok, :apple_pay} ->
pay_with_apple_pay(
price,
merchant_reference_code_validated,
card_type,
encrypted_payment,
bill_to,
worker
)
{:ok, :android_pay} ->
pay_with_android_pay(
price,
merchant_reference_code_validated,
card_type,
encrypted_payment,
bill_to,
worker
)
{:error, reason} ->
{:error, reason}
end
end
end
def authorize(_, _, _, _, _, _) do
{:error, :price_needs_to_be_float}
end
@doc """
Capture authorization on user credit card
## Parameters
- order_id: Unique number to identify the purchase.
- request_params: Base64 of a JSON with `request_id` and `request_token` from authorization request.
- items: An array of maps containing the following values: `id`, `unit_price` and `quantity`. Example: ```%{id: id, unit_price: unit_price, quantity: quantity}```
- worker: Merchant atom to use (setup in configurations).
## Result
On successful return the result will be:
```
{:ok, object}
```
"""
def capture(order_id, request_params, items \\ [], worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
[request_id: request_id, reference_id: order_id] ++ [items: items]
EEx.eval_file(get_template("capture_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
Remove authorization on user credit card
## Parameters
- order_id: Unique number to identify the purchase.
- amount: Price (value) to refund.
- request_params: Base64 of a JSON with `request_id` and `request_token` from authorization request.
- items: An array of maps containing the following values: `id`, `unit_price` and `quantity`. Example: ```%{id: id, unit_price: unit_price, quantity: quantity}```
- worker: Merchant atom to use (setup in configurations)
## Example
```
refund("1234", 23435465442432, items)
```
"""
def refund(order_id, amount, request_params, items \\ [], worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
[request_id: request_id, reference_id: order_id, total_amount: amount] ++
[items: items]
EEx.eval_file(get_template("refund_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
A void cancels a capture or credit request that you submitted to CyberSource. A
transaction can be voided only when CyberSource has not already submitted the capture
or credit request to your processor. CyberSource usually submits capture and credit
requests to your processor once a day, so your window for successfully voiding a capture
or credit request is small. CyberSource declines your void request when the capture or
credit request has already been sent to the processor
"""
def void(order_id, request_params, worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++ [request_id: request_id, reference_id: order_id]
EEx.eval_file(get_template("void_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
When your request for a credit is successful, the issuing bank for the credit
card takes money out of your merchant bank account and returns it to the customer.
It usually takes two to four days for your acquiring bank to transfer funds
from your merchant bank account.
"""
def credit(order_id, amount, reason, request_params, worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
[
request_id: request_id,
reference_id: order_id,
total_amount: amount,
refund_reason: reason
]
EEx.eval_file(get_template("credit_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
Make a request to pay with Android Pay
Returns `{:ok, response_object}`, `{:error, :card_type_not_found}` or
`{:error, response_code}`
"""
def pay_with_android_pay(
price,
merchant_reference_code,
card_type,
encrypted_payment,
bill_to \\ [],
worker \\ :merchant
) do
case get_card_type(card_type) do
nil ->
{:error, :card_type_not_found}
card_type ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
get_payment_params(merchant_reference_code, price, encrypted_payment, card_type) ++
bill_to
EEx.eval_file(get_template("android_pay_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Make a request to pay with Apple Pay
Returns `{:ok, response_object}`, `{:error, :card_type_not_found}` or
`{:error, response_code}`
"""
def pay_with_apple_pay(
price,
merchant_reference_code,
card_type,
encrypted_payment,
bill_to \\ [],
worker \\ :merchant
) do
case get_card_type(card_type) do
nil ->
{:error, :card_type_not_found}
card_type ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
CyberSourceSDK.Client.get_configuration_params(worker) ++
CyberSourceSDK.Client.get_payment_params(
merchant_reference_code,
price,
encrypted_payment,
card_type
) ++ bill_to
EEx.eval_file(get_template("apple_pay_request.xml"), assigns: replace_params)
|> call()
else
Helper.invalid_merchant_configuration()
end
end
end
# Define path of request templates
defp get_template(filename) do
Path.join(__DIR__, "/requests/" <> filename <> ".eex")
end
# Get Payment parameters
@spec get_payment_params(String.t(), float(), String.t(), String.t()) :: list()
def get_payment_params(order_id, price, encrypted_token, card_type) do
[
reference_id: order_id,
total_amount: price,
encrypted_payment_data: encrypted_token,
card_type: card_type
]
end
@spec get_card_type(String.t()) :: String.t() | nil
defp get_card_type(card_type) do
case card_type do
"VISA" -> "001"
"MASTERCARD" -> "002"
"AMEX" -> "003"
"DISCOVER" -> "004"
"JCB" -> nil
_ -> nil
end
end
@spec get_configuration_params(atom()) :: list()
def get_configuration_params(worker) do
merchant_configuration = Application.get_env(:cybersource_sdk, worker)
if !is_nil(merchant_configuration) do
[
merchant_id: Map.get(merchant_configuration, :id),
transaction_key: Map.get(merchant_configuration, :transaction_key),
currency: Map.get(merchant_configuration, :currency),
client_library: "CyberSourceSDK Elixir #{Application.spec(:cybersource_sdk, :vsn)}"
]
else
[]
end
end
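# Illustrative configuration for the :merchant worker (keys taken from the
# Map.get calls above; the values are placeholders):
#
# config :cybersource_sdk, :merchant, %{
#   id: "merchant_id",
#   transaction_key: "transaction_key",
#   currency: "USD"
# }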
# Make HTTPS request
@spec call(String.t()) :: {:ok, map()} | {:error, String.t()} | {:error, :unknown_response}
defp call(xml_body) do
endpoint = Application.get_env(:cybersource_sdk, :endpoint)
timeout = Application.get_env(:cybersource_sdk, :timeout, 8000)
case HTTPoison.post(
endpoint,
xml_body,
[{"Content-Type", "application/xml"}],
timeout: timeout
) do
{:ok, %HTTPoison.Response{body: response_body}} ->
parse_response(response_body)
|> handle_response
{:error, %HTTPoison.Error{id: _, reason: reason}} ->
{:error, reason}
end
end
defp validate_merchant_reference_code(merchant_reference_code) do
# is_binary/1 is checked first so integer codes do not crash String.valid?/1
cond do
is_binary(merchant_reference_code) and String.valid?(merchant_reference_code) and
String.length(merchant_reference_code) > 0 ->
merchant_reference_code
is_integer(merchant_reference_code) ->
Integer.to_string(merchant_reference_code)
true ->
{:error, :invalid_order_id}
end
end
# Parse response from CyberSource
@spec parse_response(String.t()) :: map()
def parse_response(xml) do
xml
|> xmap(
merchantReferenceCode:
~x"//soap:Envelope/soap:Body/c:replyMessage/c:merchantReferenceCode/text()"os,
requestID: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:requestID/text()"oi,
decision: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:decision/text()"os,
reasonCode: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:reasonCode/text()"oi,
requestToken: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:requestToken/text()"os,
ccAuthReply: [
~x".//c:ccAuthReply"o,
reasonCode: ~x"./c:reasonCode/text()"i,
amount: ~x"./c:amount/text()"of
],
ccCaptureReply: [
~x".//c:ccCaptureReply"o,
reasonCode: ~x"./c:reasonCode/text()"i,
amount: ~x"./c:amount/text()"of,
requestDateTime: ~x"./c:requestDateTime/text()"so,
reconciliationID: ~x"./c:reconciliationID/text()"io
],
ccAuthReversalReply: [
~x".//c:ccAuthReversalReply"o,
reasonCode: ~x"./c:reasonCode/text()"i
],
originalTransaction: [
~x".//c:originalTransaction"o,
amount: ~x"./c:amount/text()"of,
reasonCode: ~x"./c:reasonCode/text()"i
],
voidReply: [
~x".//c:voidReply"o,
reasonCode: ~x"./c:reasonCode/text()"i,
amount: ~x"./c:amount/text()"of,
requestDateTime: ~x"./c:requestDateTime/text()"so,
currency: ~x"./c:currency/text()"so
],
ccCreditReply: [
~x".//c:ccCreditReply"o,
reasonCode: ~x"./c:reasonCode/text()"i,
requestDateTime: ~x"./c:requestDateTime/text()"so,
amount: ~x"./c:amount/text()"of,
reconciliationID: ~x"./c:reconciliationID/text()"so,
purchasingLevel3Enabled: ~x"./c:purchasingLevel3Enabled/text()"so,
enhancedDataEnabled: ~x"./c:enhancedDataEnabled/text()"so,
authorizationXID: ~x"./c:authorizationXID/text()"so,
forwardCode: ~x"./c:forwardCode/text()"so,
ownerMerchantID: ~x"./c:ownerMerchantID/text()"so,
reconciliationReferenceNumber: ~x"./c:reconciliationReferenceNumber/text()"so
],
fault: [
~x"//soap:Envelope/soap:Body/soap:Fault"o,
faultCode: ~x"./faultcode/text()"s,
faultString: ~x"./faultstring/text()"s
]
)
end
@spec handle_response(map()) ::
{:ok, map()} | {:error, String.t()} | {:error, :unknown_response}
defp handle_response(response) do
cond do
response.decision != "" ->
case response.decision do
"ACCEPT" -> {:ok, response}
"REJECT" -> {:error, response.reasonCode}
"ERROR" -> {:error, response.reasonCode}
end
response.fault.faultCode != "" ->
{:error, "#{response.fault.faultCode} - #{response.fault.faultString}"}
true ->
{:error, :unknown_response}
end
end
end
|
lib/cybersource-sdk/client.ex
| 0.873377 | 0.725746 |
client.ex
|
starcoder
|
defmodule Janus.Graph do
@moduledoc """
Reverse, Preorder, Directed Graph Traversal from a given
starting point with an accumulating behaviour.
*Reverse* because this traverses in the opposite direction
the edges are pointing. *Preorder*, the depth-first tree
traversal strategy.
Primarily used in conjunction with `Janus.Planner` to
transform an `EQL` query into a graph of the resolvers that need
to be executed in order to fulfill it.
"""
alias Janus.Resolver
alias EQL.AST.{Ident, Join, Params, Prop, Query, Union, Union.Entry}
@behaviour Access
# TODO: add checks to Janus.Graph.from_resolvers/1 for resolver uniqueness
# TODO: add resolver function to nodes? (or just keep the resolver index on the graph?)
defstruct unreachable: MapSet.new([]),
attr_trail: [],
dg: nil
@type t :: %__MODULE__{
unreachable: MapSet.t(node_id),
attr_trail: [node_id],
dg: :digraph.graph()
}
@type node_id :: Janus.attr() | [Janus.attr()]
@type vertex :: :digraph.vertex()
@type edge :: :digraph.edge()
@type acc :: term
@type reachability :: :reachable | :unreachable | :found
@type walker(x) :: (type :: term, x, t, acc -> {reachability, t, acc})
@type walker :: walker(edge)
@type depth :: non_neg_integer
@type label_tuple(id) :: {id, depth, [Janus.attr()], Janus.attr() | nil, leaf? :: boolean}
@type label(id) :: %{
required(:id) => id,
required(:depth) => depth,
required(:path) => [Janus.attr()],
required(:union_key) => Janus.attr() | nil,
required(:leaf?) => boolean
}
@type ast_type ::
:pre_walk
| :ident
| {:pre_walk | :post_walk, :params}
| {:pre_subquery | :post_subquery, Resolver.id()}
| {:recursion, depth :: timeout}
| {:post_walk, reachability}
@type attr_type :: :pre_walk | :cyclic | {:post_walk, reachability}
@callback ast_walker(ast_type, EQL.AST.t(), t, acc) ::
{:cont | :skip, t, acc} | {:error, reason :: term}
@callback attr_walker(attr_type, edge, t, acc) :: {reachability, t, acc}
@impl Access
def fetch(graph, key) do
Map.fetch(graph, key)
end
@impl Access
def get_and_update(graph, key, fun) do
Map.get_and_update(graph, key, fun)
end
@impl Access
def pop(graph, key) do
Map.pop(graph, key)
end
@spec new([Resolver.t()]) :: t
def new(resolvers) do
%__MODULE__{
dg: from_resolvers(resolvers)
}
end
@spec new([Resolver.t()], :digraph.graph()) :: t
def new(resolvers, dg) do
%__MODULE__{
dg: from_resolvers(resolvers, dg)
}
end
@spec reset(t) :: t
def reset(graph) do
%{graph | attr_trail: [], unreachable: MapSet.new([])}
end
@spec ast_walker(module, ast_type, EQL.AST.t(), t, acc) ::
{:cont | :skip, t, acc} | {:error, reason :: term}
def ast_walker(module, type, ast, graph, acc) do
module.ast_walker(type, ast, graph, acc)
end
@spec attr_walker(module, attr_type, edge, t, acc) :: {reachability, t, acc}
def attr_walker(module, type, edge, graph, acc) do
module.attr_walker(type, edge, graph, acc)
end
@spec walk_ast(t, EQL.AST.t(), acc, module) :: {:ok, {t, acc}} | {:error, reason :: term}
def walk_ast(graph, %Prop{} = prop, acc, module) do
with {:cont, graph, acc} <- ast_walker(module, :pre_walk, prop, graph, acc),
{r, g, a} <- walk_attr(graph, EQL.get_key(prop), acc, module),
{_, g, a} <- ast_walker(module, {:post_walk, r}, prop, g, a) do
{:ok, {g, a}}
else
{:skip, graph, acc} -> {:ok, {graph, acc}}
{:error, reason} -> {:error, reason}
end
end
def walk_ast(graph, %Ident{} = ident, acc, module) do
case ast_walker(module, :ident, ident, graph, acc) do
{:error, reason} -> {:error, reason}
{_, graph, acc} -> {:ok, {graph, acc}}
end
end
def walk_ast(graph, %Params{expr: expr} = params, acc, module) do
with {:cont, graph, acc} <- ast_walker(module, {:pre_walk, :params}, params, graph, acc),
{:ok, {graph, acc}} <- walk_ast(graph, expr, acc, module),
{_, graph, acc} <- ast_walker(module, {:post_walk, :params}, params, graph, acc) do
{:ok, {graph, acc}}
else
{:skip, graph, acc} -> {:ok, {graph, acc}}
{:error, reason} -> {:error, reason}
end
end
def walk_ast(graph, %Join{key: key, query: %Query{} = q} = join, acc, module) do
case walk_ast(graph, key, acc, module) do
{:error, reason} ->
{:error, reason}
{:ok, {graph, acc}} ->
graph
|> subqueries(key)
|> Enum.reduce_while({:ok, {graph, acc}}, fn
_, {:error, reason} ->
{:halt, {:error, reason}}
r, {:ok, {g, a}} ->
with {:cont, g, a} <- ast_walker(module, {:pre_subquery, r}, join, g, a),
{:ok, {g, a}} <- walk_ast(g, q, a, module),
{_, g, a} <- ast_walker(module, {:post_subquery, r}, join, g, a) do
{:cont, {:ok, {g, a}}}
else
{:skip, graph, acc} -> {:cont, {:ok, {graph, acc}}}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
end
def walk_ast(graph, %Join{key: key} = join, acc, module)
when is_integer(key) or key == :infinity do
with {:ok, {graph, acc}} <- walk_ast(graph, key, acc, module),
{:cont, graph, acc} <- ast_walker(module, {:recursion, key}, join, graph, acc) do
{:ok, {graph, acc}}
else
{:skip, graph, acc} -> {:ok, {graph, acc}}
{:error, reason} -> {:error, reason}
end
end
def walk_ast(graph, %Union{} = union, acc, module) do
walk_ast(graph, EQL.union_to_query(union), acc, module)
end
def walk_ast(graph, %Query{children: children}, acc, module) do
Enum.reduce_while(children, {:ok, {graph, acc}}, fn
_, {:error, reason} -> {:halt, {:error, reason}}
ast, {:ok, {g, a}} -> {:cont, walk_ast(g, ast, a, module)}
end)
end
def walk_ast(_graph, ast, _acc, _module) do
{:error, {:invalid_ast, ast}}
end
@spec walk_attr(t, node_id, acc, module) :: {reachability, t, acc}
def walk_attr(graph, node_ids, acc, module) when is_list(node_ids) do
Enum.reduce_while(node_ids, {:reachable, graph, acc}, fn
_, {:unreachable, g, a} ->
{:halt, {:unreachable, g, a}}
nid, {r, g, a} when r in [:reachable, :found] ->
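# A synthetic edge lets the shared reducer treat each id of a compound
# input exactly like a real in-edge of the whole id list.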
fake_edge = {nil, nid, node_ids, %{id: nil}}
walk_attr_reducer(fake_edge, {:reachable, g, a}, module)
end)
end
def walk_attr(graph, node_id, acc, module) do
graph.dg
|> :digraph.in_edges(node_id)
|> Enum.map(&:digraph.edge(graph.dg, &1))
|> Enum.filter(&direct_edge?/1)
|> Enum.reduce_while({:unreachable, graph, acc}, &walk_attr_reducer(&1, &2, module))
end
@spec walk_attr_reducer(edge, {reachability, t, acc}, module) ::
{:cont | :halt, {reachability, t, acc}}
defp walk_attr_reducer(edge, {r, graph, acc}, module) do
cond do
unreachable?(graph, edge) ->
{:unreachable, graph, acc}
cyclic?(graph, edge) ->
{:unreachable, graph, acc}
|> wrap_walker(edge, module, :cyclic)
|> update_unreachable(edge, graph, r)
true ->
{graph, acc}
|> continue_walk_attr(edge, module)
|> update_unreachable(edge, graph, r)
end
|> cont_or_halt(edge)
end
@spec continue_walk_attr({t, acc}, edge, module) :: {reachability, t, acc}
defp continue_walk_attr({g, a}, {_, i, o, _} = edge, module) do
case attr_walker(module, :pre_walk, edge, g, a) do
{:reachable, graph, acc} ->
graph
|> Map.update!(:attr_trail, &[o | &1])
|> walk_attr(i, acc, module)
|> wrap_walker(edge, module)
otherwise ->
otherwise
end
end
@spec wrap_walker({reachability, t, acc}, edge, module) :: {reachability, t, acc}
defp wrap_walker(rga, edge, module, type \\ nil)
defp wrap_walker({reach, graph, acc}, edge, module, nil) do
attr_walker(module, {:post_walk, reach}, edge, graph, acc)
end
defp wrap_walker({_, graph, acc}, edge, module, type) do
attr_walker(module, type, edge, graph, acc)
end
@spec cont_or_halt({reachability, t, acc}, edge) :: {:cont | :halt, {reachability, t, acc}}
defp cont_or_halt({:unreachable, _, _} = rga, {_, _, o, _}) when is_list(o), do: {:halt, rga}
defp cont_or_halt(rga, _edge), do: {:cont, rga}
@spec update_unreachable({reachability, t, acc}, edge, t, reachability) ::
{reachability, t, acc}
defp update_unreachable({:unreachable, g, a}, {_, i, o, _}, graph, _) when is_list(o) do
{:unreachable, mark_unreachable(graph, g, i), a}
end
defp update_unreachable({:unreachable, g, a}, {_, i, _, _}, graph, r) do
{r, mark_unreachable(graph, g, i), a}
end
defp update_unreachable({r, g, a}, _edge, graph, _) do
{r, %{graph | unreachable: g.unreachable}, a}
end
@spec mark_unreachable(t, t, Janus.attr()) :: t
defp mark_unreachable(previous_graph, graph, attr) do
if attr in graph.attr_trail do
%{previous_graph | unreachable: graph.unreachable}
else
%{previous_graph | unreachable: MapSet.put(graph.unreachable, attr)}
end
end
@spec direct_edge?(edge) :: boolean
defp direct_edge?({_, _, _, %{depth: 0}}), do: true
defp direct_edge?(_), do: false
@spec unreachable?(t, edge) :: boolean
defp unreachable?(graph, {_, i, _, _}) when is_list(i) do
i in graph.unreachable or Enum.any?(i, &(&1 in graph.unreachable))
end
defp unreachable?(graph, {_, i, _, _}) do
i in graph.unreachable
end
@spec cyclic?(t, edge) :: boolean
defp cyclic?(graph, {_, i, _, _}), do: i in graph.attr_trail
@spec from_resolvers([Resolver.t()]) :: :digraph.graph()
defp from_resolvers(resolvers) do
from_resolvers(resolvers, :digraph.new([]))
end
@spec from_resolvers([Resolver.t()], :digraph.graph()) :: :digraph.graph()
defp from_resolvers([], dg), do: dg
defp from_resolvers([res | t], dg) do
i = extract_input_name(res)
labels = output_info(res)
output = Enum.map(labels, &Map.get(&1, :id))
_ = Enum.each([i | res.input], &:digraph.add_vertex(dg, &1))
_ = Enum.each(output, &:digraph.add_vertex(dg, &1))
_ = Enum.each(labels, &:digraph.add_edge(dg, i, &1.id, %{&1 | id: res.id}))
from_resolvers(t, dg)
end
@spec extract_input_name(Resolver.t()) :: id | [id] when id: {module, atom}
defp extract_input_name(%Resolver{input: []}), do: []
defp extract_input_name(%Resolver{input: [id]}), do: id
defp extract_input_name(%Resolver{input: [_ | _] = ids}), do: ids
@spec output_info(Resolver.t()) :: [label(id)] when id: Resolver.id()
defp output_info(resolver) do
case EQL.to_ast(resolver.output) do
nil ->
[]
ast ->
ast
|> output_info(0, [], nil)
|> Enum.map(fn {id, depth, path, union_key, leaf?} ->
%{id: id, depth: depth, parent: path, union_key: union_key, leaf?: leaf?}
end)
end
end
@spec output_info([EQL.AST.t()] | EQL.AST.t(), depth, path, union_key) ::
[{id, depth, path, union_key, leaf?}]
when id: Janus.attr(), path: [id], union_key: id | nil, leaf?: boolean
defp output_info([], _, _, _), do: []
defp output_info([h | t], d, p, u), do: output_info(h, d, p, u) ++ output_info(t, d, p, u)
defp output_info(%Prop{module: m, key: k}, d, p, u), do: [{{m, k}, d, p, u, true}]
defp output_info(%Join{key: %Prop{module: m, key: k}, query: q}, d, p, u),
do: [{{m, k}, d, p, u, false} | output_info(q, d + 1, [{m, k} | p], nil)]
defp output_info(%Union{children: cs}, d, p, _), do: output_info(cs, d, p, nil)
defp output_info(%Entry{key: %Prop{module: m, key: k}, query: q}, d, p, _),
do: output_info(q, d, p, {m, k})
defp output_info(%Query{children: cs}, d, p, _), do: output_info(cs, d, p, nil)
@spec subqueries(t, Prop.t()) :: [Resolver.id()]
defp subqueries(graph, prop) do
graph.dg
|> :digraph.in_edges(EQL.get_key(prop))
|> Enum.map(fn
{_, _, _, %{id: id}} -> id
_ -> nil
end)
|> Enum.reject(&is_nil/1)
|> Enum.dedup()
end
@doc false
defmacro __using__(_opts) do
quote do
@behaviour Janus.Graph
@spec walk_ast(Janus.Graph.t(), EQL.AST.t(), Janus.Graph.acc()) ::
{:ok, {Janus.Graph.t(), Janus.Graph.acc()}} | {:error, reason :: term}
def walk_ast(graph, ast, acc) do
Janus.Graph.walk_ast(graph, ast, acc, __MODULE__)
end
@spec walk_attr(Janus.Graph.t(), Janus.Graph.node_id(), Janus.Graph.acc()) ::
{Janus.Graph.reachability(), Janus.Graph.t(), Janus.Graph.acc()}
def walk_attr(graph, node_id, acc) do
Janus.Graph.walk_attr(graph, node_id, acc, __MODULE__)
end
end
end
end
|
lib/janus/graph.ex
| 0.669853 | 0.526586 |
graph.ex
|
starcoder
|
defmodule Janus.Transport.WS.Adapter do
@moduledoc """
This module specifies the behaviour for adapter modules that connect the `Janus.Transport.WS` module
with a lower-level WebSocket client (e.g. `:websockex`).
An adapter is responsible for sending and receiving WebSocket frames and for notifying about connection status changes (e.g. disconnected).
Sending and receiving frames is supposed to be asynchronous.
Frames received from websocket should be forwarded to `message_receiver` process via `forward_frame/2`.
## Creating custom adapter
To implement custom adapter one should use the `Janus.Transport.WS.Adapter` and implement all callbacks.
## Example
```elixir
defmodule CustomAdapter do
use Janus.Transport.WS.Adapter
@impl true
def connect(url, receiver, opts) do
on_receive = fn frame -> forward_frame(receiver, frame) end
ws_pid = SomeLib.spawn_websocket_connection(url, on_receive, opts)
{:ok, %{websocket: ws_pid, receiver: receiver}}
end
@impl true
def send(payload, %{websocket: pid}) do
SomeLib.send_frame(pid, payload)
end
@impl true
def disconnect(state) do
:ok = SomeLib.disconnect(state.websocket)
notify_status(state.receiver, {:disconnected, "disconnect requested"})
end
end
```
"""
@type websocket_t :: any()
@type url_t :: String.t()
@type payload_t :: iodata()
@type timeout_t :: number()
@type message_receiver_t :: pid()
@doc """
Creates a new WebSocket connection.
The callback should synchronously return a new connection or error on failure.
## Arguments
- `url` - valid websocket url
- `message_receiver` - pid of incoming messages and connection info recipient
- `opts` - options specific to adapter itself
Notice that `message_receiver` is passed only during this callback but should be used for every new websocket frame and connection status change.
"""
@callback connect(url :: url_t(), message_receiver :: message_receiver_t(), opts :: Keyword.t()) ::
{:ok, websocket_t()} | {:error, any}
@doc """
Sends payload via given WebSocket.
"""
@callback send(payload :: payload_t(), websocket :: websocket_t()) :: :ok | {:error, any}
@doc """
Closes the WebSocket connection.
The callback should notify the message receiver about its status change with `{:disconnected, reason}` message.
"""
@callback disconnect(websocket :: websocket_t()) :: :ok | {:error, any}
defmacro __using__(_opts) do
quote location: :keep do
@behaviour unquote(__MODULE__)
import unquote(__MODULE__)
end
end
@doc """
Forwards the frame received via WebSocket to the message receiver previously initialized during `c:connect/3`.
"""
@spec forward_frame(message_receiver_t(), payload_t()) :: any()
def forward_frame(message_receiver, frame) when is_pid(message_receiver) do
Kernel.send(message_receiver, {:ws_frame, frame})
end
@doc """
Notifies the receiver with connection's new status.
List of currently supported statuses:
* `{:disconnected, reason}` - used when connection has been closed from either sever or client side
"""
@spec notify_status(message_receiver_t(), {atom(), any}) :: any()
def notify_status(receiver, {status, _info} = msg) when is_atom(status) and is_pid(receiver) do
Kernel.send(receiver, msg)
end
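# A transport process that owns an adapter can therefore expect messages of:
#   {:ws_frame, frame}      - an incoming WebSocket frame (forward_frame/2)
#   {:disconnected, reason} - a connection status change (notify_status/2)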
end
|
lib/adapters/adapter.ex
| 0.876449 | 0.698458 |
adapter.ex
|
starcoder
|
defmodule Sublocator do
@moduledoc """
An Elixir library for identifying the location(s) of a pattern in a given string.
Using `Sublocator.locate/3`, the pattern can be a string, a list of strings, or
a regular expression, and the result is a list of simple line and column data or
an empty list.
Multiline pattern support added in version 0.2.0
"""
alias __MODULE__
@col_offset 1
@typep t :: %{line: integer, col: integer}
@typep pattern :: binary | list(binary) | Regex.t()
@typep at_most :: :all | integer
defguardp is_loc(line, col) when is_integer(line) and is_integer(col)
@doc """
Creates a simple location map from line and column integers.
## Example
iex> Sublocator.new_loc(42, 12)
%{line: 42, col: 12}
"""
@spec new_loc(integer, integer) :: t
def new_loc(line, col) when is_loc(line, col) do
%{line: line, col: col}
end
@doc ~S"""
Finds line and column location(s) of a pattern in a given string.
Returns a list of these locations or an empty list if not found.
The pattern can be a string, a list of strings, or a regular
expression, including multiline patterns.
The returned locations are listed in the order found, from top to
bottom, left to right.
All locations are reported in the list by default but can be
controlled via the `:at_most` option.
By default, the pattern is located from the beginning of the string,
but the `:start` option can be used to report locations starting at
a later point.
## Options
* `:at_most` (positive integer or `:all`) - the number of locations
returned is at most as many as this option specifies.
If `:all`, all found locations are returned. Defaults to `:all`.
* `:start` (`%{line: integer, col: integer}`) - only locations >= the
starting point specified by this option are returned; otherwise,
the string is searched from the beginning.
## Examples
Locating with a string:
iex> Sublocator.locate("<h2>\n <span class=\"a\"", "a")
{:ok, [%{line: 2, col: 6}, %{line: 2, col: 11}, %{line: 2, col: 16}]}
iex> Sublocator.locate("<h2>\n <span class=\"a\"", "a", at_most: 1)
{:ok, [%{line: 2, col: 6}]}
iex> Sublocator.locate("<h2>\n <span class=\"a\"", "a", [start: %{line: 2, col: 10}])
{:ok, [%{line: 2, col: 11}, %{line: 2, col: 16}]}
A list of strings:
iex> Sublocator.locate("<h2>\n <span class=\"a\"", ["h", "l"])
{:ok, [%{line: 1, col: 2}, %{line: 2, col: 10}]}
A regular expression:
iex> Sublocator.locate("<h2>\n <span class=\"a\"", ~r{<(?!h)})
{:ok, [%{line: 2, col: 3}]}
"""
@spec locate(binary, pattern, keyword) :: {atom, list(t) | binary}
def locate(string, pattern, opts \\ [])
def locate(string, pattern, opts) when is_binary(string) and is_list(pattern) do
joined =
pattern
|> Enum.map(&Regex.escape(&1))
|> Enum.join("|")
regexp = Regex.compile!("(?:#{joined})")
locate(string, regexp, opts)
end
def locate(string, pattern, opts) when is_binary(string) do
at_most = Keyword.get(opts, :at_most, :all)
start_loc = Keyword.get(opts, :start, new_loc(0, 0))
string
|> String.split(pattern, include_captures: true)
|> tuplify(pattern)
|> do_locate(at_most, start_loc)
end
def locate(_string, _pattern, _opts), do: {:error, "intended only for a string"}
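# Note: String.split/3 honors :include_captures only for regex patterns, so
# for a plain binary pattern the literal match is interspersed back between
# the segments before chunking into {preceding_text, match} pairs.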
@spec tuplify(Enumerable.t(binary), pattern) :: Enumerable.t()
defp tuplify(split_result, pattern)
defp tuplify(split_result, pattern) when is_binary(pattern) do
Enum.intersperse(split_result, pattern)
|> Enum.chunk_every(2, 2, :discard)
|> Enum.map(&List.to_tuple/1)
end
defp tuplify(split_result, _pattern) do
Enum.chunk_every(split_result, 2, 2, :discard)
|> Enum.map(&List.to_tuple/1)
end
@spec stream_lines(binary) :: Enumerable.t({binary, integer})
defp stream_lines(string) do
~r{(?:\r\n|\n|\r)}
|> Regex.split(string)
|> Stream.with_index(0)
end
@spec do_locate(Enumerable.t(), at_most, t) :: {atom, list(t) | binary}
defp do_locate(parts, cnt, start)
defp do_locate(parts, :all, %{line: line, col: col} = start) when is_loc(line, col) do
locs =
parts
|> report_locs(start)
|> Enum.to_list()
{:ok, locs}
end
defp do_locate(_parts, cnt, _start) when is_integer(cnt) and cnt <= 0 do
{:error, ":at_most value must be greater than 0 or :all"}
end
defp do_locate(parts, cnt, %{line: line, col: col} = start)
when is_integer(cnt) and is_loc(line, col) do
locs =
parts
|> report_locs(start)
|> Enum.take(cnt)
{:ok, locs}
end
defp do_locate(_parts, cnt, _start) when not is_integer(cnt) and cnt != :all do
{:error, ":at_most value must be an integer or :all"}
end
defp do_locate(_parts, _cnt, _start) do
{:error, ":start value must be %{line: integer, col: integer}"}
end
defp get_begin_loc(%{begin_loc: begin_loc}), do: begin_loc
defp include_loc?(loc, start) do
col_predicate = if loc.line === start.line, do: loc.col >= start.col, else: true
loc.line >= start.line && col_predicate
end
@spec report_locs(Enumerable.t(binary), t) :: Enumerable.t(t)
defp report_locs(parts, start) do
acc = %{begin_loc: new_loc(0, 0), end_loc: new_loc(1, 0)}
parts
|> Stream.scan(acc, &report_loc(&2, &1))
|> Stream.map(&get_begin_loc/1)
|> Stream.filter(&include_loc?(&1, start))
end
@spec report_loc(%{begin_loc: t, end_loc: t}, {binary, binary}) :: %{begin_loc: t, end_loc: t}
defp report_loc(acc, {before, match}) do
%{line: blines, col: bcol} = get_partial_loc(before)
%{line: mlines, col: mcol} = get_partial_loc(match)
begin_line = blines + acc.end_loc.line
begin_col = if begin_line === acc.end_loc.line, do: bcol + acc.end_loc.col, else: bcol
begin_loc = new_loc(begin_line, begin_col + @col_offset)
end_line = begin_line + mlines
end_col = if end_line === begin_line, do: begin_col + mcol, else: mcol
end_loc = new_loc(end_line, end_col)
%{begin_loc: begin_loc, end_loc: end_loc}
end
@spec get_partial_loc(binary) :: t
defp get_partial_loc(str) do
stream_lines(str)
|> Enum.take(-1)
|> Enum.at(0)
|> line_info_to_loc()
end
@spec line_info_to_loc({binary, integer}) :: t
defp line_info_to_loc(tup) do
{str, line} = tup
new_loc(line, String.length(str))
end
end
|
lib/sublocator.ex
| 0.916862 | 0.62157 |
sublocator.ex
|
starcoder
|
defmodule Nostrum.Struct.Embed do
@moduledoc ~S"""
Functions that work on Discord embeds.
## Building Embeds
`Nostrum.Struct.Embed`s can be built using this module's builder functions
or standard `Map` syntax:
```Elixir
iex> import Nostrum.Struct.Embed
...> embed =
...> %Nostrum.Struct.Embed{}
...> |> put_title("craig")
...> |> put_description("nostrum")
...> |> put_url("https://google.com/")
...> |> put_timestamp("2016-05-05T21:04:13.203Z")
...> |> put_color(431_948)
...> |> put_field("Field 1", "Test")
...> |> put_field("Field 2", "More test", true)
...> embed
%Nostrum.Struct.Embed{
title: "craig",
description: "nostrum",
url: "https://google.com/",
timestamp: "2016-05-05T21:04:13.203Z",
color: 431_948,
fields: [
%Nostrum.Struct.Embed.Field{name: "Field 1", value: "Test"},
%Nostrum.Struct.Embed.Field{name: "Field 2", value: "More test", inline: true}
]
}
```
## Using structs
You can also create `Nostrum.Struct.Embed`s from structs, by using the
`Nostrum.Struct.Embed` module. Here's how the example above could be built using structs:
```Elixir
defmodule MyApp.MyStruct do
use Nostrum.Struct.Embed
defstruct []
def title(_), do: "craig"
def description(_), do: "nostrum"
def url(_), do: "https://google.com/"
def timestamp(_), do: "2016-05-05T21:04:13.203Z"
def color(_), do: 431_948
def fields(_) do
[
%Nostrum.Struct.Embed.Field{name: "Field 1", value: "Test"},
%Nostrum.Struct.Embed.Field{name: "Field 2", value: "More test", inline: true}
]
end
end
iex> Nostrum.Struct.Embed.from(%MyApp.MyStruct{})
%Nostrum.Struct.Embed{
title: "craig",
description: "nostrum",
url: "https://google.com/",
timestamp: "2016-05-05T21:04:13.203Z",
color: 431_948,
fields: [
%Nostrum.Struct.Embed.Field{name: "Field 1", value: "Test"},
%Nostrum.Struct.Embed.Field{name: "Field 2", value: "More test", inline: true}
]
}
```
See this module's callbacks for a list of all the functions that can be implemented.
The implementation of these callbacks is optional. Not implemented functions will simply
be ignored.
"""
alias Nostrum.Struct.Embed.{Author, Field, Footer, Image, Provider, Thumbnail, Video}
alias Nostrum.Util
alias Poison.Encoder
defstruct [
:title,
:type,
:description,
:url,
:timestamp,
:color,
:footer,
:image,
:thumbnail,
:video,
:provider,
:author,
:fields
]
defimpl Encoder do
def encode(embed, options) do
embed
|> Map.from_struct()
|> Enum.filter(fn {_, v} -> v != nil end)
|> Map.new()
|> Encoder.encode(options)
end
end
@typedoc "Title of the embed"
@type title :: String.t() | nil
@typedoc "Type of the embed"
@type type :: String.t() | nil
@typedoc "Description of the embed"
@type description :: String.t() | nil
@typedoc "Url of the embed"
@type url :: String.t() | nil
@typedoc "Timestamp of embed content"
@type timestamp :: String.t() | nil
@typedoc "Color code of the embed"
@type color :: integer() | nil
@typedoc "Footer information"
@type footer :: Footer.t() | nil
@typedoc "Image information"
@type image :: Image.t() | nil
@typedoc "Thumbnail information"
@type thumbnail :: Thumbnail.t() | nil
@typedoc "Video information"
@type video :: Video.t() | nil
@typedoc "Provider information"
@type provider :: Provider.t() | nil
@typedoc "Author information"
@type author :: Author.t() | nil
@typedoc "Fields information"
@type fields :: [Field.t()] | nil
@type t :: %__MODULE__{
title: title,
type: type,
description: description,
url: url,
timestamp: timestamp,
color: color,
footer: footer,
image: image,
thumbnail: thumbnail,
video: video,
provider: provider,
author: author,
fields: fields
}
@callback author(struct) :: author()
@callback color(struct) :: integer() | nil
@callback fields(struct) :: fields()
@callback description(struct) :: description()
@callback footer(struct) :: footer()
@callback image(struct) :: url()
@callback thumbnail(struct) :: url()
@callback timestamp(struct) :: timestamp()
@callback title(struct) :: title()
@callback url(struct) :: url()
defmacro __using__(_) do
quote do
@behaviour Nostrum.Struct.Embed
def author(_), do: nil
def color(_), do: nil
def fields(_), do: nil
def description(_), do: nil
def footer(_), do: nil
def image(_), do: nil
def thumbnail(_), do: nil
def timestamp(_), do: nil
def title(_), do: nil
def url(_), do: nil
defoverridable(
author: 1,
color: 1,
fields: 1,
description: 1,
footer: 1,
image: 1,
thumbnail: 1,
timestamp: 1,
title: 1,
url: 1
)
end
end
@doc ~S"""
Puts the given `value` under `:title` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_title(embed, "nostrum")
%Nostrum.Struct.Embed{title: "nostrum"}
```
"""
@spec put_title(t, title) :: t
def put_title(%__MODULE__{} = embed, value) do
%__MODULE__{embed | title: value}
end
@doc false
@spec put_type(t, type) :: t
def put_type(%__MODULE__{} = embed, value) do
%__MODULE__{embed | type: value}
end
@doc ~S"""
Puts the given `value` under `:description` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_description(embed, "An elixir library for the discord API.")
%Nostrum.Struct.Embed{description: "An elixir library for the discord API."}
```
"""
@spec put_description(t, description) :: t
def put_description(%__MODULE__{} = embed, value) do
%__MODULE__{embed | description: value}
end
@doc ~S"""
Puts the given `value` under `:url` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_url(embed, "https://github.com/Kraigie/nostrum")
%Nostrum.Struct.Embed{url: "https://github.com/Kraigie/nostrum"}
```
"""
@spec put_url(t, url) :: t
def put_url(%__MODULE__{} = embed, value) do
%__MODULE__{embed | url: value}
end
@doc ~S"""
Puts the given `value` under `:timestamp` in `embed`.
## Examples
```elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_timestamp(embed, "2018-04-21T17:33:51.893000Z")
%Nostrum.Struct.Embed{timestamp: "2018-04-21T17:33:51.893000Z"}
```
"""
@spec put_timestamp(t, timestamp) :: t
def put_timestamp(%__MODULE__{} = embed, value) do
%__MODULE__{embed | timestamp: value}
end
@doc ~S"""
Puts the given `value` under `:color` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_color(embed, 431948)
%Nostrum.Struct.Embed{color: 431948}
```
"""
@spec put_color(t, color) :: t
def put_color(%__MODULE__{} = embed, value) do
%__MODULE__{embed | color: value}
end
@doc ~S"""
Puts a `Nostrum.Struct.Embed.Footer` under `:footer` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_footer(embed, "Discord API", nil)
%Nostrum.Struct.Embed{
footer: %Nostrum.Struct.Embed.Footer{
text: "Discord API",
icon_url: nil
}
}
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_footer(embed, "nostrum footer", "https://discord.com/assets/53ef346458017da2062aca5c7955946b.svg")
%Nostrum.Struct.Embed{
footer: %Nostrum.Struct.Embed.Footer{
text: "nostrum footer",
icon_url: "https://discord.com/assets/53ef346458017da2062aca5c7955946b.svg"
}
}
```
"""
@spec put_footer(t, Footer.text(), Footer.icon_url()) :: t
def put_footer(%__MODULE__{} = embed, text, icon_url \\ nil) do
footer = %Footer{
text: text,
icon_url: icon_url
}
%__MODULE__{embed | footer: footer}
end
@doc ~S"""
Puts a `Nostrum.Struct.Embed.Image` under `:image` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_image(embed, "https://discord.com/assets/af92e60c16b7019f34a467383b31490a.svg")
%Nostrum.Struct.Embed{
image: %Nostrum.Struct.Embed.Image{
url: "https://discord.com/assets/af92e60c16b7019f34a467383b31490a.svg"
}
}
```
"""
@spec put_image(t, Image.url()) :: t
def put_image(%__MODULE__{} = embed, nil) do
%__MODULE__{embed | image: nil}
end
def put_image(%__MODULE__{} = embed, url) do
image = %Image{
url: url
}
%__MODULE__{embed | image: image}
end
@doc ~S"""
Puts a `Nostrum.Struct.Embed.Thumbnail` under `:thumbnail` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_thumbnail(embed, "https://discord.com/assets/af92e60c16b7019f34a467383b31490a.svg")
%Nostrum.Struct.Embed{
thumbnail: %Nostrum.Struct.Embed.Thumbnail{
url: "https://discord.com/assets/af92e60c16b7019f34a467383b31490a.svg"
}
}
```
"""
@spec put_thumbnail(t, Thumbnail.url()) :: t
def put_thumbnail(%__MODULE__{} = embed, nil) do
%__MODULE__{embed | thumbnail: nil}
end
def put_thumbnail(%__MODULE__{} = embed, url) do
thumbnail = %Thumbnail{
url: url
}
%__MODULE__{embed | thumbnail: thumbnail}
end
@doc false
@spec put_video(t, Video.url()) :: t
def put_video(%__MODULE__{} = embed, url) do
video = %Video{
url: url
}
%__MODULE__{embed | video: video}
end
@doc false
@spec put_provider(t, Provider.name(), Provider.url()) :: t
def put_provider(%__MODULE__{} = embed, name, url) do
provider = %Provider{
name: name,
url: url
}
%__MODULE__{embed | provider: provider}
end
@doc ~S"""
Puts a `Nostrum.Struct.Embed.Author` under `:author` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_author(embed, "skippi", "https://github.com/skippi", nil)
%Nostrum.Struct.Embed{
author: %Nostrum.Struct.Embed.Author{
name: "skippi",
url: "https://github.com/skippi",
icon_url: nil
}
}
```
"""
@spec put_author(t, Author.name(), Author.url(), Author.icon_url()) :: t
def put_author(%__MODULE__{} = embed, name, url, icon_url) do
author = %Author{
name: name,
url: url,
icon_url: icon_url
}
%__MODULE__{embed | author: author}
end
@doc ~S"""
Adds a `Nostrum.Struct.Embed.Field` under `:fields` in `embed`.
## Examples
```Elixir
iex> embed = %Nostrum.Struct.Embed{}
...> Nostrum.Struct.Embed.put_field(embed, "First User", "b1nzy")
%Nostrum.Struct.Embed{
fields: [
%Nostrum.Struct.Embed.Field{name: "First User", value: "b1nzy"}
]
}
iex> embed = %Nostrum.Struct.Embed{
...> fields: [
...> %Nostrum.Struct.Embed.Field{name: "First User", value: "b1nzy"}
...> ]
...> }
...> Nostrum.Struct.Embed.put_field(embed, "Second User", "Danny")
%Nostrum.Struct.Embed{
fields: [
%Nostrum.Struct.Embed.Field{name: "First User", value: "b1nzy"},
%Nostrum.Struct.Embed.Field{name: "Second User", value: "Danny"}
]
}
```
"""
@spec put_field(t, Field.name(), Field.value(), Field.inline()) :: t
def put_field(embed, name, value, inline \\ nil)
def put_field(%__MODULE__{fields: fields} = embed, name, value, inline) when is_list(fields) do
field = %Field{
name: name,
value: value,
inline: inline
}
%__MODULE__{embed | fields: fields ++ [field]}
end
def put_field(embed, name, value, inline) do
put_field(%__MODULE__{embed | fields: []}, name, value, inline)
end
@doc """
Creates an embed from a struct that implements the `Nostrum.Struct.Embed` behaviour.
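## Example

A minimal sketch of an implementing module (the module name and `:latency`
field are illustrative; each callback may also return `nil`):

```elixir
defmodule MyApp.PingEmbed do
  @behaviour Nostrum.Struct.Embed
  defstruct [:latency]

  def title(_), do: "Pong!"
  def description(%MyApp.PingEmbed{latency: ms}), do: "Took " <> Integer.to_string(ms) <> " ms"
  def color(_), do: 431_948
  def image(_), do: nil
  def thumbnail(_), do: nil
  def timestamp(_), do: nil
  def url(_), do: nil
  def author(_), do: nil
  def footer(_), do: nil
  def fields(_), do: []
end

Nostrum.Struct.Embed.from(%MyApp.PingEmbed{latency: 42})
```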
"""
def from(%module{} = struct) do
# checks if the struct implements the behaviour
unless Enum.member?(module.module_info(:attributes), {:behaviour, [__MODULE__]}) do
raise "#{module} does not implement the behaviour #{__MODULE__}"
end
embed =
%__MODULE__{}
|> put_color(module.color(struct))
|> put_description(module.description(struct))
|> put_image(module.image(struct))
|> put_thumbnail(module.thumbnail(struct))
|> put_timestamp(module.timestamp(struct))
|> put_title(module.title(struct))
|> put_url(module.url(struct))
embed =
case module.author(struct) do
%Author{} = author -> put_author(embed, author.name, author.url, author.icon_url)
nil -> embed
other -> raise "\"#{inspect(other)}\" is invalid for type author()"
end
embed =
case module.footer(struct) do
%Footer{} = footer -> put_footer(embed, footer.text, footer.icon_url)
nil -> embed
other -> raise "\"#{inspect(other)}\" is invalid for type footer()"
end
struct
|> module.fields()
|> List.wrap()
|> Enum.reduce(embed, fn
%Field{} = field, embed -> put_field(embed, field.name, field.value, field.inline)
other, _ -> raise "\"#{inspect(other)}\" is invalid for type fields()"
end)
end
# TODO: Jump down the rabbit hole
@doc false
def p_encode do
%__MODULE__{}
end
@doc false
def to_struct(map) do
new =
map
|> Map.new(fn {k, v} -> {Util.maybe_to_atom(k), v} end)
|> Map.update(:footer, nil, &Util.cast(&1, {:struct, Footer}))
|> Map.update(:image, nil, &Util.cast(&1, {:struct, Image}))
|> Map.update(:thumbnail, nil, &Util.cast(&1, {:struct, Thumbnail}))
|> Map.update(:video, nil, &Util.cast(&1, {:struct, Video}))
|> Map.update(:provider, nil, &Util.cast(&1, {:struct, Provider}))
|> Map.update(:author, nil, &Util.cast(&1, {:struct, Author}))
|> Map.update(:fields, nil, &Util.cast(&1, {:list, {:struct, Field}}))
struct(__MODULE__, new)
end
end
|
lib/nostrum/struct/embed.ex
| 0.907349 | 0.773195 |
embed.ex
|
starcoder
|
defmodule CsvGenerator do
@moduledoc File.read!("README.md")
@types [:string, :integer, :float, :date, :datetime, :time]
defmacro __using__(_options) do
quote do
Module.register_attribute(__MODULE__, :columns, accumulate: true, persist: false)
Module.register_attribute(__MODULE__, :delimiter, accumulate: false, persist: false)
Module.register_attribute(__MODULE__, :line_ending, accumulate: false, persist: false)
Module.register_attribute(__MODULE__, :decimal_point, accumulate: false, persist: false)
Module.register_attribute(__MODULE__, :hardcoded, accumulate: false, persist: false)
Module.register_attribute(__MODULE__, :no_header, accumulate: false, persist: false)
import unquote(__MODULE__)
@before_compile unquote(__MODULE__)
end
end
defmacro __before_compile__(env) do
compile(
Module.get_attribute(env.module, :columns) |> Enum.reverse(),
Module.get_attribute(env.module, :delimiter, ","),
Module.get_attribute(env.module, :line_ending, "\n"),
Module.get_attribute(env.module, :decimal_point, "."),
Module.get_attribute(env.module, :no_header, false)
)
end
@doc """
Defines a column in the CSV.
`column name, type, options`
The column name will be used to select the value from the given input.
The following types are currently supported:
Type | Elixir type | Default format
:----------- | :---------------------- | :------------------
`:string` | `String` | n/a
`:integer` | `Integer` | n/a
`:float` | `Float` | n/a
`:date` | `Date` | `"%Y-%m-%d"`
`:time` | `DateTime` or `Integer` | `"%H:%M"`
`:datetime` | `DateTime` | `"%Y-%m-%d %H:%M:%S"`
For `:date`, `:time`, and `:datetime`, any of the Date(Time) types that
are compatible with `Calendar.Strftime.strftime/2` are allowed.
`:time` also allows an `Integer` value that represents the time within a day.
## Options
* `:header` - Use this instead of the name for column header.
* `:format` - Supply a different format string, see https://hexdocs.pm/calendar/readme.html.
* `:digits` - Supply the number of digits for a `Float`.
* `:with` - Specifies a function to be called on the value before processing.
column :value, :integer, with: &calc/1 or
column :value, :integer, with: fn(x) -> x * 2 end
* `:source` - Use another field as the source for this column, this allows you to use the same column multiple times.
## nil values
Columns with a `nil` value will be empty in the output.
If you want some _default_ value instead, use the `with:` option to supply a function that transforms the `nil` into something; formatting and rounding options will still be applied.
column :c3po, :integer, with: fn i -> if i == nil, do: 0, else: i end
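## Example

A minimal generator sketch (module and field names are illustrative):

    defmodule PeopleCSV do
      use CsvGenerator

      column :name, :string
      column :born, :date, format: "%d-%m-%Y"
      column :score, :integer, header: "Points"
    end

    IO.puts(PeopleCSV.render([%{name: "Ann", born: ~D[2000-01-02], score: 10}]))
    # "name","born","Points"
    # "Ann",02-01-2000,10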
"""
defmacro column(_name, type, opts \\ [])
defmacro column(_name, type, _opts)
when not (type in @types) do
raise(ArgumentError, "type should be one of #{inspect(@types)} on line #{__CALLER__.line}")
end
defmacro column(name, type, opts) do
# This makes it possible to pass an anonymous function to :with
parms =
case Keyword.get(opts, :with) do
nil -> opts
_ -> Keyword.update!(opts, :with, &Macro.escape/1)
end
quote bind_quoted: [name: name, type: type, opts: parms] do
@columns {name, type, opts}
end
end
@doc """
Defines a column in the CSV that will always have the same hardcoded value.
## Example
hardcoded :string, "name", "John"
For `type` check out the possibilities in `column/3`.
Make sure the `value` is of `type`.
"""
defmacro hardcoded(type, _header, _value)
when not (type in @types) do
raise(ArgumentError, "type should be one of #{inspect(@types)} on line #{__CALLER__.line}")
end
defmacro hardcoded(_type, header, _value) when not is_binary(header) do
raise(ArgumentError, "header should be a string on line #{__CALLER__.line}")
end
defmacro hardcoded(type, header, value) do
opts = [hardcoded: value, header: header]
quote bind_quoted: [type: type, opts: opts] do
@columns {"hardcoded#{length(@columns)}" |> String.to_atom(), type, opts}
end
end
@doc """
Specify the character to use as column delimiter, default: ","
## Example
delimiter ";"
"""
defmacro delimiter(char) when is_binary(char) do
quote bind_quoted: [char: char] do
@delimiter char
end
end
defmacro delimiter(_) do
raise(ArgumentError, "delimiter expects a binary on line #{__CALLER__.line}.")
end
@doc """
Specify the line ending to use, default: "\\n".
## Example
line_ending "\\r\\n"
"""
defmacro line_ending(char) when is_binary(char) do
quote bind_quoted: [char: char] do
@line_ending char
end
end
defmacro line_ending(_) do
raise(ArgumentError, "line_ending expects a binary on line #{__CALLER__.line}.")
end
@doc """
Specify the decimal point, default: "."
## Example
decimal_point ","
"""
defmacro decimal_point(char) when is_binary(char) do
quote bind_quoted: [char: char] do
@decimal_point char
end
end
defmacro decimal_point(_) do
raise(ArgumentError, "decimal_point expects a binary on line #{__CALLER__.line}.")
end
@doc """
Add the header to the generated CSV, default: true.
## Example
header false
"""
defmacro header(flag) when is_boolean(flag) do
quote bind_quoted: [flag: !flag] do
@no_header flag
end
end
defmacro header(_flag) do
raise(ArgumentError, "header takes a boolean on line #{__CALLER__.line}.")
end
@doc false
def compile(columns, delimiter, line_ending, decimal_point, no_header) do
headers = gen_header(columns, delimiter)
columns_ast = gen_columns(columns, decimal_point)
columns_fn =
Enum.map(columns, fn {name, _type, opts} ->
case Keyword.get(opts, :hardcoded) do
nil ->
value = Keyword.get(opts, :source, name)
quote do
render(unquote(name), Map.get(row, unquote(value)))
end
value ->
quote bind_quoted: [name: name, value: Macro.escape(value)] do
render(name, value)
end
end
end)
row_fn =
quote do
Enum.map(list, fn row ->
unquote(columns_fn)
|> Enum.join(unquote(delimiter))
end)
end
list_fn =
if no_header do
row_fn
else
quote do
[unquote(headers) | unquote(row_fn)]
end
end
quote do
unquote(columns_ast)
@doc """
Called to render the CSV output.
## Example
iex> MyCSV.render(list)
"..."
"""
def render(list) when is_list(list) do
unquote(list_fn) |> Enum.join(unquote(line_ending))
end
end
end
defp gen_header(columns, delimiter) do
Enum.map(columns, fn {name, _type, opts} ->
~s("#{Keyword.get(opts, :header, name)}")
end)
|> Enum.join(delimiter)
end
defp gen_columns(columns, decimal_point) do
for {name, type, opts} <- columns do
{fname, func} =
case Keyword.get(opts, :with) do
nil ->
{:render,
quote do
# no :with function given; the type-specific render/2 clauses are defined directly below
end}
with_function ->
{:post_render,
quote do
def render(unquote(name), value) do
post_render(unquote(name), unquote(with_function).(value))
end
end}
end
case type do
:string ->
quote do
unquote(func)
def unquote(fname)(unquote(name), value) do
~s("#{value}")
end
end
:integer ->
quote do
@doc false
unquote(func)
@doc false
def unquote(fname)(unquote(name), nil), do: ""
def unquote(fname)(unquote(name), value) when is_integer(value) do
value
end
def unquote(fname)(unquote(name), value) when is_binary(value) do
value
end
def unquote(fname)(unquote(name), value) do
raise "Invalid value for #{unquote(name)}: #{inspect(value)}"
end
end
:float ->
convert =
case {Keyword.get(opts, :digits), decimal_point} do
{nil, "."} ->
quote do
v
end
{nil, char} ->
quote do
v
|> to_string
|> String.replace(".", unquote(char))
end
{digits, "."} ->
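# Bias by half a unit one place below the last kept digit so values
# stored just under a midpoint (e.g. 1.005 held as 1.00499...) still
# round up as intended.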
divisor = 5 / :math.pow(10, digits + 2)
quote do
Float.round(v + unquote(divisor), unquote(digits))
end
{digits, char} ->
divisor = 5 / :math.pow(10, digits + 2)
quote do
Float.round(v + unquote(divisor), unquote(digits))
|> to_string
|> String.replace(".", unquote(char))
end
end
quote do
unquote(func)
def unquote(fname)(unquote(name), value) do
v =
cond do
is_nil(value) ->
""
is_float(value) ->
value
is_integer(value) ->
value / 1
is_binary(value) ->
case Float.parse(value) do
:error ->
raise "Cannort parse float value \"#{inspect(value)}\""
{f, _} ->
f
end
true ->
raise "Invalid float value \"#{inspect(value)}\""
end
unquote(convert)
end
end
:date ->
quote do
unquote(func)
def unquote(fname)(unquote(name), nil), do: ""
def unquote(fname)(unquote(name), value) do
Calendar.Strftime.strftime!(value, unquote(Keyword.get(opts, :format, "%Y-%m-%d")))
end
end
:time ->
quote do
unquote(func)
def unquote(fname)(unquote(name), nil), do: ""
def unquote(fname)(unquote(name), value) when is_integer(value) do
unquote(fname)(unquote(name), DateTime.from_unix!(value))
end
def unquote(fname)(unquote(name), value) do
Calendar.Strftime.strftime!(
value,
unquote(Keyword.get(opts, :format, "%H:%M"))
)
end
end
:datetime ->
quote do
unquote(func)
def unquote(fname)(unquote(name), nil), do: ""
def unquote(fname)(unquote(name), value) do
Calendar.Strftime.strftime!(
value,
unquote(Keyword.get(opts, :format, "%Y-%m-%d %H:%M:%S"))
)
end
end
end
end
end
end
|
lib/csv_generator.ex
| 0.837753 | 0.447158 |
csv_generator.ex
|
starcoder
|
defmodule Beeline.Appsignal do
@moduledoc """
an Appsignal exporter for Beeline telemetry
This exporter works by attaching a telemetry handler. This means that the
code to set the gauge runs in the process of the HealthChecker.
Attach this exporter by adding this task to a supervision tree, for example
the application supervision tree defined in the `lib/my_app/application.ex`
file:
```elixir
def start(_type, _args) do
children = [
{Beeline.Appsignal, []},
MyApp.MyBeelineTopology
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
```
This exporter sets an Appsignal gauge measuring the difference between
the latest available event number and the current position given by the
Beeline's `:get_stream_position` function option. This gauge is tagged
with the name of the producer under the key `:module` and the hostname
on which the producer is running under the key `:hostname`.
## Options
The `start_link/1` function takes a keyword list of options. These can also
be specified by passing the keyword as the second element of a tuple given
to a `Supervisor.start_link/2` list of children.
* `:gauge_name` (string, default: `"event_listener_lag"`) - the gauge name
to which the delta should be published
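
For example, to publish the gauge under a custom name (the name is illustrative):

```elixir
children = [
  {Beeline.Appsignal, gauge_name: "inventory_listener_lag"}
]
```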
"""
@appsignal Application.get_env(:beeline_appsignal, :appsignal, Appsignal)
use Task
@doc false
def start_link(opts) do
Task.start_link(__MODULE__, :attach, [opts])
end
@doc false
def attach(opts) do
:telemetry.attach(
"beeline-appsignal-exporter",
[:beeline, :health_check, :stop],
&__MODULE__.handle_event/4,
opts
)
end
@doc false
def handle_event(_event, _measurement, metadata, state) do
@appsignal.set_gauge(
state[:gauge_name] || "event_listener_lag",
metadata[:head_position] - metadata[:current_position],
%{
module: inspect(metadata[:producer]),
hostname: metadata[:hostname]
}
)
state
end
end
|
lib/beeline/appsignal.ex
| 0.856107 | 0.746486 |
appsignal.ex
|
starcoder
|
defmodule Gullintanni.MergeRequest do
@moduledoc """
Defines a merge request.
"""
alias __MODULE__, as: MergeRequest
@typedoc "A merge request identifier"
@type id :: pos_integer
@typedoc "A 40 character SHA-1 hash"
@type sha :: String.t
@typedoc "Supported merge request states"
@type state :: :under_review | :approved | :build_pending | :build_passed
| :build_failed | :error
@typedoc "The merge request type"
@type t :: %__MODULE__{
id: id,
title: String.t,
url: String.t,
clone_url: String.t,
branch_name: String.t,
target_branch: String.t,
latest_commit: sha,
approved_by: %{optional(String.t) => NaiveDateTime.t},
state: state
}
@enforce_keys [:id]
defstruct [
:id,
:title,
:url,
:clone_url,
:branch_name,
:target_branch,
:latest_commit,
approved_by: %{},
state: :under_review
]
@doc """
Creates a new merge request with the given `id`.
## Options
The accepted options are:
* `:title` - the title of the merge request
* `:url` - the URL for viewing the merge request discussion
* `:clone_url` - the URL for Git to use when cloning the merge request
* `:branch_name` - the Git branch name of the merge request
* `:target_branch` - the name of the Git branch that the merge request is targeting
* `:latest_commit` - the SHA-1 hash of the latest commit on the merge request
* `:approved_by` - the users that have approved the merge request
* `:state` - the state of the merge request; valid states are defined by `t:state/0`
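## Example

A minimal sketch (values are illustrative):

    merge_req = MergeRequest.new(1, title: "Fix crash", branch_name: "fix-crash")
    merge_req.state
    #=> :under_review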
"""
@spec new(id, Keyword.t) :: t
def new(id, opts \\ []) do
Enum.reduce(opts, %MergeRequest{id: id}, fn({key, value}, merge_req) ->
%{merge_req | key => value}
end)
end
@doc """
Replaces the latest commit on the merge request with `sha`.
This will reset the state back to "under review" as well, canceling any
existing approvals.
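For example (the SHA is illustrative):

    merge_req
    |> MergeRequest.update_sha("4e1243bd22c66e76c2ba9eddc1f91394e57f9f83")
    #=> latest_commit is updated, state is :under_review, approvals are cleared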
"""
@spec update_sha(t, sha) :: t
def update_sha(%MergeRequest{} = merge_req, sha) do
%{merge_req | latest_commit: sha} |> reset
end
@spec approved_at(t) :: NaiveDateTime.t
def approved_at(%MergeRequest{} = merge_req) do
merge_req.approved_by
|> Map.values
|> Enum.sort
|> List.first
end
# A basic Finite State Machine
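# Sketch of a typical happy-path transition chain (timestamp illustrative):
#
#   MergeRequest.new(1)
#   |> MergeRequest.approve("alice", ~N[2020-01-01 12:00:00])
#   |> MergeRequest.merge_passed()
#   |> MergeRequest.build_passed()
#   #=> state: :build_passed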
def reset(%MergeRequest{} = merge_req) do
%{merge_req | state: :under_review, approved_by: %{}}
end
@spec approve(t, String.t, NaiveDateTime.t) :: t
def approve(%MergeRequest{state: :under_review} = merge_req, username, timestamp) do
%{merge_req |
state: :approved,
approved_by: Map.put_new(merge_req.approved_by, username, timestamp),
}
end
def approve(%MergeRequest{} = merge_req, _username, _timestamp) do
merge_req
end
def unapprove(%MergeRequest{} = merge_req, _username) do
reset(merge_req)
end
def merge_passed(%MergeRequest{state: :approved} = merge_req),
do: %{merge_req | state: :build_pending}
def merge_failed(%MergeRequest{state: :approved} = merge_req),
do: %{merge_req | state: :error}
def build_passed(%MergeRequest{state: :build_pending} = merge_req),
do: %{merge_req | state: :build_passed}
def build_failed(%MergeRequest{state: :build_pending} = merge_req),
do: %{merge_req | state: :build_failed}
def build_error(%MergeRequest{state: :build_pending} = merge_req),
do: %{merge_req | state: :error}
def ffwd_failed(%MergeRequest{state: :build_passed} = merge_req),
do: %{merge_req | state: :error}
end
defimpl Inspect, for: Gullintanni.MergeRequest do
import Inspect.Algebra
def inspect(merge_req, opts) do
keys = [:id, :state, :title, :url]
attributes =
keys
|> Enum.reduce([], fn(key, attributes) ->
value = Map.get(merge_req, key)
case value do
nil -> attributes
_ -> ["#{key}: #{inspect value}" | attributes]
end
end)
|> Enum.reverse
surround_many("#MergeRequest<", attributes, ">",
opts, fn(i, _opts) -> i end)
end
end
|
apps/gullintanni/lib/gullintanni/merge_request.ex
| 0.819857 | 0.473231 |
merge_request.ex
|
starcoder
|
defmodule Geometry do
@moduledoc """
A set of geometry types for WKT/WKB and GeoJson.
"""
alias Geometry.{
Feature,
FeatureCollection,
GeoJson,
GeometryCollection,
GeometryCollectionM,
GeometryCollectionZ,
GeometryCollectionZM,
LineString,
LineStringM,
LineStringZ,
LineStringZM,
MultiLineString,
MultiLineStringM,
MultiLineStringZ,
MultiLineStringZM,
MultiPoint,
MultiPointM,
MultiPointZ,
MultiPointZM,
MultiPolygon,
MultiPolygonM,
MultiPolygonZ,
MultiPolygonZM,
Point,
PointM,
PointZ,
PointZM,
Polygon,
PolygonM,
PolygonZ,
PolygonZM
}
alias Geometry.{WKB, WKT}
@geometries [
GeometryCollection,
GeometryCollectionM,
GeometryCollectionZ,
GeometryCollectionZM,
LineString,
LineStringM,
LineStringZ,
LineStringZM,
MultiLineString,
MultiLineStringM,
MultiLineStringZ,
MultiLineStringZM,
MultiPoint,
MultiPointM,
MultiPointZ,
MultiPointZM,
MultiPolygon,
MultiPolygonM,
MultiPolygonZ,
MultiPolygonZM,
Polygon,
PolygonM,
PolygonZ,
PolygonZM,
Point,
PointM,
PointZ,
PointZM
]
@geo_json [
Feature,
FeatureCollection
]
@typedoc """
A geometry is one of the provided geometries or geometry-collections.
"""
@type t ::
GeometryCollection.t()
| GeometryCollectionM.t()
| GeometryCollectionZ.t()
| GeometryCollectionZM.t()
| LineString.t()
| LineStringM.t()
| LineStringZ.t()
| LineStringZM.t()
| MultiLineString.t()
| MultiLineStringM.t()
| MultiLineStringZ.t()
| MultiLineStringZM.t()
| MultiPoint.t()
| MultiPointM.t()
| MultiPointZ.t()
| MultiPointZM.t()
| MultiPolygon.t()
| MultiPolygonM.t()
| MultiPolygonZ.t()
| MultiPolygonZM.t()
| Polygon.t()
| PolygonM.t()
| PolygonZ.t()
| PolygonZM.t()
| Point.t()
| PointM.t()
| PointZ.t()
| PointZM.t()
@typedoc """
An n-dimensional coordinate.
"""
@type coordinate :: [number(), ...]
@typedoc """
A list of n-dimensional coordinates.
"""
@type coordinates :: [coordinate()]
@typedoc """
The Spatial Reference System Identifier to identify projected, unprojected,
and local spatial coordinate system definitions.
"""
@type srid :: non_neg_integer()
@typedoc """
[Well-known text](https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry)
(WKT) is a text markup language for representing vector geometry objects.
"""
@type wkt :: String.t()
@typedoc """
[Well-known binary](https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry#Well-known_binary)
The binary representation of WKT.
"""
@type wkb :: binary()
@typedoc """
Errors that can occur when a geometry is generated from WKT.
"""
@type wkt_error ::
{:error, %{expected: t(), got: t()}}
| {
:error,
message :: String.t(),
rest :: String.t(),
{line :: pos_integer(), offset :: non_neg_integer()},
offset :: non_neg_integer()
}
@typedoc """
Errors that can occur when a geometry is generated from WKB.
"""
@type wkb_error ::
{:error, %{expected: t(), got: t()}}
| {:error, message :: String.t(), rest :: binary(), offset :: non_neg_integer()}
@typedoc """
A [GeoJson](https://geojson.org) term.
"""
@type geo_json_term :: map()
@typedoc """
Errors that can occur when a geometry is generated from GeoJson.
"""
@type geo_json_error ::
{:error,
:coordinates_not_found
| :geometries_not_found
| :invalid_data
| :type_not_found
| :unknown_type}
@typedoc """
Byte order.
- `:ndr`: Little endian byte order encoding
- `:xdr`: Big endian byte order encoding
"""
@type endian :: :ndr | :xdr
@type mode :: :binary | :hex
@doc """
Returns true if a geometry is empty.
## Examples
iex> Geometry.empty?(Point.new(1, 2))
false
iex> Geometry.empty?(Point.new())
true
iex> Geometry.empty?(LineString.new([]))
true
"""
@spec empty?(t()) :: boolean
def empty?(%module{} = geometry)
when module in @geometries or module in @geo_json do
module.empty?(geometry)
end
@doc """
Returns the WKB representation of a geometry.
With option `:srid` an EWKB representation with the SRID is returned.
The option `:endian` indicates whether `:xdr` big endian or `:ndr` little
endian is returned. The default is `:xdr`.
The `:mode` determines whether a hex-string or binary is returned. The default
is `:binary`.
## Examples
iex> Geometry.to_wkb(PointZ.new(1, 2, 3), endian: :ndr, mode: :hex)
"0101000080000000000000F03F00000000000000400000000000000840"
iex> Geometry.to_wkb(Point.new(1, 2), srid: 4711) |> Hex.from_binary()
"0020000001000012673FF00000000000004000000000000000"
"""
@spec to_wkb(t(), opts) :: String.t()
when opts: [endian: endian(), srid: srid(), mode: mode()]
def to_wkb(%module{} = geometry, opts \\ []) when module in @geometries do
module.to_wkb(geometry, opts)
end
@doc """
Returns an `:ok` tuple with the geometry from the given WKB. Otherwise
returns an `:error` tuple.
If the WKB contains an SRID, the tuple is extended with the SRID.
The optional second argument determines whether a `:hex` string or a `:binary`
input is expected. The default is `:binary`.
## Examples
iex> Geometry.from_wkb("0101000080000000000000F03F00000000000000400000000000000840", :hex)
{:ok, %PointZ{coordinate: [1.0, 2.0, 3.0]}}
iex> Geometry.from_wkb("0020000001000012673FF00000000000004000000000000000", :hex)
{:ok, {%Point{coordinate: [1.0, 2.0]}, 4711}}
"""
@spec from_wkb(wkb(), mode()) :: {:ok, t() | {t(), srid()}} | wkb_error
def from_wkb(wkb, mode \\ :binary), do: WKB.Parser.parse(wkb, mode)
@doc """
The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkb!(wkb(), mode()) :: t() | {t(), srid()}
def from_wkb!(wkb, mode \\ :binary) do
case WKB.Parser.parse(wkb, mode) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the WKT representation of a geometry. An optional `:srid` can be set
in the options.
## Examples
iex> Geometry.to_wkt(Point.new(1, 2))
"Point (1 2)"
iex> Geometry.to_wkt(PointZ.new(1.1, 2.2, 3.3), srid: 4211)
"SRID=4211;Point Z (1.1 2.2 3.3)"
iex> Geometry.to_wkt(LineString.new([Point.new(1, 2), Point.new(3, 4)]))
"LineString (1 2, 3 4)"
"""
@spec to_wkt(t(), opts) :: String.t()
when opts: [srid: srid()]
def to_wkt(%module{} = geometry, opts \\ []) when module in @geometries do
module.to_wkt(geometry, opts)
end
@doc """
Returns an `:ok` tuple with the geometry from the given WKT string. Otherwise
returns an `:error` tuple.
If the geometry contains an SRID, the SRID is added to the tuple.
## Examples
iex> Geometry.from_wkt("Point ZM (1 2 3 4)")
{:ok, %PointZM{coordinate: [1, 2, 3, 4]}}
iex> Geometry.from_wkt("SRID=42;Point (1.1 2.2)")
{:ok, {%Point{coordinate: [1.1, 2.2]}, 42}}
"""
@spec from_wkt(wkt()) :: {:ok, t() | {t(), srid()}} | wkt_error
def from_wkt(wkt), do: WKT.Parser.parse(wkt)
@doc """
The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkt!(wkt()) :: t() | {t(), srid()}
def from_wkt!(wkt) do
case WKT.Parser.parse(wkt) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the GeoJSON term representation of a geometry.
## Examples
iex> Geometry.to_geo_json(PointZ.new(1.2, 3.4, 5.6))
%{"type" => "Point", "coordinates" => [1.2, 3.4, 5.6]}
iex> Geometry.to_geo_json(LineString.new([Point.new(1, 2), Point.new(3, 4)]))
%{"type" => "LineString", "coordinates" => [[1, 2], [3, 4]]}
"""
@spec to_geo_json(t() | Feature.t() | FeatureCollection.t()) :: geo_json_term
def to_geo_json(%module{} = geometry)
when module in @geometries or module in @geo_json do
module.to_geo_json(geometry)
end
@doc """
Returns an `:ok` tuple with the geometry from the given GeoJSON term.
Otherwise returns an `:error` tuple.
The `:type` option specifies which type is expected. The
possible values are `:z`, `:m`, and `:zm`.
## Examples
iex> ~s({"type": "Point", "coordinates": [1, 2]})
iex> |> Jason.decode!()
iex> |> Geometry.from_geo_json()
{:ok, %Point{coordinate: [1, 2]}}
iex> ~s({"type": "Point", "coordinates": [1, 2, 3, 4]})
iex> |> Jason.decode!()
iex> |> Geometry.from_geo_json(type: :zm)
{:ok, %PointZM{coordinate: [1, 2, 3, 4]}}
"""
@spec from_geo_json(geo_json_term(), opts) ::
{:ok, t() | Feature.t() | FeatureCollection.t()} | geo_json_error
when opts: [type: :z | :m | :zm]
def from_geo_json(json, opts \\ []), do: GeoJson.to_geometry(json, opts)
@doc """
The same as `from_geo_json/2`, but raises a `Geometry.Error` exception if it
fails.
"""
@spec from_geo_json!(geo_json_term(), opts) :: t() | Feature.t() | FeatureCollection.t()
when opts: [type: :z | :m | :zm]
def from_geo_json!(json, opts \\ []) do
case GeoJson.to_geometry(json, opts) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc false
@spec default_endian :: endian()
def default_endian, do: :xdr
@doc false
@spec default_mode :: mode()
def default_mode, do: :binary
end
|
lib/geometry.ex
| 0.938597 | 0.64586 |
geometry.ex
|
starcoder
|
defmodule Elsa.Consumer.Worker do
@moduledoc """
Defines the worker GenServer that is managed by the DynamicSupervisor.
Workers are instantiated and assigned to a specific topic/partition
and process messages according to the specified message handler module
passed in from the manager before calling the ack function to
notify the cluster the messages have been successfully processed.
"""
use GenServer, restart: :temporary, shutdown: 10_000
require Logger
import Elsa.Supervisor, only: [registry: 1]
import Record, only: [defrecord: 2, extract: 2]
defrecord :kafka_message_set, extract(:kafka_message_set, from_lib: "brod/include/brod.hrl")
@subscribe_delay 200
@subscribe_retries 20
@start_failure_delay 5_000
defmodule State do
@moduledoc """
The running state of the worker process.
"""
defstruct [
:connection,
:topic,
:partition,
:generation_id,
:offset,
:handler,
:handler_init_args,
:handler_state,
:config,
:consumer_pid
]
end
@type init_opts :: [
connection: Elsa.connection(),
topic: Elsa.topic(),
partition: Elsa.partition(),
generation_id: non_neg_integer,
begin_offset: non_neg_integer,
handler: module,
handler_init_args: term,
config: :brod.consumer_options()
]
@doc """
Start the worker process and init the state with the given config.
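For example (connection and handler names are illustrative):

    Elsa.Consumer.Worker.start_link(
      connection: :default_client,
      topic: "events",
      partition: 0,
      begin_offset: :undefined,
      handler: MyApp.EventHandler,
      config: [begin_offset: :earliest]
    )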
"""
@spec start_link(init_opts) :: GenServer.on_start()
def start_link(init_args) do
GenServer.start_link(__MODULE__, init_args)
end
def init(init_args) do
Process.flag(:trap_exit, true)
state = %State{
connection: Keyword.fetch!(init_args, :connection),
topic: Keyword.fetch!(init_args, :topic),
partition: Keyword.fetch!(init_args, :partition),
generation_id: Keyword.get(init_args, :generation_id),
offset: Keyword.fetch!(init_args, :begin_offset),
handler: Keyword.fetch!(init_args, :handler),
handler_init_args: Keyword.get(init_args, :handler_init_args, []),
config: Keyword.get(init_args, :config, [])
}
Process.put(:elsa_connection, state.connection)
Process.put(:elsa_topic, state.topic)
Process.put(:elsa_partition, state.partition)
Process.put(:elsa_generation_id, state.generation_id)
Elsa.Registry.register_name({registry(state.connection), :"worker_#{state.topic}_#{state.partition}"}, self())
{:ok, state, {:continue, :subscribe}}
end
def handle_continue(:subscribe, state) do
registry = registry(state.connection)
with {:ok, consumer_pid} <- start_consumer(state.connection, state.topic, state.partition, state.config),
:yes <- Elsa.Registry.register_name({registry, :"consumer_#{state.topic}_#{state.partition}"}, consumer_pid),
:ok <- subscribe(consumer_pid, state) do
{:ok, handler_state} = state.handler.init(state.handler_init_args)
{:noreply, %{state | consumer_pid: consumer_pid, handler_state: handler_state}}
else
{:error, reason} ->
Logger.warn(
"Unable to subscribe to topic/partition/offset(#{state.topic}/#{state.partition}/#{state.offset}), reason #{
inspect(reason)
}"
)
Process.sleep(@start_failure_delay)
{:stop, reason, state}
end
end
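# Handler callbacks may return {:ack | :acknowledge, state},
# {:ack | :acknowledge, offset, state}, {:no_ack | :noop, state},
# or {:continue, state}; each clause below handles one of these shapes.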
def handle_info({_consumer_pid, kafka_message_set(topic: topic, partition: partition, messages: messages)}, state) do
transformed_messages = transform_messages(topic, partition, messages, state)
case send_messages_to_handler(transformed_messages, state) do
{ack, new_handler_state} when ack in [:ack, :acknowledge] ->
offset = transformed_messages |> List.last() |> Map.get(:offset)
ack_messages(topic, partition, offset, state)
{:noreply, %{state | offset: offset, handler_state: new_handler_state}}
{ack, offset, new_handler_state} when ack in [:ack, :acknowledge] ->
ack_messages(topic, partition, offset, state)
{:noreply, %{state | offset: offset, handler_state: new_handler_state}}
{no_ack, new_handler_state} when no_ack in [:no_ack, :noop] ->
{:noreply, %{state | handler_state: new_handler_state}}
{:continue, new_handler_state} ->
offset = transformed_messages |> List.last() |> Map.get(:offset)
:ok = :brod_consumer.ack(state.consumer_pid, offset)
{:noreply, %{state | handler_state: new_handler_state}}
end
end
def handle_info({:EXIT, _pid, reason}, state) do
{:stop, reason, state}
end
def terminate(_reason, %{consumer_pid: nil} = state) do
state
end
def terminate(reason, state) do
:brod_consumer.unsubscribe(state.consumer_pid, self())
Process.exit(state.consumer_pid, reason)
state
end
defp transform_messages(topic, partition, messages, state) do
Enum.map(messages, &Elsa.Message.new(&1, topic: topic, partition: partition, generation_id: state.generation_id))
end
defp send_messages_to_handler(messages, state) do
state.handler.handle_messages(messages, state.handler_state)
end
defp ack_messages(_topic, _partition, offset, %{generation_id: nil} = state) do
:brod_consumer.ack(state.consumer_pid, offset)
end
defp ack_messages(topic, partition, offset, state) do
Elsa.Group.Acknowledger.ack(state.connection, topic, partition, state.generation_id, offset)
offset
end
defp start_consumer(connection, topic, partition, config) do
registry = registry(connection)
brod_client = Elsa.Registry.whereis_name({registry, :brod_client})
:brod_consumer.start_link(brod_client, topic, partition, config)
end
defp subscribe(consumer_pid, state, retries \\ @subscribe_retries)
defp subscribe(_consumer_pid, _state, 0) do
{:error, :failed_subscription}
end
defp subscribe(consumer_pid, state, retries) do
opts = determine_subscriber_opts(state)
case :brod_consumer.subscribe(consumer_pid, self(), opts) do
{:error, reason} ->
Logger.warn(
"Retrying to subscribe to topic #{state.topic} parition #{state.partition} offset #{state.offset} reason #{
inspect(reason)
}"
)
Process.sleep(@subscribe_delay)
subscribe(consumer_pid, state, retries - 1)
:ok ->
Logger.info("Subscribing to topic #{state.topic} partition #{state.partition} offset #{state.offset}")
:ok
end
end
defp determine_subscriber_opts(state) do
begin_offset =
case state.offset do
:undefined ->
Keyword.get(state.config, :begin_offset, :latest)
offset ->
offset
end
Keyword.put(state.config, :begin_offset, begin_offset)
end
end
|
lib/elsa/consumer/worker.ex
| 0.682679 | 0.406126 |
worker.ex
|
starcoder
|
defmodule Bolt.Cogs.GateKeeper.OnJoin do
@moduledoc false
@behaviour Nosedrum.Command
alias Bolt.Cogs.GateKeeper.Actions
alias Bolt.Converters
alias Bolt.Gatekeeper
alias Bolt.Schema.JoinAction
alias Bolt.{ErrorFormatters, ModLog, Repo}
alias Nosedrum.Predicates
alias Nostrum.Api
alias Nostrum.Struct.{Channel, User}
require Logger
@impl true
def usage, do: ["keeper onjoin <action...>"]
@impl true
def description,
do: """
Sets actions to be ran when a member joins the server.
**Actions**:
• `ignore`: Delete any configured actions.
• `send <template:str> to user`: Attempts to send the given `template` to the user who joined.
If the user has direct messages disabled, this will fail.
• `send <template:str> to <channel:textchannel>`: Sends the given `template` to the given `channel`.
• `add role <role:role...>`: Adds the given `role` to the member who joined.
Templates are regular text that have special values interpolated when they are about to be sent out.
You can use `{mention}` to mention the user who joined in the resulting text.
**Examples**:
```rs
// On join, (attempt to) send "Welcome to our server!" to the user who joined
.keeper onjoin send "Welcome to our server!" to user
// On join, send "Welcome to our server, {mention}!" to the #welcome channel
.keeper onjoin send "Welcome to our server, {mention}!" to #welcome
// On join, add the role 'Guest' to the user who joined
.keeper onjoin add role Guest
```
"""
@impl true
def predicates, do: [&Predicates.guild_only/1, Predicates.has_permission(:manage_guild)]
@impl true
def command(msg, []) do
Actions.command(msg, ["join"])
end
def command(msg, ["add", "role" | role_str]) do
response =
with {:ok, role} <- Converters.to_role(msg.guild_id, Enum.join(role_str, " ")),
action_map <- %{
guild_id: msg.guild_id,
action: "add_role",
data: %{
"role_id" => role.id
}
},
changeset <- JoinAction.changeset(%JoinAction{}, action_map),
{:ok, _action} <- Repo.insert(changeset) do
ModLog.emit(
msg.guild_id,
"CONFIG_UPDATE",
"#{User.full_name(msg.author)} set gatekeeper to add role `#{role.name}` on join"
)
"👌 will now add role `#{role.name}` on join"
else
error -> ErrorFormatters.fmt(msg, error)
end
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
def command(msg, ["ignore"]) do
{total_deleted, _} = Gatekeeper.clear_actions(msg.guild_id, :join)
response =
if total_deleted == 0 do
"🚫 no actions to delete"
else
ModLog.emit(
msg.guild_id,
"CONFIG_UPDATE",
"#{User.full_name(msg.author)} deleted **#{total_deleted}** join action(s)"
)
"👌 deleted **#{total_deleted}** join actions"
end
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
def command(msg, ["send", template, "to", "user"]) do
action_map = %{
guild_id: msg.guild_id,
action: "send_dm",
data: %{
"template" => template
}
}
changeset = JoinAction.changeset(%JoinAction{}, action_map)
response =
case Repo.insert(changeset) do
{:ok, _action} ->
ModLog.emit(
msg.guild_id,
"CONFIG_UPDATE",
"#{User.full_name(msg.author)} set gatekeeper to DM users with " <>
"```md\n#{template}``` on join"
)
"👌 will now attempt to DM users with the given template on join"
error ->
ErrorFormatters.fmt(msg, error)
end
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
def command(msg, ["send", template, "to", channel_str]) do
response =
with {:ok, channel} <- Converters.to_channel(msg.guild_id, channel_str),
action_map <- %{
guild_id: msg.guild_id,
action: "send_guild",
data: %{
"channel_id" => channel.id,
"template" => template
}
},
changeset <- JoinAction.changeset(%JoinAction{}, action_map),
{:ok, _action} <- Repo.insert(changeset) do
ModLog.emit(
msg.guild_id,
"CONFIG_UPDATE",
"#{User.full_name(msg.author)} set gatekeeper to send " <>
"```md\n#{template}``` to #{Channel.mention(channel)} on join"
)
"👌 will now send the given template to #{Channel.mention(channel)} on join"
else
error ->
ErrorFormatters.fmt(msg, error)
end
{:ok, _msg} = Api.create_message(msg.channel_id, response)
end
end
|
lib/bolt/cogs/gatekeeper/onjoin.ex
| 0.819605 | 0.656569 |
onjoin.ex
|
starcoder
|
defmodule GGity.Geom.Point do
@moduledoc false
alias GGity.{Draw, Geom, Plot}
@type t() :: %__MODULE__{}
@type plot() :: %Plot{}
@type record() :: map()
@type mapping() :: map()
defstruct data: nil,
mapping: nil,
stat: :identity,
position: :identity,
key_glyph: :point,
alpha: 1,
color: "black",
shape: :circle,
size: 4
@spec new(mapping(), keyword()) :: Geom.Point.t()
def new(mapping, options) do
struct(Geom.Point, [{:mapping, mapping} | options])
end
@spec draw(Geom.Point.t(), list(map()), plot()) :: iolist()
def draw(%Geom.Point{} = geom_point, data, plot), do: points(geom_point, data, plot)
defp points(%Geom.Point{} = geom_point, data, %Plot{scales: scales} = plot) do
scale_transforms =
geom_point.mapping
|> Map.keys()
|> Enum.reduce(%{}, fn aesthetic, mapped ->
Map.put(mapped, aesthetic, Map.get(scales[aesthetic], :transform))
end)
transforms =
geom_point
|> Map.take([:alpha, :color, :shape, :size])
|> Enum.reduce(%{}, fn {aesthetic, fixed_value}, fixed ->
Map.put(fixed, aesthetic, fn _value -> fixed_value end)
end)
|> Map.merge(scale_transforms)
data
|> Stream.map(fn row ->
[
transforms.x.(row[geom_point.mapping.x]),
transforms.y.(row[geom_point.mapping.y]),
transforms.alpha.(row[geom_point.mapping[:alpha]]),
transforms.color.(row[geom_point.mapping[:color]]),
transforms.shape.(row[geom_point.mapping[:shape]]),
transforms.size.(row[geom_point.mapping[:size]])
]
end)
|> Stream.map(fn row -> Enum.zip([:x, :y, :fill_opacity, :fill, :shape, :size], row) end)
|> Enum.map(fn row ->
Draw.marker(
row[:shape],
{row[:x] + plot.area_padding,
(plot.width - row[:y]) / plot.aspect_ratio + plot.area_padding},
row[:size],
Keyword.take(row, [:fill, :fill_opacity])
)
end)
end
end
|
lib/ggity/geom/point.ex
| 0.858392 | 0.617513 |
point.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL.Connection do
@moduledoc """
Specifies the behaviour to be implemented by SQL pooled connections.
"""
use Behaviour
@doc """
Connects to the underlying database.
Should return a process which is linked to
the caller process or an error.
"""
defcallback connect(Keyword.t) :: {:ok, pid} | {:error, term}
@doc """
Disconnects the given `pid`.
If the given `pid` no longer exists, it should not raise.
"""
defcallback disconnect(pid) :: :ok
@doc """
Executes the given query with params in connection.
In case of success, it must return an `:ok` tuple containing
a map with at least two keys:
* `:num_rows` - the number of rows affected
* `:rows` - the result set as a list. `nil` may be returned
instead of the list if the command does not yield any row
as result (but still yields the number of affected rows,
like a `delete` command without returning would)
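For example, a successful single-row `INSERT ... RETURNING id` might yield
(a sketch):

    {:ok, %{num_rows: 1, rows: [{42}]}}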
"""
defcallback query(pid, query :: binary, params :: list(), opts :: Keyword.t) ::
{:ok, %{rows: nil | [tuple], num_rows: non_neg_integer}} | {:error, Exception.t}
## Queries
@doc """
Receives a query and must return a SELECT query.
"""
defcallback all(Ecto.Query.t) :: String.t
@doc """
Receives a query and values to update and must return an UPDATE query.
"""
defcallback update_all(Ecto.Query.t, values :: Keyword.t) :: String.t
@doc """
Receives a query and must return a DELETE query.
"""
defcallback delete_all(Ecto.Query.t) :: String.t
@doc """
Returns an INSERT for the given `fields` in `table` returning
the given `returning`.
"""
defcallback insert(table :: String.t, fields :: [atom], returning :: [atom]) :: String.t
@doc """
Returns an UPDATE for the given `fields` in `table` filtered by
`filters` returning the given `returning`.
"""
defcallback update(table :: String.t, fields :: [atom],
filters :: [atom], returning :: [atom]) :: String.t
@doc """
Returns a DELETE for the `filters` returning the given `returning`.
"""
defcallback delete(table :: String.t, filters :: [atom], returning :: [atom]) :: String.t
## DDL
@doc """
Receives a DDL object and returns a query that checks its existence.
"""
defcallback ddl_exists(Ecto.Adapter.Migration.ddl_object) :: String.t
@doc """
Receives a DDL command and returns a query that executes it.
"""
defcallback execute_ddl(Ecto.Adapter.Migration.command) :: String.t
## Transaction
@doc """
Command to begin transaction.
"""
defcallback begin_transaction :: String.t
@doc """
Command to rollback transaction.
"""
defcallback rollback :: String.t
@doc """
Command to commit transaction.
"""
defcallback commit :: String.t
@doc """
Command to emit savepoint.
"""
defcallback savepoint(savepoint :: String.t) :: String.t
@doc """
Command to rollback to savepoint.
"""
defcallback rollback_to_savepoint(savepoint :: String.t) :: String.t
end
|
lib/ecto/adapters/sql/connection.ex
| 0.889879 | 0.550245 |
connection.ex
|
starcoder
|
defmodule DataLogger.Destination do
@moduledoc """
A behaviour, representing a destination for data logging.
The mandatory callback to implement is `DataLogger.Destination.send_data/3`.
An implementation should handle errors and retries by using the optional callbacks
`DataLogger.Destination.on_error/4` and/or `DataLogger.Destination.on_success/4`.
These functions are called with the result of the call to the `DataLogger.Destination.send_data/3` function.
An implementation can also have custom initialization, using the optional callback `DataLogger.Destination.initialize/1`, which
gets the configured options from the config and can return modified options.
By default it returns what is passed to it.
A possible implementation could look like this:
defmodule RelationalDBDestination do
use DataLogger.Destination
@impl true
def send_data(topic, data, options) do
connection = ConnectionToDBImpl.connect(options)
query_to_insert_data = transform_data_to_query(topic, data)
case ConnectionToDBImpl.execute(connection, query_to_insert_data, data) do
:ok -> :ok
{:error, reason} -> {:error, reason}
end
end
end
The implementation can also define the `DataLogger.Destination.on_error/4` function to retry on error or
log a message; a sketch is shown below the configuration example. The default implementations of the `DataLogger.Destination.on_error/4` and `DataLogger.Destination.on_success/4` callbacks do nothing.
The above example implementation can be configured in the application configuration like this:
config :data_logger,
destinations: [
{RelationalDBDestination, %{host: "localhost", user: "inflowmatix", password: "<PASSWORD>", send_async: true}}
]
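A retrying `on_error/4` for the example above could look like this (a sketch;
a real implementation should bound the number of retries):

    @impl true
    def on_error(_reason, topic, data, options) do
      # naive single retry, ignoring a second failure
      send_data(topic, data, options)
      :ok
    end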
"""
@type topic :: atom() | String.t()
@type prefix :: atom() | String.t()
@type options :: map()
@type send_result ::
:ok
| {:ok, result :: term()}
| {:error, reason :: term()}
| {:ok, result :: term(), options()}
| {:error, reason :: term(), options()}
@callback send_data(topic(), data :: term(), options()) :: send_result()
@callback initialize(options()) :: options()
@callback on_error(error :: term(), topic(), data :: term(), options()) :: :ok
@callback on_success(result :: term(), topic(), data :: term(), options()) :: :ok
@optional_callbacks initialize: 1,
on_error: 4,
on_success: 4
@doc false
defmacro __using__(_) do
quote do
@behaviour DataLogger.Destination
@doc false
def initialize(options), do: options
@doc false
def on_error(_, _, _, _), do: :ok
@doc false
def on_success(_, _, _, _), do: :ok
defoverridable initialize: 1, on_error: 4, on_success: 4
end
end
end
|
lib/data_logger/destination.ex
| 0.866839 | 0.564068 |
destination.ex
|
starcoder
|
defmodule AWS.CodeGuruProfiler do
@moduledoc """
This section provides documentation for the Amazon CodeGuru Profiler API
operations.
Amazon CodeGuru Profiler collects runtime performance data from your live
applications, and provides recommendations that can help you fine-tune your
application performance. Using machine learning algorithms, CodeGuru Profiler
can help you find your most expensive lines of code and suggest ways you can
improve efficiency and remove CPU bottlenecks.
Amazon CodeGuru Profiler provides different visualizations of profiling data to
help you identify what code is running on the CPU, see how much time is
consumed, and suggest ways to reduce CPU utilization.
Amazon CodeGuru Profiler currently supports applications written in all Java
virtual machine (JVM) languages and Python. While CodeGuru Profiler supports
both visualizations and recommendations for applications written in Java, it can
also generate visualizations and a subset of recommendations for applications
written in other JVM languages and Python.
For more information, see [What is Amazon CodeGuru Profiler](https://docs.aws.amazon.com/codeguru/latest/profiler-ug/what-is-codeguru-profiler.html)
in the *Amazon CodeGuru Profiler User Guide*.
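A minimal call sketch using this module (credentials, region, and the
profiling group name are illustrative):

    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    {:ok, result, _http_response} =
      AWS.CodeGuruProfiler.describe_profiling_group(client, "my-profiling-group")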
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-07-18",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "codeguru-profiler",
global?: false,
protocol: "rest-json",
service_id: "CodeGuruProfiler",
signature_version: "v4",
signing_name: "codeguru-profiler",
target_prefix: nil
}
end
@doc """
Add up to 2 anomaly notifications channels for a profiling group.
"""
def add_notification_channels(%Client{} = client, profiling_group_name, input, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/notificationConfiguration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns the time series of values for a requested list of frame metrics from a
time period.
"""
def batch_get_frame_metric_data(%Client{} = client, profiling_group_name, input, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/frames/-/metrics"
headers = []
{query_params, input} =
[
{"endTime", "endTime"},
{"period", "period"},
{"startTime", "startTime"},
{"targetResolution", "targetResolution"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Used by profiler agents to report their current state and to receive remote
configuration updates.
For example, `ConfigureAgent` can be used to tell an agent whether to profile or
not and for how long to return profiling data.
"""
def configure_agent(%Client{} = client, profiling_group_name, input, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/configureAgent"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a profiling group.
"""
def create_profiling_group(%Client{} = client, input, options \\ []) do
url_path = "/profilingGroups"
headers = []
{query_params, input} =
[
{"clientToken", "clientToken"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Deletes a profiling group.
"""
def delete_profiling_group(%Client{} = client, profiling_group_name, input, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Returns a [ `ProfilingGroupDescription`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_ProfilingGroupDescription.html)
object that contains information about the requested profiling group.
"""
def describe_profiling_group(%Client{} = client, profiling_group_name, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns a list of [ `FindingsReportSummary`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_FindingsReportSummary.html)
objects that contain analysis results for all profiling groups in your AWS
account.
"""
def get_findings_report_account_summary(
%Client{} = client,
daily_reports_only \\ nil,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/internal/findingsReports"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(daily_reports_only) do
[{"dailyReportsOnly", daily_reports_only} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Get the current configuration for anomaly notifications for a profiling group.
"""
def get_notification_configuration(%Client{} = client, profiling_group_name, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/notificationConfiguration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the JSON-formatted resource-based policy on a profiling group.
"""
def get_policy(%Client{} = client, profiling_group_name, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/policy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Gets the aggregated profile of a profiling group for a specified time range.
Amazon CodeGuru Profiler collects posted agent profiles for a profiling group
into aggregated profiles.
Because aggregated profiles expire over time, `GetProfile` is not idempotent.
Specify the time range for the requested aggregated profile using 1 or 2 of the
following parameters: `startTime`, `endTime`, `period`. The maximum time range
allowed is 7 days. If you specify all 3 parameters, an exception is thrown. If
you specify only `period`, the latest aggregated profile is returned.
Aggregated profiles are available with aggregation periods of 5 minutes, 1 hour,
and 1 day, aligned to UTC. The aggregation period of an aggregated profile
determines how long it is retained. For more information, see [
`AggregatedProfileTime`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_AggregatedProfileTime.html).
The retention period depends on the aggregation period:
* If the aggregation period is 5 minutes, the aggregated profile is
retained for 15 days.
* If the aggregation period is 1 hour, the aggregated profile is
retained for 60 days.
* If the aggregation period is 1 day, the aggregated profile is
retained for 3 years.
There are two use cases for calling `GetProfile`.
1. If you want to return an aggregated profile that already exists,
use [ `ListProfileTimes`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_ListProfileTimes.html)
to view the time ranges of existing aggregated profiles. Use them in a
`GetProfile` request to return a specific, existing aggregated profile.
2. If you want to return an aggregated profile for a time range that
doesn't align with an existing aggregated profile, then CodeGuru Profiler makes
a best effort to combine existing aggregated profiles from the requested time
range and return them as one aggregated profile.
If aggregated profiles do not exist for the full time range requested, then
aggregated profiles for a smaller time range are returned. For example, if the
requested time range is from 00:00 to 00:20, and the existing aggregated
profiles are from 00:15 and 00:25, then the aggregated profiles from 00:15 to
00:20 are returned.
"""
def get_profile(
%Client{} = client,
profiling_group_name,
end_time \\ nil,
max_depth \\ nil,
period \\ nil,
start_time \\ nil,
accept \\ nil,
options \\ []
) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/profile"
headers = []
headers =
if !is_nil(accept) do
[{"Accept", accept} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(period) do
[{"period", period} | query_params]
else
query_params
end
query_params =
if !is_nil(max_depth) do
[{"maxDepth", max_depth} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
options =
Keyword.put(
options,
:response_header_parameters,
[{"Content-Encoding", "contentEncoding"}, {"Content-Type", "contentType"}]
)
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns a list of [ `Recommendation`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_Recommendation.html)
objects that contain recommendations for a profiling group for a given time
period.
A list of [ `Anomaly`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_Anomaly.html)
objects that contains details about anomalies detected in the profiling group
for the same time period is also returned.
"""
def get_recommendations(
%Client{} = client,
profiling_group_name,
end_time,
locale \\ nil,
start_time,
options \\ []
) do
url_path = "/internal/profilingGroups/#{URI.encode(profiling_group_name)}/recommendations"
headers = []
query_params = []
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(locale) do
[{"locale", locale} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
List the available reports for a given profiling group and time range.
"""
def list_findings_reports(
%Client{} = client,
profiling_group_name,
daily_reports_only \\ nil,
end_time,
max_results \\ nil,
next_token \\ nil,
start_time,
options \\ []
) do
url_path = "/internal/profilingGroups/#{URI.encode(profiling_group_name)}/findingsReports"
headers = []
query_params = []
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
query_params =
if !is_nil(daily_reports_only) do
[{"dailyReportsOnly", daily_reports_only} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists the start times of the available aggregated profiles of a profiling group
for an aggregation period within the specified time range.
"""
def list_profile_times(
%Client{} = client,
profiling_group_name,
end_time,
max_results \\ nil,
next_token \\ nil,
order_by \\ nil,
period,
start_time,
options \\ []
) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/profileTimes"
headers = []
query_params = []
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(period) do
[{"period", period} | query_params]
else
query_params
end
query_params =
if !is_nil(order_by) do
[{"orderBy", order_by} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns a list of profiling groups.
The profiling groups are returned as [ `ProfilingGroupDescription`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_ProfilingGroupDescription.html)
objects.
"""
def list_profiling_groups(
%Client{} = client,
include_description \\ nil,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/profilingGroups"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(include_description) do
[{"includeDescription", include_description} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
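# Pagination sketch: `next_token` threads through repeated calls until the
# response no longer includes one. The "profilingGroups"/"nextToken" response
# keys are assumptions based on the CodeGuru Profiler API shape; adjust them
# to the actual payload.
#
#   def list_all_profiling_groups(client, token \\ nil, acc \\ []) do
#     {:ok, body, _} = list_profiling_groups(client, true, 100, token)
#     acc = acc ++ Map.get(body, "profilingGroups", [])
#     case Map.get(body, "nextToken") do
#       nil -> acc
#       next -> list_all_profiling_groups(client, next, acc)
#     end
#   end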
@doc """
Returns a list of the tags that are assigned to a specified resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Submits profiling data to an aggregated profile of a profiling group.
To get an aggregated profile that is created with this profiling data, use [
`GetProfile`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_GetProfile.html).
"""
def post_agent_profile(%Client{} = client, profiling_group_name, input, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}/agentProfile"
{headers, input} =
[
{"contentType", "Content-Type"}
]
|> Request.build_params(input)
{query_params, input} =
[
{"profileToken", "profileToken"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Adds permissions to a profiling group's resource-based policy that are provided
using an action group.
If a profiling group doesn't have a resource-based policy, one is created for it
using the permissions in the action group and the roles and users in the
`principals` parameter.
The one supported action group that can be added is `agentPermission`, which
grants `ConfigureAgent` and `PostAgentProfile` permissions. For more information, see
[Resource-based policies in CodeGuru Profiler](https://docs.aws.amazon.com/codeguru/latest/profiler-ug/resource-based-policies.html)
in the *Amazon CodeGuru Profiler User Guide*, [ `ConfigureAgent`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_ConfigureAgent.html),
and [ `PostAgentProfile`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_PostAgentProfile.html).
The first time you call `PutPermission` on a profiling group, do not specify a
`revisionId` because it doesn't have a resource-based policy. Subsequent calls
must provide a `revisionId` to specify which revision of the resource-based
policy to add the permissions to.
The response contains the profiling group's JSON-formatted resource policy.
"""
def put_permission(%Client{} = client, action_group, profiling_group_name, input, options \\ []) do
url_path =
"/profilingGroups/#{URI.encode(profiling_group_name)}/policy/#{URI.encode(action_group)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
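# Sketch of the revisionId flow described above. The principal ARN and the
# response shape are assumptions for illustration; the action-group literal
# is taken from the doc above.
#
#   input = %{"principals" => ["arn:aws:iam::123456789012:role/AppRole"]}
#   {:ok, body, _} = put_permission(client, "agentPermission", "my-group", input)
#   # Subsequent calls must reference the current policy revision:
#   input = Map.put(input, "revisionId", body["revisionId"])
#   {:ok, _, _} = put_permission(client, "agentPermission", "my-group", input)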
@doc """
Removes one anomaly notification channel for a profiling group.
"""
def remove_notification_channel(
%Client{} = client,
channel_id,
profiling_group_name,
input,
options \\ []
) do
url_path =
"/profilingGroups/#{URI.encode(profiling_group_name)}/notificationConfiguration/#{
URI.encode(channel_id)
}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Removes permissions from a profiling group's resource-based policy that are
provided using an action group.
The one supported action group that can be removed is `agentPermission`, which
grants `ConfigureAgent` and `PostAgentProfile` permissions. For more information, see
[Resource-based policies in CodeGuru Profiler](https://docs.aws.amazon.com/codeguru/latest/profiler-ug/resource-based-policies.html)
in the *Amazon CodeGuru Profiler User Guide*, [ `ConfigureAgent`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_ConfigureAgent.html),
and [ `PostAgentProfile`
](https://docs.aws.amazon.com/codeguru/latest/profiler-api/API_PostAgentProfile.html).
"""
def remove_permission(
%Client{} = client,
action_group,
profiling_group_name,
input,
options \\ []
) do
url_path =
"/profilingGroups/#{URI.encode(profiling_group_name)}/policy/#{URI.encode(action_group)}"
headers = []
{query_params, input} =
[
{"revisionId", "revisionId"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Sends feedback to CodeGuru Profiler about whether the anomaly detected by the
analysis is useful or not.
"""
def submit_feedback(
%Client{} = client,
anomaly_instance_id,
profiling_group_name,
input,
options \\ []
) do
url_path =
"/internal/profilingGroups/#{URI.encode(profiling_group_name)}/anomalies/#{
URI.encode(anomaly_instance_id)
}/feedback"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Use to assign one or more tags to a resource.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Use to remove one or more tags from a resource.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_params, input} =
[
{"tagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Updates a profiling group.
"""
def update_profiling_group(%Client{} = client, profiling_group_name, input, options \\ []) do
url_path = "/profilingGroups/#{URI.encode(profiling_group_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
|
lib/aws/generated/code_guru_profiler.ex
| 0.81772 | 0.417687 |
code_guru_profiler.ex
|
starcoder
|
defmodule AWS.ResourceGroups do
@moduledoc """
AWS Resource Groups
AWS Resource Groups lets you organize AWS resources such as Amazon EC2
instances, Amazon Relational Database Service databases, and Amazon S3 buckets
into groups using criteria that you define as tags.
A resource group is a collection of resources that match the resource types
specified in a query, and share one or more tags or portions of tags. You can
create a group of resources based on their roles in your cloud infrastructure,
lifecycle stages, regions, application layers, or virtually any criteria.
Resource Groups enable you to automate management tasks, such as those in AWS
Systems Manager Automation documents, on tag-related resources in AWS Systems
Manager. Groups of tagged resources also let you quickly view a custom console
in AWS Systems Manager that shows AWS Config compliance and other monitoring
data about member resources.
To create a resource group, build a resource query, and specify tags that
identify the criteria that members of the group have in common. Tags are
key-value pairs.
For more information about Resource Groups, see the [AWS Resource Groups User Guide](https://docs.aws.amazon.com/ARG/latest/userguide/welcome.html).
AWS Resource Groups uses a REST-compliant API that you can use to perform the
following types of operations.
* Create, Read, Update, and Delete (CRUD) operations on resource
groups and resource query entities
* Applying, editing, and removing tags from resource groups
* Resolving resource group member ARNs so they can be returned as
search results
* Getting data about resources that are members of a group
* Searching AWS resources based on a resource query
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-11-27",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "resource-groups",
global?: false,
protocol: "rest-json",
service_id: "Resource Groups",
signature_version: "v4",
signing_name: "resource-groups",
target_prefix: nil
}
end
@doc """
Creates a resource group with the specified name and description.
You can optionally include a resource query, or a service configuration. For
more information about constructing a resource query, see [Create a tag-based group in Resource
Groups](https://docs.aws.amazon.com/ARG/latest/userguide/gettingstarted-query.html#gettingstarted-query-cli-tag).
For more information about service configurations, see [Service configurations for resource
groups](https://docs.aws.amazon.com/ARG/latest/APIReference/about-slg.html).
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:CreateGroup`
"""
def create_group(%Client{} = client, input, options \\ []) do
url_path = "/groups"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
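# Input sketch for a tag-based group. The payload mirrors the CreateGroup
# API (a Name plus a ResourceQuery whose Query field is a JSON-encoded
# string); the concrete group name and filters below are illustrative only.
#
#   input = %{
#     "Name" => "my-ec2-fleet",
#     "ResourceQuery" => %{
#       "Type" => "TAG_FILTERS_1_0",
#       "Query" =>
#         ~s({"ResourceTypeFilters":["AWS::EC2::Instance"],"TagFilters":[{"Key":"Stage","Values":["prod"]}]})
#     }
#   }
#   {:ok, _body, _response} = create_group(client, input)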
@doc """
Deletes the specified resource group.
Deleting a resource group does not delete any resources that are members of the
group; it only deletes the group structure.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:DeleteGroup`
"""
def delete_group(%Client{} = client, input, options \\ []) do
url_path = "/delete-group"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns information about a specified resource group.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:GetGroup`
"""
def get_group(%Client{} = client, input, options \\ []) do
url_path = "/get-group"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the service configuration associated with the specified resource group.
For details about the service configuration syntax, see [Service configurations for resource
groups](https://docs.aws.amazon.com/ARG/latest/APIReference/about-slg.html).
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:GetGroupConfiguration`
"""
def get_group_configuration(%Client{} = client, input, options \\ []) do
url_path = "/get-group-configuration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves the resource query associated with the specified resource group.
For more information about resource queries, see [Create a tag-based group in Resource
Groups](https://docs.aws.amazon.com/ARG/latest/userguide/gettingstarted-query.html#gettingstarted-query-cli-tag).
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:GetGroupQuery`
"""
def get_group_query(%Client{} = client, input, options \\ []) do
url_path = "/get-group-query"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a list of tags that are associated with a resource group, specified by
an ARN.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:GetTags`
"""
def get_tags(%Client{} = client, arn, options \\ []) do
url_path = "/resources/#{URI.encode(arn)}/tags"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Adds the specified resources to the specified group.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:GroupResources`
"""
def group_resources(%Client{} = client, input, options \\ []) do
url_path = "/group-resources"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a list of ARNs of the resources that are members of a specified resource
group.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:ListGroupResources`
"""
def list_group_resources(%Client{} = client, input, options \\ []) do
url_path = "/list-group-resources"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a list of existing resource groups in your account.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:ListGroups`
"""
def list_groups(%Client{} = client, input, options \\ []) do
url_path = "/groups-list"
headers = []
{query_params, input} =
[
{"MaxResults", "maxResults"},
{"NextToken", "nextToken"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Attaches a service configuration to the specified group.
This occurs asynchronously, and can take time to complete. You can use
`GetGroupConfiguration` to check the status of the update.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:PutGroupConfiguration`
"""
def put_group_configuration(%Client{} = client, input, options \\ []) do
url_path = "/put-group-configuration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
202
)
end
@doc """
Returns a list of AWS resource identifiers that match the specified query.
The query uses the same format as a resource query in a `CreateGroup` or
`UpdateGroupQuery` operation.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:SearchResources`
"""
def search_resources(%Client{} = client, input, options \\ []) do
url_path = "/resources/search"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Adds tags to a resource group with the specified ARN.
Existing tags on a resource group are not changed if they are not specified in
the request parameters.
Do not store personally identifiable information (PII) or other confidential or
sensitive information in tags. We use tags to provide you with billing and
administration services. Tags are not intended to be used for private or
sensitive data.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:Tag`
"""
def tag(%Client{} = client, arn, input, options \\ []) do
url_path = "/resources/#{URI.encode(arn)}/tags"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the specified resources from the specified group.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:UngroupResources`
"""
def ungroup_resources(%Client{} = client, input, options \\ []) do
url_path = "/ungroup-resources"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes tags from a specified resource group.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:Untag`
"""
def untag(%Client{} = client, arn, input, options \\ []) do
url_path = "/resources/#{URI.encode(arn)}/tags"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the description for an existing group.
You cannot update the name of a resource group.
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:UpdateGroup`
"""
def update_group(%Client{} = client, input, options \\ []) do
url_path = "/update-group"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates the resource query of a group.
For more information about resource queries, see [Create a tag-based group in Resource
Groups](https://docs.aws.amazon.com/ARG/latest/userguide/gettingstarted-query.html#gettingstarted-query-cli-tag).
## Minimum permissions
To run this command, you must have the following permissions:
* `resource-groups:UpdateGroupQuery`
"""
def update_group_query(%Client{} = client, input, options \\ []) do
url_path = "/update-group-query"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
|
lib/aws/generated/resource_groups.ex
| 0.874573 | 0.473414 |
resource_groups.ex
|
starcoder
|
defmodule Goth.Backoff do
@moduledoc false
alias Goth.Backoff
@default_type :rand_exp
@min 1_000
@max 30_000
defstruct [:type, :min, :max, :state]
def new(opts) do
case Keyword.get(opts, :backoff_type, @default_type) do
:stop ->
nil
type ->
{min, max} = min_max(opts)
new(type, min, max)
end
end
def backoff(%Backoff{type: :rand, min: min, max: max} = s) do
{rand(min, max), s}
end
def backoff(%Backoff{type: :exp, min: min, state: nil} = s) do
{min, %Backoff{s | state: min}}
end
def backoff(%Backoff{type: :exp, max: max, state: prev} = s) do
require Bitwise
next = min(Bitwise.<<<(prev, 1), max)
{next, %Backoff{s | state: next}}
end
def backoff(%Backoff{type: :rand_exp, max: max, state: state} = s) do
{prev, lower} = state
next_min = min(prev, lower)
next_max = min(prev * 3, max)
next = rand(next_min, next_max)
{next, %Backoff{s | state: {next, lower}}}
end
def reset(%Backoff{type: :rand} = s), do: s
def reset(%Backoff{type: :exp} = s), do: %Backoff{s | state: nil}
def reset(%Backoff{type: :rand_exp, min: min, state: {_, lower}} = s) do
%Backoff{s | state: {min, lower}}
end
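# Usage sketch: successive `backoff/1` calls return growing, jittered delays
# (in ms, given the @min/@max defaults above) until `reset/1` is called.
# The delays in the comments are ranges, not deterministic results.
#
#   backoff = Goth.Backoff.new(backoff_type: :rand_exp)
#   {delay1, backoff} = Goth.Backoff.backoff(backoff) # 1_000..3_000
#   {delay2, backoff} = Goth.Backoff.backoff(backoff) # up to 3x the previous
#   backoff = Goth.Backoff.reset(backoff)             # back to the minimum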
## Internal
defp min_max(opts) do
case {opts[:backoff_min], opts[:backoff_max]} do
{nil, nil} -> {@min, @max}
{nil, max} -> {min(@min, max), max}
{min, nil} -> {min, max(min, @max)}
{min, max} -> {min, max}
end
end
defp new(_, min, _) when not (is_integer(min) and min >= 0) do
raise ArgumentError, "minimum #{inspect(min)} not 0 or a positive integer"
end
defp new(_, _, max) when not (is_integer(max) and max >= 0) do
raise ArgumentError, "maximum #{inspect(max)} not 0 or a positive integer"
end
defp new(_, min, max) when min > max do
raise ArgumentError, "minimum #{min} is greater than maximum #{max}"
end
defp new(:rand, min, max) do
%Backoff{type: :rand, min: min, max: max, state: nil}
end
defp new(:exp, min, max) do
%Backoff{type: :exp, min: min, max: max, state: nil}
end
defp new(:rand_exp, min, max) do
lower = max(min, div(max, 3))
%Backoff{type: :rand_exp, min: min, max: max, state: {min, lower}}
end
defp new(type, _, _) do
raise ArgumentError, "unknown type #{inspect(type)}"
end
defp rand(min, max) do
:rand.uniform(max - min + 1) + min - 1
end
end
|
lib/goth/backoff.ex
| 0.738669 | 0.476214 |
backoff.ex
|
starcoder
|
defmodule Commanded.Registration do
@moduledoc """
Defines a behaviour for a process registry to be used by Commanded.
By default, Commanded will use a local process registry, defined in
`Commanded.Registration.LocalRegistry`, that uses Elixir's `Registry` module
for local process registration. This limits Commanded to only run on a single
node. However the `Commanded.Registration` behaviour can be implemented by a
library to provide distributed process registration to support running on a
cluster of nodes.
"""
@type start_child_arg :: {module(), keyword} | module()
@doc """
Return an optional supervisor spec for the registry
"""
@callback child_spec() :: [:supervisor.child_spec()]
@doc """
Use to start a supervisor.
"""
@callback supervisor_child_spec(module :: atom, arg :: any()) :: :supervisor.child_spec()
@doc """
Starts a uniquely named child process of a supervisor using the given module
and args.
Registers the pid with the given name.
"""
@callback start_child(name :: term(), supervisor :: module(), child_spec :: start_child_arg) ::
{:ok, pid} | {:error, term}
@doc """
Starts a uniquely named `GenServer` process for the given module and args.
Registers the pid with the given name.
"""
@callback start_link(name :: term(), module :: module(), args :: any()) ::
{:ok, pid} | {:error, term}
@doc """
Get the pid of a registered name.
Returns `:undefined` if the name is unregistered.
"""
@callback whereis_name(name :: term()) :: pid() | :undefined
@doc """
Return a `:via` tuple to route a message to a process by its registered name
"""
@callback via_tuple(name :: term()) :: {:via, module(), name :: term()}
@doc false
@spec child_spec() :: [:supervisor.child_spec()]
def child_spec, do: registry_provider().child_spec()
@doc false
@spec supervisor_child_spec(module :: atom, arg :: any()) :: :supervisor.child_spec()
def supervisor_child_spec(module, arg), do: registry_provider().supervisor_child_spec(module, arg)
@doc false
@spec start_child(name :: term(), supervisor :: module(), child_spec :: start_child_arg) ::
{:ok, pid()} | {:error, reason :: term()}
def start_child(name, supervisor, child_spec),
do: registry_provider().start_child(name, supervisor, child_spec)
@doc false
@spec start_link(name :: term(), module :: module(), args :: any()) ::
{:ok, pid()} | {:error, reason :: term()}
def start_link(name, module, args), do: registry_provider().start_link(name, module, args)
@doc false
@spec whereis_name(term()) :: pid() | :undefined
def whereis_name(name), do: registry_provider().whereis_name(name)
@doc false
@spec via_tuple(name :: term()) :: {:via, module(), name :: term()}
def via_tuple(name), do: registry_provider().via_tuple(name)
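# Usage sketch: name a process through the configured registry so the same
# code works with local or distributed providers. Assumes the registry's
# supervisor (see `child_spec/0`) is running; `MyApp.Worker` is hypothetical.
#
#   name = Commanded.Registration.via_tuple({:worker, 42})
#   {:ok, _pid} = GenServer.start_link(MyApp.Worker, [], name: name)
#   Commanded.Registration.whereis_name({:worker, 42}) #=> pid or :undefined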
@doc """
Get the configured process registry.
Defaults to a local registry, restricted to running on a single node.
"""
@spec registry_provider() :: module()
def registry_provider do
case Application.get_env(:commanded, :registry, :local) do
:local -> Commanded.Registration.LocalRegistry
other -> other
end
end
@doc """
Use the `Commanded.Registration` module to import the registry provider and
via tuple functions.
"""
defmacro __using__(_opts) do
quote location: :keep do
@before_compile unquote(__MODULE__)
import unquote(__MODULE__), only: [registry_provider: 0, via_tuple: 1]
alias unquote(__MODULE__)
end
end
@doc """
Allow a registry provider to handle the standard `GenServer` callback
functions
"""
defmacro __before_compile__(_env) do
quote generated: true, location: :keep do
@doc false
def handle_call(request, from, state),
do: registry_provider().handle_call(request, from, state)
@doc false
def handle_cast(request, state),
do: registry_provider().handle_cast(request, state)
@doc false
def handle_info(msg, state),
do: registry_provider().handle_info(msg, state)
end
end
end
|
lib/commanded/registration/registration.ex
| 0.868367 | 0.454291 |
registration.ex
|
starcoder
|
defmodule Advent.Y2021.D20 do
@moduledoc """
https://adventofcode.com/2021/day/20
"""
use Bitwise
@typep point :: {integer(), integer()}
@typep pixel :: 0 | 1
@typep image :: %{point() => pixel()}
@typep lookup :: 0..511
@typep algorithm_int :: non_neg_integer()
@typep algorithm_map :: %{lookup() => pixel()}
@typep algorithm :: algorithm_int() | algorithm_map()
@doc """
"""
@spec part_one(Enumerable.t()) :: non_neg_integer()
def part_one(input) do
{image, alg} = parse_input(input)
solve(image, alg, 2)
end
@doc """
"""
@spec part_two(Enumerable.t()) :: non_neg_integer()
def part_two(input) do
{image, alg} = parse_input(input)
solve(image, alg, 50)
end
defp solve(image, alg, count) do
Stream.iterate({image, 0}, &enhance(&1, alg))
|> Enum.at(count)
|> elem(0)
|> Map.values()
|> Enum.sum()
end
@spec parse_input(Enumerable.t()) :: {image(), algorithm()}
defp parse_input(input) do
{alg, image, _rows} =
Enum.reduce(input, {[], nil, 0}, fn
"", {alg, nil, 0} ->
# int-based alg
alg =
alg
|> Enum.reverse()
|> Integer.undigits(2)
# map-based alg
# alg =
# alg
# |> Enum.reverse()
# |> Enum.with_index()
# |> Map.new(fn {bit, idx} -> {idx, bit} end)
{alg, Map.new(), 0}
line, {alg, nil, 0} ->
alg =
line
|> String.graphemes()
|> Enum.reduce(alg, fn
".", alg -> [0 | alg]
"#", alg -> [1 | alg]
end)
{alg, nil, 0}
line, {alg, image, row} ->
image =
line
|> String.graphemes()
|> Enum.with_index()
|> Enum.reduce(image, fn
{"#", col}, image -> Map.put(image, {col, row}, 1)
{".", col}, image -> Map.put(image, {col, row}, 0)
end)
{alg, image, row + 1}
end)
{image, alg}
end
@spec neighbors(point()) :: [point()]
defp neighbors({x, y}) do
for y_inc <- -1..1,
x_inc <- -1..1,
do: {x + x_inc, y + y_inc}
end
@spec points_to_num([point()], image(), pixel()) :: integer()
defp points_to_num(points, image, default) do
points
|> Enum.map(&Map.get(image, &1, default))
|> Integer.undigits(2)
end
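# Worked example: the nine neighbours (row-major, centre included) are read
# as a 9-bit number, most significant bit first. With only the centre pixel
# lit and a default of 0:
#
#   points_to_num(neighbors({0, 0}), %{{0, 0} => 1}, 0)
#   #=> Integer.undigits([0, 0, 0, 0, 1, 0, 0, 0, 0], 2) = 16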
@spec lookup(lookup(), algorithm_int()) :: pixel()
defp lookup(index, alg) when is_integer(alg) do
alg >>> (511 - index) &&& 1
end
@spec lookup(lookup(), algorithm_map()) :: pixel()
defp lookup(index, alg) when is_map(alg) do
Map.fetch!(alg, index)
end
defp pixel(point, image, alg, default) do
point
|> neighbors()
|> points_to_num(image, default)
|> lookup(alg)
end
@spec enhance({image(), pixel()}, algorithm()) :: {image(), pixel()}
defp enhance({image, default}, alg) do
image =
image
|> Map.keys()
|> Stream.flat_map(&neighbors/1)
|> Stream.uniq()
|> Map.new(fn point ->
{point, pixel(point, image, alg, default)}
end)
# nine copies of the background pixel form index 0 (all dark) or 511 (all lit)
default = lookup(default * 511, alg)
{image, default}
end
# defp print_image({image, _default}) do
# {{x_min, _}, {x_max, _}} = Enum.min_max_by(image, &elem(&1, 0))
# {{_, y_min}, {_, y_max}} = Enum.min_max_by(image, &elem(&1, 1))
# IO.puts("")
# for y <- y_min..y_max do
# for x <- x_min..x_max, into: "" do
# if MapSet.member?(image, {x, y}), do: "#", else: "."
# end
# end
# |> Enum.join("\n")
# |> IO.puts()
# end
end
|
lib/advent/y2021/d20.ex
| 0.708918 | 0.583352 |
d20.ex
|
starcoder
|
defmodule Conrex.CONREC do
@moduledoc false
# values is a 2d array of "heights"; contour_levels a list of height values to contour at
def conrec(values, x_coords, y_coords, contour_levels) do
Enum.reduce(contour_levels, %{}, fn contour_level, contours ->
# iterate over each cell of 2x2 coordinates
segments = Enum.reduce(0..(length(x_coords) - 2), [], fn i, segments ->
row_segments = Enum.reduce(0..(length(y_coords) - 2), [], fn j, row_segments ->
cell = cell_at(values, x_coords, y_coords, i, j)
if cell_has_segments?(cell, contour_level) do
segments = cell_segments(cell, contour_level)
List.flatten([segments | row_segments])
else
row_segments
end
end)
List.flatten([row_segments | segments])
end)
Map.put(contours, contour_level, segments)
end)
end
# gets a cell at x, y in a grid
defp cell_at(values, x_coords, y_coords, x, y) do
v1 = vertex_at(values, x_coords, y_coords, x, y)
v2 = vertex_at(values, x_coords, y_coords, x+1, y)
v3 = vertex_at(values, x_coords, y_coords, x+1, y+1)
v4 = vertex_at(values, x_coords, y_coords, x, y+1)
[ v1, v2, v3, v4 ] # arranged clockwise
end
# finds a vertex in a grid
defp vertex_at(values, x_coords, y_coords, x, y) do
x_coord = Enum.at(x_coords, x)
y_coord = Enum.at(y_coords, y)
value = Enum.at(Enum.at(values, x), y)
{x_coord, y_coord, value}
end
# gets all segments for a cell
defp cell_segments(cell, level) do
tris = cell_to_tris(cell)
Enum.map(tris, fn triangle -> get_segment(triangle, level) end)
|> Enum.filter(fn triangle -> triangle != :nil end)
end
defp cell_has_segments?(cell, level), do: level > cell_min(cell) and level < cell_max(cell)
defp cell_max(cell) do
{ _x, _y, h } = Enum.max_by(cell, fn { _x, _y, h } -> h end)
h
end
defp cell_min(cell) do
{ _x, _y, h } = Enum.min_by(cell, fn { _x, _y, h } -> h end)
h
end
defp cell_to_tris([ v1, v2, v3, v4 ] = cell) do
# center vertex is average of corners
center = cell_center(cell)
[
{ v1, v2, center },
{ v2, v3, center },
{ v3, v4, center },
{ v1, v4, center }
]
end
defp cell_center(cell) do
num_verts = length(cell)
cell
|> Enum.reduce({ 0, 0, 0 }, fn { x, y, h }, { cx, cy, ch } -> { cx + x, cy + y, ch + h } end)
|> (fn { x, y, h } -> { x / num_verts, y / num_verts, h / num_verts } end).()
end
def get_segment({ v1, v2, v3 } = triangle, level) do
case segment_position(triangle, level) do
# pathological case
{ :on, :on, :on } -> :nil
# segment between two vertices
{ :on, :on, _ } -> { point(v1), point(v2) }
{ :on, _, :on } -> { point(v1), point(v3) }
{ _, :on, :on } -> { point(v2), point(v3) }
# segment from one vertex to opposite side
{ :on, :above, :below } -> { point(v1), intersect(v2, v3, level) }
{ :on, :below, :above } -> { point(v1), intersect(v2, v3, level) }
{ :above, :on, :below } -> { point(v2), intersect(v1, v3, level) }
{ :below, :on, :above } -> { point(v2), intersect(v1, v3, level) }
{ :above, :below, :on } -> { point(v3), intersect(v1, v2, level) }
{ :below, :above, :on } -> { point(v3), intersect(v1, v2, level) }
# segment from one side to another side
{ :below, :above, :above } -> { intersect(v1, v2, level), intersect(v1, v3, level) }
{ :above, :below, :below } -> { intersect(v1, v2, level), intersect(v1, v3, level) }
{ :above, :below, :above } -> { intersect(v1, v2, level), intersect(v2, v3, level) }
{ :below, :above, :below } -> { intersect(v1, v2, level), intersect(v2, v3, level) }
{ :above, :above, :below } -> { intersect(v1, v3, level), intersect(v2, v3, level) }
{ :below, :below, :above } -> { intersect(v1, v3, level), intersect(v2, v3, level) }
# no segment
_ -> :nil
end
end
defp segment_position({ v1, v2, v3 }, level) do
{ vertex_position(v1, level), vertex_position(v2, level), vertex_position(v3, level) }
end
defp vertex_position({ _x, _y, h }, level) when h < level, do: :below
defp vertex_position({ _x, _y, h }, level) when h == level, do: :on
defp vertex_position({ _x, _y, h }, level) when h > level, do: :above
defp intersect({ x1, y1, h1 }, { x2, y2, h2 }, level) do
d1 = h1 - level
d2 = h2 - level
x = (d2*x1 - d1*x2) / (d2 - d1)
y = (d2*y1 - d1*y2) / (d2 - d1)
{ x, y }
end
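# Worked example: contouring at level 2 along an edge from height 1 at
# (0, 0) to height 3 at (1, 0) crosses halfway along that edge:
#
#   intersect({0, 0, 1}, {1, 0, 3}, 2)
#   #=> d1 = -1, d2 = 1, so x = (1 * 0 - -1 * 1) / 2 = 0.5
#   #=> {0.5, 0.0}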
defp point({ x, y, _h }), do: { x, y }
end
|
lib/conrex/conrec.ex
| 0.757166 | 0.760028 |
conrec.ex
|
starcoder
|
defmodule StripJs do
@moduledoc ~S"""
StripJs is an Elixir module for stripping executable JavaScript from
blocks of HTML and CSS, based on the <a target=_blank
href="https://github.com/philss/floki">Floki</a> parsing library.
It handles:
* `<script>...</script>` and `<script src="..."></script>` tags
* Event handler attributes such as `onclick="..."`
* `javascript:...` URLs in HTML and CSS
* CSS `expression(...)` directives
* HTML entity attacks (like `&lt;script&gt;`)
StripJs is production ready, and has sanitized over 1.5 billion payloads
at Appcues.
## Installation
Add `strip_js` to your application's `mix.exs`:
def application do
[applications: [:strip_js]]
end
def deps do
[{:strip_js, "~> #{StripJs.Mixfile.project()[:version]}"}]
end
## Usage
`clean_html/2` removes all JS vectors from an HTML string:
iex> html = "<button onclick=\"alert('pwnt')\">Hi!</button>"
iex> StripJs.clean_html(html)
"<button>Hi!</button>"
`clean_css/2` removes all JS vectors from a CSS string:
iex> css = "body { background-image: url('javascript:alert()'); }"
iex> StripJs.clean_css(css)
"body { background-image: url('removed_by_strip_js:alert()'); }"
StripJs relies on the [Floki](https://github.com/philss/floki)
HTML parser library, which is built using
[Mochiweb](https://github.com/mochi/mochiweb) by default.
StripJs provides a `clean_html_tree/1` function to strip JS from
`Floki.parse_fragment/1`- and `:mochiweb_html.parse/1`- style HTML parse trees.
## Security
StripJs blocks every JS injection vector known to the authors. It has
survived four years in production, multiple professional penetration
tests, and over a billion invocations with no known security issues.
If you believe there are JS injection methods not covered by this library,
please submit an issue with a test case!
## Bugs and Limitations
The brokenness of invalid HTML may be amplified by `clean_html/2`.
In uncommon cases, innocent CSS which very closely resembles
JS-injection techniques may be mangled by `clean_css/2`.
## Authorship and License
Copyright 2017-2021, Appcues, Inc.
Project homepage:
[StripJs](https://github.com/appcues/strip_js)
StripJs is released under the
[MIT License](https://opensource.org/licenses/MIT).
"""
require Logger
# reserved for future use
@type opts :: Keyword.t()
@type html_tag :: String.t()
@type html_attr :: {String.t(), String.t()}
@type html_node :: String.t() | {html_tag, [html_attr], [html_node]}
@type html_tree :: html_node | [html_node]
@doc ~S"""
Removes JS vectors from the given HTML string.
All non-tag text and tag attribute values will be HTML-escaped, except
for the contents of `<style>` tags, which are passed through `clean_css/2`.
Even if the input HTML contained no JS, the output of `clean_html/2`
is not guaranteed to match its input byte-for-byte.
Examples:
iex> StripJs.clean_html("<button onclick=\"alert('phear');\">Click here</button>")
"<button>Click here</button>"
iex> StripJs.clean_html("<script> console.log('oh heck'); </script>")
""
iex> StripJs.clean_html("&lt;script&gt; console.log('oh heck'); &lt;/script&gt;")
"&lt;script&gt; console.log('oh heck'); &lt;/script&gt;" ## HTML entity attack didn't work
"""
@spec clean_html(String.t(), opts) :: String.t()
def clean_html(html, opts \\ []) when is_binary(html) do
html
|> parse_html(opts)
|> clean_html_tree(opts)
|> to_html
end
@doc false
def strip_js(html, opts \\ []) do
IO.warn("StripJs.strip_js is deprecated; use StripJs.clean_html instead")
clean_html(html, opts)
end
@doc ~S"""
Removes JS vectors from the given
[Floki](https://github.com/philss/floki)/
[Mochiweb](https://github.com/mochi/mochiweb)-style HTML tree
(`t:html_tree/0`).
All attribute values and tag bodies except embedded stylesheets
will be HTML-escaped.
"""
@spec clean_html_tree(html_tree, opts) :: html_tree
def clean_html_tree(trees, opts \\ [])
def clean_html_tree(trees, opts) when is_list(trees) do
Enum.map(trees, &clean_html_tree(&1, opts))
end
def clean_html_tree({:comment, comment}, _opts) do
{:comment, comment}
end
def clean_html_tree({tag, attrs, children}, _opts) do
case String.downcase(tag) do
"script" ->
# remove scripts entirely
""
"style" ->
# don't HTML-escape!
cleaned_css = children |> to_html |> clean_css
{tag, clean_attrs(attrs), [cleaned_css]}
_ ->
cleaned_children = Enum.map(children, &clean_html_tree(&1))
{tag, clean_attrs(attrs), cleaned_children}
end
end
def clean_html_tree(string, _opts) when is_binary(string) do
string |> html_escape
end
@doc false
@spec strip_js_from_tree(html_tree, opts) :: html_tree
def strip_js_from_tree(tree, opts \\ []) do
IO.warn(
"StripJs.strip_js_from_tree is deprecated; use StripJs.clean_html_tree instead"
)
clean_html_tree(tree, opts)
end
@doc ~S"""
Removes JS vectors from the given CSS string; i.e., the contents of a
stylesheet or `<style>` tag.
Does not HTML-escape its output. Care is taken to maintain valid CSS
syntax.
Example:
iex> css = "tt { background-color: expression('alert()'); }"
iex> StripJs.clean_css(css)
"tt { background-color: removed_by_strip_js('alert()'); }"
Warning: this step is performed using regexes, not a parser, so it is
possible for innocent CSS containing either of the strings `javascript:`
or `expression(` to be mangled.
"""
@spec clean_css(String.t(), opts) :: String.t()
def clean_css(css, _opts \\ []) when is_binary(css) do
css
|> String.replace(~r/javascript \s* :/xi, "removed_by_strip_js:")
|> String.replace(~r/expression \s* \(/xi, "removed_by_strip_js(")
end
## Removes JS vectors from the given HTML attributes.
@spec clean_attrs([{String.t(), String.t()}]) :: [{String.t(), String.t()}]
defp clean_attrs(attrs) do
attrs
|> Enum.reduce([], &clean_attr/2)
|> Enum.reverse()
end
@attrs_with_urls ["href", "src", "background", "dynsrc", "lowsrc"]
@spec clean_attr({String.t(), String.t()}, [{String.t(), String.t()}]) :: [
{String.t(), String.t()}
]
defp clean_attr({attr, value}, acc) do
attr = String.downcase(attr)
cond do
attr in @attrs_with_urls &&
String.match?(value, ~r/^ \s* javascript \s* :/xi) ->
# retain the attribute so we emit valid HTML
[{attr, "#"} | acc]
String.starts_with?(attr, "on") ->
# remove on* handlers entirely
acc
:else ->
[{attr, value} | acc]
end
end
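# Illustration of the three branches above (clean_attrs/1 is private; shown
# for clarity only):
#
#   clean_attrs([
#     {"href", "javascript:alert(1)"}, # URL attribute with a js: scheme -> "#"
#     {"onclick", "steal()"},          # on* handler -> dropped entirely
#     {"class", "btn"}                 # anything else -> kept as-is
#   ])
#   #=> [{"href", "#"}, {"class", "btn"}]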
## Performs good-enough HTML escaping to prevent HTML entity attacks.
@spec html_escape(String.t()) :: String.t()
defp html_escape(html) do
html
|> String.replace("&", "&amp;")
|> String.replace("<", "&lt;")
|> String.replace(">", "&gt;")
end
## Parses the given HTML into an `t:html_tree/0` structure.
@spec parse_html(String.t(), opts) :: html_tree
defp parse_html(html, _opts), do: Floki.parse_fragment!(html)
## Converts HTML tree to string.
@spec to_html(html_tree) :: String.t()
defp to_html(tree) when is_binary(tree), do: tree
defp to_html(tree), do: tree |> Floki.raw_html(encode: false)
end
|
lib/strip_js.ex
| 0.802981 | 0.423518 |
strip_js.ex
|
starcoder
|
defmodule Akd do
@moduledoc """
A framework that makes elixir deployments a breeze. It's highly configurable,
yet easy to set up.
Although Akd is mainly written for deploying elixir apps, it can be used
for any server automation process or deploying non-elixir apps.
Akd comes with DSL which make writing automated deployments much simpler, and
mix tasks with generators which allow the use of that DSL easier.
Akd, by default, has multiple phases for deploying an application:
- `fetch`: This is where `akd` attempts to fetch the source-code which
corresponds to a release (deployed app). This can be done by using `git`,
`svn` or just `scp`.
- `init`: In this phase `akd` initializes and configures the libraries
required for the rest of the deployment process. For an elixir app, it can
be configuring `distillery` or `docker`.
- `build`: In this phase `akd` produces a deployable entity. It can be a
binary produced by distillery or source code itself or even a docker image.
- `publish`: In this phase `akd` publishes/deploys the app to the desired
destination. This can be done by `scp`, `cp` etc.
- `stop`: In this phase `akd` stops a previously running instance of the
app. (This is not required for zero downtime apps)
- `start`: In this phase `akd` starts a newly deployed instance of the app.
Each of these phases accomplish what they do through `Akd.Hook` and
`Akd.Dsl.FormHook`.
"""
@doc """
`:fetch` can be set as a runtime config
in the `config.exs` file
## Examples
when no `fetch` config is set, it returns `Akd.Fetch.Git`
iex> Akd.fetch
Akd.Fetch.Git
"""
def fetch do
config(:fetch, Akd.Fetch.Git)
end
@doc """
`:init` can be set as a runtime config
in the `config.exs` file
## Examples
when no `init` config is set, it returns `Akd.Init.Distillery`
iex> Akd.init
Akd.Init.Distillery
"""
def init do
config(:init, Akd.Init.Distillery)
end
@doc """
`:build` can be set as a runtime config
in the `config.exs` file
## Examples
when no `build` config is set, it returns `Akd.Build.Distillery`
iex> Akd.build
Akd.Build.Distillery
"""
def build do
config(:build, Akd.Build.Distillery)
end
@doc """
`:publish` can be set as a runtime config
in the `config.exs` file
## Examples
when no `publish` config is set, it returns `Akd.Publish.Distillery`
iex> Akd.publish
Akd.Publish.Distillery
"""
def publish do
config(:publish, Akd.Publish.Distillery)
end
@doc """
`:start` can be set as a runtime config
in the `config.exs` file
## Examples
when no `start` config is set, it returns `Akd.Start.Distillery`
iex> Akd.start
Akd.Start.Distillery
"""
def start do
config(:start, Akd.Start.Distillery)
end
@doc """
`:stop` can be set as a runtime config
in the `config.exs` file
## Examples
when no `stop` config is set, it returns `Akd.Stop.Distillery`
iex> Akd.stop
Akd.Stop.Distillery
"""
def stop do
config(:stop, Akd.Stop.Distillery)
end
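# Configuration sketch: any phase can be swapped via application config,
# e.g. in `config/config.exs`. `Akd.Fetch.Scp` follows the naming of the
# hooks above but is an assumption here; `MyApp.PublishHook` is hypothetical.
#
#   config :akd, Akd,
#     fetch: Akd.Fetch.Scp,
#     publish: MyApp.PublishHook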
@doc """
Gets configuration associated with the `akd` app.
## Examples
when no config is set, it returns []
iex> Akd.config
[]
"""
@spec config() :: Keyword.t()
def config() do
Application.get_env(:akd, Akd, [])
end
@doc """
Gets configuration set for a `key`, associated with the `akd` app.
## Examples
when no config is set for `key`, it returns `default`
iex> Akd.config(:random, "default")
"default"
"""
@spec config(atom(), term()) :: term()
def config(key, default \\ nil) do
config()
|> Keyword.get(key, default)
|> resolve_config(default)
end
@doc """
`resolve_config/2` resolves a `{:system, var_name}` tuple to the value of the
corresponding system environment variable, falling back to the specified
`default`. Any other value is returned as-is.
## Examples
Returns value corresponding to a system variable config or returns the `default` value:
iex> Akd.resolve_config({:system, "SOME_RANDOM_CONFIG"}, "default")
"default"
iex> Akd.resolve_config("value", "default")
"value"
"""
@deprecated """
`{:system, var_name}` is deprecated. If you need to use a System variable in
the run-time, I would be explicit about what Hooks to use in the main call
instead of configuring it.
Read this article for more details: http://michal.muskala.eu/2017/07/30/configuring-elixir-libraries.html
"""
@spec resolve_config(term(), term()) :: term()
def resolve_config({:system, var_name}, default) do
System.get_env(var_name) || default
end
def resolve_config(value, _default), do: value
end
|
lib/akd.ex
| 0.88639 | 0.579728 |
akd.ex
|
starcoder
|
defmodule HPAX.Table do
@moduledoc false
defstruct [
:max_table_size,
entries: [],
size: 0,
length: 0
]
@type t() :: %__MODULE__{
max_table_size: non_neg_integer(),
entries: [{binary(), binary()}],
size: non_neg_integer(),
length: non_neg_integer()
}
@static_table [
{":authority", nil},
{":method", "GET"},
{":method", "POST"},
{":path", "/"},
{":path", "/index.html"},
{":scheme", "http"},
{":scheme", "https"},
{":status", "200"},
{":status", "204"},
{":status", "206"},
{":status", "304"},
{":status", "400"},
{":status", "404"},
{":status", "500"},
{"accept-charset", nil},
{"accept-encoding", "gzip, deflate"},
{"accept-language", nil},
{"accept-ranges", nil},
{"accept", nil},
{"access-control-allow-origin", nil},
{"age", nil},
{"allow", nil},
{"authorization", nil},
{"cache-control", nil},
{"content-disposition", nil},
{"content-encoding", nil},
{"content-language", nil},
{"content-length", nil},
{"content-location", nil},
{"content-range", nil},
{"content-type", nil},
{"cookie", nil},
{"date", nil},
{"etag", nil},
{"expect", nil},
{"expires", nil},
{"from", nil},
{"host", nil},
{"if-match", nil},
{"if-modified-since", nil},
{"if-none-match", nil},
{"if-range", nil},
{"if-unmodified-since", nil},
{"last-modified", nil},
{"link", nil},
{"location", nil},
{"max-forwards", nil},
{"proxy-authenticate", nil},
{"proxy-authorization", nil},
{"range", nil},
{"referer", nil},
{"refresh", nil},
{"retry-after", nil},
{"server", nil},
{"set-cookie", nil},
{"strict-transport-security", nil},
{"transfer-encoding", nil},
{"user-agent", nil},
{"vary", nil},
{"via", nil},
{"www-authenticate", nil}
]
@static_table_size length(@static_table)
@dynamic_table_start @static_table_size + 1
@doc """
Creates a new HPACK table with the given maximum size.
The maximum size is not the maximum number of entries but rather the maximum size as defined in
http://httpwg.org/specs/rfc7541.html#maximum.table.size.
"""
@spec new(non_neg_integer()) :: t()
def new(max_table_size) do
%__MODULE__{max_table_size: max_table_size}
end
@doc """
Adds the given header to the given table.
If the new entry does not fit within the max table size then the oldest entries will be evicted.
Header names should be lowercase when added to the HPACK table
as per the [HTTP/2 spec](https://http2.github.io/http2-spec/#rfc.section.8.1.2):
> header field names MUST be converted to lowercase prior to their encoding in HTTP/2
"""
@spec add(t(), binary(), binary()) :: t()
def add(%__MODULE__{} = table, name, value) do
%{max_table_size: max_table_size, size: size} = table
entry_size = entry_size(name, value)
cond do
# An attempt to add an entry larger than the maximum size causes the table to be emptied of
# all existing entries and results in an empty table.
entry_size > max_table_size ->
%{table | entries: [], size: 0, length: 0}
size + entry_size > max_table_size ->
table
|> resize(max_table_size - entry_size)
|> add_header(name, value, entry_size)
true ->
add_header(table, name, value, entry_size)
end
end
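# Worked example of the size accounting above: per RFC 7541 an entry costs
# byte_size(name) + byte_size(value) + 32, so {"foo", "bar"} weighs 38.
#
#   table = HPAX.Table.new(64)
#   table = HPAX.Table.add(table, "foo", "bar") # size 38, length 1
#   table = HPAX.Table.add(table, "baz", "qux") # 38 + 38 > 64: the oldest
#   # entry is evicted before insertion, leaving size 38 and length 1 again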
defp add_header(%__MODULE__{} = table, name, value, entry_size) do
%{entries: entries, size: size, length: length} = table
%{table | entries: [{name, value} | entries], size: size + entry_size, length: length + 1}
end
@doc """
Looks up a header by index `index` in the given `table`.
Returns `{:ok, {name, value}}` if a header is found at the given `index`, otherwise returns
`:error`. `value` can be a binary in case both the header name and value are present in the
table, or `nil` if only the name is present (this can only happen in the static table).
"""
@spec lookup_by_index(t(), pos_integer()) :: {:ok, {binary(), binary() | nil}} | :error
def lookup_by_index(table, index)
# Static table
for {header, index} <- Enum.with_index(@static_table, 1) do
def lookup_by_index(%__MODULE__{}, unquote(index)), do: {:ok, unquote(header)}
end
def lookup_by_index(%__MODULE__{length: 0}, _index) do
:error
end
def lookup_by_index(%__MODULE__{entries: entries, length: length}, index)
when index in @dynamic_table_start..(@dynamic_table_start + length - 1) do
{:ok, Enum.at(entries, index - @dynamic_table_start)}
end
def lookup_by_index(%__MODULE__{}, _index) do
:error
end
@doc """
Looks up the index of a header by its name and value.
It returns:
* `{:full, index}` if the full header (name and value) are present in the table at `index`
* `{:name, index}` if `name` is present in the table but with a different value than `value`
* `:not_found` if the header name is not in the table at all
Header names should be lowercase when looked up in the HPACK table
as per the [HTTP/2 spec](https://http2.github.io/http2-spec/#rfc.section.8.1.2):
> header field names MUST be converted to lowercase prior to their encoding in HTTP/2
"""
@spec lookup_by_header(t(), binary(), binary() | nil) ::
{:full, pos_integer()} | {:name, pos_integer()} | :not_found
def lookup_by_header(table, name, value)
def lookup_by_header(%__MODULE__{entries: entries}, name, value) do
case static_lookup_by_header(name, value) do
{:full, _index} = result ->
result
{:name, index} ->
# Check if we get a full match in the dynamic table
case dynamic_lookup_by_header(entries, name, value, @dynamic_table_start, nil) do
{:full, _index} = result -> result
_other -> {:name, index}
end
:not_found ->
dynamic_lookup_by_header(entries, name, value, @dynamic_table_start, nil)
end
end
for {{name, value}, index} when is_binary(value) <- Enum.with_index(@static_table, 1) do
defp static_lookup_by_header(unquote(name), unquote(value)) do
{:full, unquote(index)}
end
end
static_table_names =
@static_table
|> Enum.map(&elem(&1, 0))
|> Enum.with_index(1)
|> Enum.uniq_by(&elem(&1, 0))
for {name, index} <- static_table_names do
defp static_lookup_by_header(unquote(name), _value) do
{:name, unquote(index)}
end
end
defp static_lookup_by_header(_name, _value) do
:not_found
end
defp dynamic_lookup_by_header([{name, value} | _rest], name, value, index, _name_index) do
{:full, index}
end
defp dynamic_lookup_by_header([{name, _} | rest], name, value, index, _name_index) do
dynamic_lookup_by_header(rest, name, value, index + 1, index)
end
defp dynamic_lookup_by_header([_other | rest], name, value, index, name_index) do
dynamic_lookup_by_header(rest, name, value, index + 1, name_index)
end
defp dynamic_lookup_by_header([], _name, _value, _index, name_index) do
if name_index, do: {:name, name_index}, else: :not_found
end
@doc """
Resizes the table.
If the existing entries do not fit in the new table size the oldest entries are evicted.
"""
@spec resize(t(), non_neg_integer()) :: t()
def resize(%__MODULE__{entries: entries, size: size} = table, new_size) do
{new_entries_reversed, new_size} = evict_towards_size(Enum.reverse(entries), size, new_size)
%{
table
| entries: Enum.reverse(new_entries_reversed),
size: new_size,
length: length(new_entries_reversed)
}
end
defp evict_towards_size([{name, value} | rest], size, max_target_size) do
new_size = size - entry_size(name, value)
if new_size <= max_target_size do
{rest, new_size}
else
evict_towards_size(rest, new_size, max_target_size)
end
end
defp evict_towards_size([], 0, _max_target_size) do
{[], 0}
end
defp entry_size(name, value) do
byte_size(name) + byte_size(value) + 32
end
# Made public to be used in tests.
@doc false
def __static_table__() do
@static_table
end
end
|
lib/hpax/table.ex
| 0.880912 | 0.622172 |
table.ex
|
starcoder
|
defmodule SSHKit do
@moduledoc """
A toolkit for performing tasks on one or more servers.
```
hosts = ["1.eg.io", {"2.eg.io", port: 2222}]
hosts = [%SSHKit.Host{name: "3.eg.io", options: [port: 2223]} | hosts]
context =
SSHKit.context(hosts)
|> SSHKit.path("/var/www/phx")
|> SSHKit.user("deploy")
|> SSHKit.group("deploy")
|> SSHKit.umask("022")
|> SSHKit.env(%{"NODE_ENV" => "production"})
:ok = SSHKit.upload(context, ".", recursive: true)
:ok = SSHKit.run(context, "yarn install", mode: :parallel)
```
"""
alias SSHKit.SCP
alias SSHKit.SSH
alias SSHKit.Context
alias SSHKit.Host
@doc """
Produces an `SSHKit.Host` struct holding the information
needed to connect to a (remote) host.
## Examples
You can pass a map with hostname and options:
```
host = SSHKit.host(%{name: "name.io", options: [port: 2222]})
# This means, that if you pass in a host struct,
# you'll get the same result. In particular:
host == SSHKit.host(host)
```
…or, alternatively, a tuple with hostname and options:
```
host = SSHKit.host({"name.io", port: 2222})
```
See `host/2` for additional details and examples.
"""
def host(%{name: name, options: options}) do
%Host{name: name, options: options}
end
def host({name, options}) do
%Host{name: name, options: options}
end
@doc """
Produces an `SSHKit.Host` struct holding the information
needed to connect to a (remote) host.
## Examples
In its most basic version, you just pass a hostname and all other options
will use the defaults:
```
host = SSHKit.host("name.io")
```
If you wish to provide additional host options, e.g. a non-standard port,
you can pass a keyword list as the second argument:
```
host = SSHKit.host("name.io", port: 2222)
```
One or many of these hosts can then be used to create an execution context
in which commands can be executed:
```
host
|> SSHKit.context()
|> SSHKit.run("echo \"That was fun\"")
```
See `host/1` for additional ways of specifying host details.
"""
def host(host, options \\ [])
def host(name, options) when is_binary(name) do
%Host{name: name, options: options}
end
def host(%{name: name, options: options}, defaults) do
%Host{name: name, options: Keyword.merge(defaults, options)}
end
def host({name, options}, defaults) do
%Host{name: name, options: Keyword.merge(defaults, options)}
end
@doc """
Takes one or more (remote) hosts and creates an execution context in which
remote commands can be run. Accepts any form of host specification also
accepted by `host/1` and `host/2`, i.e. binaries, maps and 2-tuples.
See `path/2`, `user/2`, `group/2`, `umask/2`, and `env/2`
for details on how to derive variations of a context.
## Example
Create an execution context for two hosts. Commands issued in this context
will be executed on both hosts.
```
hosts = ["10.0.0.1", "10.0.0.2"]
context = SSHKit.context(hosts)
```
Create a context for hosts with different connection options:
```
hosts = [{"10.0.0.3", port: 2223}, %{name: "10.0.0.4", options: [port: 2224]}]
context = SSHKit.context(hosts)
```
Any shared options can be specified in the second argument.
Here we add a user and port for all hosts.
```
hosts = ["10.0.0.1", "10.0.0.2"]
options = [user: "admin", port: 2222]
context = SSHKit.context(hosts, options)
```
"""
def context(hosts, defaults \\ []) do
hosts =
hosts
|> List.wrap()
|> Enum.map(&host(&1, defaults))
%Context{hosts: hosts}
end
@doc """
Changes the working directory commands are executed in for the given context.
Returns a new, derived context for easy chaining.
## Example
Create `/var/www/app/config.json`:
```
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.path("/var/www/app")
|> SSHKit.run("touch config.json")
```
"""
def path(context, path) do
%Context{context | path: path}
end
@doc """
Changes the file creation mode mask affecting default file and directory
permissions.
Returns a new, derived context for easy chaining.
## Example
Create `precious.txt`, readable and writable only for the logged-in user:
```
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.umask("077")
|> SSHKit.run("touch precious.txt")
```
"""
def umask(context, mask) do
%Context{context | umask: mask}
end
@doc """
Specifies the user under whose name commands are executed.
That user might be different than the user with which
ssh connects to the remote host.
Returns a new, derived context for easy chaining.
## Example
All commands executed in the created `context` will run as `deploy_user`,
although we use the `login_user` to log in to the remote host:
```
context =
{"10.0.0.1", port: 3000, user: "login_user", password: "<PASSWORD>"}
|> SSHKit.context()
|> SSHKit.user("deploy_user")
```
"""
def user(context, name) do
%Context{context | user: name}
end
@doc """
Specifies the group commands are executed with.
Returns a new, derived context for easy chaining.
## Example
All commands executed in the created `context` will run in group `www`:
```
context =
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.group("www")
```
"""
def group(context, name) do
%Context{context | group: name}
end
@doc """
Defines new environment variables or overrides existing ones
for a given context.
Returns a new, derived context for easy chaining.
## Examples
Setting `NODE_ENV=production`:
```
context =
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.env(%{"NODE_ENV" => "production"})
# Run the npm start script with NODE_ENV=production
SSHKit.run(context, "npm start")
```
Modifying the `PATH`:
```
context =
"10.0.0.1"
|> SSHKit.context()
|> SSHKit.env(%{"PATH" => "$HOME/.rbenv/shims:$PATH"})
# Execute the rbenv-installed ruby to print its version
SSHKit.run(context, "ruby --version")
```
"""
def env(context, map) do
%Context{context | env: map}
end
@doc ~S"""
Executes a command in the given context.
Returns a list of tuples, one for each host in the context.
The resulting tuples have the form `{:ok, output, exit_code}` –
as returned by `SSHKit.SSH.run/3`:
* `exit_code` is the number with which the executed command returned.
If everything went well, that usually is `0`.
* `output` is a keyword list of the output collected from the command.
It has the form:
```
[
stdout: "output on standard out",
stderr: "output on standard error",
stdout: "some more normal output",
…
]
```
## Example
Run a command and verify its output:
```
[{:ok, output, 0}] =
"example.io"
|> SSHKit.context()
|> SSHKit.run("echo \"Hello World!\"")
stdout =
output
|> Keyword.get_values(:stdout)
|> Enum.join()
assert "Hello World!\n" == stdout
```
"""
def run(context, command) do
cmd = Context.build(context, command)
run = fn host ->
{:ok, conn} = SSH.connect(host.name, host.options)
res = SSH.run(conn, cmd)
:ok = SSH.close(conn)
res
end
Enum.map(context.hosts, run)
end
@doc ~S"""
Upload a file or files to the given context.
Returns a list of `:ok` or `{:error, reason}` - one for each host.
Possible options are:
* `as: "remote.txt"` - specify the name of the uploaded file/directory
* all options accepted by `SSHKit.SCP.Upload.transfer/4`
## Examples
Upload all files and folders in current directory to "/workspace":
```
[:ok] =
"example.io"
|> SSHKit.context()
|> SSHKit.path("/workspace")
|> SSHKit.upload(".", recursive: true)
```
Upload file to different name on host:
```
[:ok] =
"example.io"
|> SSHKit.context()
|> SSHKit.upload("local.txt", as: "remote.txt")
```
"""
def upload(context, path, options \\ []) do
as_path = Keyword.get(options, :as, Path.basename(path))
remote_path = build_remote_path(context, as_path)
run = fn host ->
{:ok, res} = SSH.connect host.name, host.options, fn conn ->
SCP.upload(conn, path, remote_path, options)
end
res
end
Enum.map(context.hosts, run)
end
@doc ~S"""
Download a file or files from the given context.
Returns a list of `:ok` or `{:error, reason}` - one for each host.
Possible options are:
* `as: "local.txt"` - specify the name of the downloaded file/directory
* all options accepted by `SSHKit.SCP.Download.transfer/4`
## Examples
Download all files and folders in context directory to current working directory:
```
[:ok] =
"example.io"
|> SSHKit.context()
|> SSHKit.path("/workspace")
|> SSHKit.download(".", recursive: true)
```
Download file to different local name:
```
[:ok] =
"example.io"
|> SSHKit.context()
|> SSHKit.download("remote.txt", as: "local.txt")
```
"""
def download(context, path, options \\ []) do
remote = build_remote_path(context, path)
local = Keyword.get(options, :as, Path.basename(path))
run = fn host ->
{:ok, res} = SSH.connect host.name, host.options, fn conn ->
SCP.download(conn, remote, local, options)
end
res
end
Enum.map(context.hosts, run)
end
defp build_remote_path(context, path) do
Path.absname(path, context.path || ".")
end
end
|
lib/sshkit.ex
| 0.901407 | 0.795062 |
sshkit.ex
|
starcoder
|