Please provide a description of the function:

def save_predefined(self, predefined, client=None):
    predefined = self.validate_predefined(predefined)
    self._save(None, predefined, client)

Save this ACL for the current bucket using a predefined ACL.

If :attr:`user_project` is set, bills the API request to that project.

:type predefined: str
:param predefined: An identifier for a predefined ACL. Must be one
    of the keys in :attr:`PREDEFINED_JSON_ACLS`
    or :attr:`PREDEFINED_XML_ACLS` (which will be
    aliased to the corresponding JSON name).
    If passed, `acl` must be None.

:type client: :class:`~google.cloud.storage.client.Client` or
    ``NoneType``
:param client: Optional. The client to use. If not passed, falls back
    to the ``client`` stored on the ACL's parent.
Please provide a description of the function:

def incident_path(cls, project, incident):
    return google.api_core.path_template.expand(
        "projects/{project}/incidents/{incident}",
        project=project,
        incident=incident,
    )

Return a fully-qualified incident string.

Please provide a description of the function:

def annotation_path(cls, project, incident, annotation):
    return google.api_core.path_template.expand(
        "projects/{project}/incidents/{incident}/annotations/{annotation}",
        project=project,
        incident=incident,
        annotation=annotation,
    )

Return a fully-qualified annotation string.

Please provide a description of the function:

def artifact_path(cls, project, incident, artifact):
    return google.api_core.path_template.expand(
        "projects/{project}/incidents/{incident}/artifacts/{artifact}",
        project=project,
        incident=incident,
        artifact=artifact,
    )

Return a fully-qualified artifact string.

Please provide a description of the function:

def role_assignment_path(cls, project, incident, role_assignment):
    return google.api_core.path_template.expand(
        "projects/{project}/incidents/{incident}/roleAssignments/{role_assignment}",
        project=project,
        incident=incident,
        role_assignment=role_assignment,
    )

Return a fully-qualified role_assignment string.

Please provide a description of the function:

def subscription_path(cls, project, incident, subscription):
    return google.api_core.path_template.expand(
        "projects/{project}/incidents/{incident}/subscriptions/{subscription}",
        project=project,
        incident=incident,
        subscription=subscription,
    )

Return a fully-qualified subscription string.

Please provide a description of the function:

def tag_path(cls, project, incident, tag):
    return google.api_core.path_template.expand(
        "projects/{project}/incidents/{incident}/tags/{tag}",
        project=project,
        incident=incident,
        tag=tag,
    )

Return a fully-qualified tag string.

Please provide a description of the function:

def signal_path(cls, project, signal):
    return google.api_core.path_template.expand(
        "projects/{project}/signals/{signal}", project=project, signal=signal
    )

Return a fully-qualified signal string.
Please provide a description of the function:

def create_annotation(
    self,
    parent,
    annotation,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "create_annotation" not in self._inner_api_calls:
        self._inner_api_calls[
            "create_annotation"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.create_annotation,
            default_retry=self._method_configs["CreateAnnotation"].retry,
            default_timeout=self._method_configs["CreateAnnotation"].timeout,
            client_info=self._client_info,
        )

    request = incidents_service_pb2.CreateAnnotationRequest(
        parent=parent, annotation=annotation
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["create_annotation"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Creates an annotation on an existing incident. Only 'text/plain' and
'text/markdown' annotations can be created via this method.

Example:
    >>> from google.cloud import irm_v1alpha2
    >>>
    >>> client = irm_v1alpha2.IncidentServiceClient()
    >>>
    >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
    >>>
    >>> # TODO: Initialize `annotation`:
    >>> annotation = {}
    >>>
    >>> response = client.create_annotation(parent, annotation)

Args:
    parent (str): Resource name of the incident, for example,
        "projects/{project_id}/incidents/{incident_id}".
    annotation (Union[dict, ~google.cloud.irm_v1alpha2.types.Annotation]): Only annotation.content is an input argument.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.Annotation`
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.irm_v1alpha2.types.Annotation` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def escalate_incident(
    self,
    incident,
    update_mask=None,
    subscriptions=None,
    tags=None,
    roles=None,
    artifacts=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "escalate_incident" not in self._inner_api_calls:
        self._inner_api_calls[
            "escalate_incident"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.escalate_incident,
            default_retry=self._method_configs["EscalateIncident"].retry,
            default_timeout=self._method_configs["EscalateIncident"].timeout,
            client_info=self._client_info,
        )

    request = incidents_service_pb2.EscalateIncidentRequest(
        incident=incident,
        update_mask=update_mask,
        subscriptions=subscriptions,
        tags=tags,
        roles=roles,
        artifacts=artifacts,
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("incident.name", incident.name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["escalate_incident"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Escalates an incident.

Example:
    >>> from google.cloud import irm_v1alpha2
    >>>
    >>> client = irm_v1alpha2.IncidentServiceClient()
    >>>
    >>> # TODO: Initialize `incident`:
    >>> incident = {}
    >>>
    >>> response = client.escalate_incident(incident)

Args:
    incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): The incident to escalate with the new values.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.Incident`
    update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`
    subscriptions (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]]): Subscriptions to add or update.
        Existing subscriptions with the same channel and address as a
        subscription in the list will be updated.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.Subscription`
    tags (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Tag]]): Tags to add. Tags identical to existing tags will be ignored.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.Tag`
    roles (list[Union[dict, ~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment]]): Roles to add or update.
        Existing roles with the same type (and title, for TYPE_OTHER roles)
        will be updated.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment`
    artifacts (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]]): Artifacts to add. All artifacts are added without checking for duplicates.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.Artifact`
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.irm_v1alpha2.types.EscalateIncidentResponse` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def send_shift_handoff(
    self,
    parent,
    recipients,
    subject,
    cc=None,
    notes_content_type=None,
    notes_content=None,
    incidents=None,
    preview_only=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "send_shift_handoff" not in self._inner_api_calls:
        self._inner_api_calls[
            "send_shift_handoff"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.send_shift_handoff,
            default_retry=self._method_configs["SendShiftHandoff"].retry,
            default_timeout=self._method_configs["SendShiftHandoff"].timeout,
            client_info=self._client_info,
        )

    request = incidents_service_pb2.SendShiftHandoffRequest(
        parent=parent,
        recipients=recipients,
        subject=subject,
        cc=cc,
        notes_content_type=notes_content_type,
        notes_content=notes_content,
        incidents=incidents,
        preview_only=preview_only,
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["send_shift_handoff"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Sends a summary of the shift for oncall handoff.

Example:
    >>> from google.cloud import irm_v1alpha2
    >>>
    >>> client = irm_v1alpha2.IncidentServiceClient()
    >>>
    >>> parent = client.project_path('[PROJECT]')
    >>>
    >>> # TODO: Initialize `recipients`:
    >>> recipients = []
    >>>
    >>> # TODO: Initialize `subject`:
    >>> subject = ''
    >>>
    >>> response = client.send_shift_handoff(parent, recipients, subject)

Args:
    parent (str): The resource name of the Stackdriver project that the
        handoff is being sent from, for example, ``projects/{project_id}``.
    recipients (list[str]): Email addresses of the recipients of the handoff,
        for example, "user@example.com". Must contain at least one entry.
    subject (str): The subject of the email. Required.
    cc (list[str]): Email addresses that should be CC'd on the handoff. Optional.
    notes_content_type (str): Content type string, for example, 'text/plain' or 'text/html'.
    notes_content (str): Additional notes to be included in the handoff. Optional.
    incidents (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]]): The set of incidents that should be included in the handoff. Optional.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.irm_v1alpha2.types.Incident`
    preview_only (bool): If set to true a ShiftHandoffResponse will be returned
        but the handoff will not actually be sent.
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.irm_v1alpha2.types.SendShiftHandoffResponse` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def bigtable_admins(self):
    result = set()
    for member in self._bindings.get(BIGTABLE_ADMIN_ROLE, ()):
        result.add(member)
    return frozenset(result)

Access to bigtable.admin role members.

For example:

.. literalinclude:: snippets.py
    :start-after: [START bigtable_admins_policy]
    :end-before: [END bigtable_admins_policy]

Please provide a description of the function:

def bigtable_readers(self):
    result = set()
    for member in self._bindings.get(BIGTABLE_READER_ROLE, ()):
        result.add(member)
    return frozenset(result)

Access to bigtable.reader role members.

For example:

.. literalinclude:: snippets.py
    :start-after: [START bigtable_readers_policy]
    :end-before: [END bigtable_readers_policy]

Please provide a description of the function:

def bigtable_users(self):
    result = set()
    for member in self._bindings.get(BIGTABLE_USER_ROLE, ()):
        result.add(member)
    return frozenset(result)

Access to bigtable.user role members.

For example:

.. literalinclude:: snippets.py
    :start-after: [START bigtable_users_policy]
    :end-before: [END bigtable_users_policy]

Please provide a description of the function:

def bigtable_viewers(self):
    result = set()
    for member in self._bindings.get(BIGTABLE_VIEWER_ROLE, ()):
        result.add(member)
    return frozenset(result)

Access to bigtable.viewer role members.

For example:

.. literalinclude:: snippets.py
    :start-after: [START bigtable_viewers_policy]
    :end-before: [END bigtable_viewers_policy]
Please provide a description of the function:

def from_pb(cls, policy_pb):
    policy = cls(policy_pb.etag, policy_pb.version)
    for binding in policy_pb.bindings:
        policy[binding.role] = sorted(binding.members)
    return policy

Factory: create a policy from a protobuf message.

Args:
    policy_pb (google.iam.policy_pb2.Policy): message returned by
        ``get_iam_policy`` gRPC API.

Returns:
    :class:`Policy`: the parsed policy
Please provide a description of the function:

def to_pb(self):
    return policy_pb2.Policy(
        etag=self.etag,
        version=self.version or 0,
        bindings=[
            policy_pb2.Binding(role=role, members=sorted(self[role]))
            for role in self
        ],
    )

Render a protobuf message.

Returns:
    google.iam.policy_pb2.Policy: a message to be passed to the
    ``set_iam_policy`` gRPC API.
Please provide a description of the function:

def from_api_repr(cls, resource):
    etag = resource.get("etag")
    if etag is not None:
        resource = resource.copy()
        resource["etag"] = base64.b64decode(etag.encode("ascii"))
    return super(Policy, cls).from_api_repr(resource)

Factory: create a policy from a JSON resource.

Overrides the base class version to store :attr:`etag` as bytes.

Args:
    resource (dict): JSON policy resource returned by the
        ``getIamPolicy`` REST API.

Returns:
    :class:`Policy`: the parsed policy
Please provide a description of the function:

def to_api_repr(self):
    resource = super(Policy, self).to_api_repr()
    if self.etag is not None:
        resource["etag"] = base64.b64encode(self.etag).decode("ascii")
    return resource

Render a JSON policy resource.

Overrides the base class version to convert :attr:`etag` from bytes
to JSON-compatible base64-encoded text.

Returns:
    dict: a JSON resource to be passed to the
    ``setIamPolicy`` REST API.
Please provide a description of the function:

def _check_ddl_statements(value):
    if not all(isinstance(line, six.string_types) for line in value):
        raise ValueError("Pass a list of strings")

    if any("create database" in line.lower() for line in value):
        raise ValueError("Do not pass a 'CREATE DATABASE' statement")

    return tuple(value)

Validate DDL statements used to define database schema.

See
https://cloud.google.com/spanner/docs/data-definition-language

:type value: list of string
:param value: DDL statements, excluding the 'CREATE DATABASE' statement

:rtype: tuple
:returns: tuple of validated DDL statement strings.
:raises ValueError:
    if elements in ``value`` are not strings, or if ``value`` contains
    a ``CREATE DATABASE`` statement.
Please provide a description of the function:

def from_pb(cls, database_pb, instance, pool=None):
    match = _DATABASE_NAME_RE.match(database_pb.name)
    if match is None:
        raise ValueError(
            "Database protobuf name was not in the expected format.",
            database_pb.name,
        )
    if match.group("project") != instance._client.project:
        raise ValueError(
            "Project ID on database does not match the "
            "project ID on the instance's client"
        )
    instance_id = match.group("instance_id")
    if instance_id != instance.instance_id:
        raise ValueError(
            "Instance ID on database does not match the "
            "Instance ID on the instance"
        )
    database_id = match.group("database_id")
    return cls(database_id, instance, pool=pool)

Creates an instance of this class from a protobuf.

:type database_pb:
    :class:`google.spanner.admin.database.v1.Database`
:param database_pb: A database protobuf object.

:type instance: :class:`~google.cloud.spanner_v1.instance.Instance`
:param instance: The instance that owns the database.

:type pool: concrete subclass of
    :class:`~google.cloud.spanner_v1.pool.AbstractSessionPool`.
:param pool: (Optional) session pool to be used by database.

:rtype: :class:`Database`
:returns: The database parsed from the protobuf response.
:raises ValueError:
    if the database name does not match the expected format
    or if the parsed project ID does not match the project ID
    on the instance's client, or if the parsed instance ID does
    not match the instance's ID.
Please provide a description of the function:

def spanner_api(self):
    if self._spanner_api is None:
        credentials = self._instance._client.credentials
        if isinstance(credentials, google.auth.credentials.Scoped):
            credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,))
        self._spanner_api = SpannerClient(
            credentials=credentials, client_info=_CLIENT_INFO
        )
    return self._spanner_api

Helper for session-related API calls.
Please provide a description of the function:

def create(self):
    api = self._instance._client.database_admin_api
    metadata = _metadata_with_prefix(self.name)
    db_name = self.database_id
    if "-" in db_name:
        db_name = "`%s`" % (db_name,)
    future = api.create_database(
        parent=self._instance.name,
        create_statement="CREATE DATABASE %s" % (db_name,),
        extra_statements=list(self._ddl_statements),
        metadata=metadata,
    )
    return future

Create this database within its instance.

Includes any configured schema assigned to :attr:`ddl_statements`.

See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase

:rtype: :class:`~google.api_core.operation.Operation`
:returns: a future used to poll the status of the create request
:raises Conflict: if the database already exists
:raises NotFound: if the instance owning the database does not exist
Please provide a description of the function:

def exists(self):
    api = self._instance._client.database_admin_api
    metadata = _metadata_with_prefix(self.name)

    try:
        api.get_database_ddl(self.name, metadata=metadata)
    except NotFound:
        return False
    return True

Test whether this database exists.

See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDDL

:rtype: bool
:returns: True if the database exists, else False.
Please provide a description of the function:

def reload(self):
    api = self._instance._client.database_admin_api
    metadata = _metadata_with_prefix(self.name)
    response = api.get_database_ddl(self.name, metadata=metadata)
    self._ddl_statements = tuple(response.statements)

Reload this database.

Refresh any configured schema into :attr:`ddl_statements`.

See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDDL

:raises NotFound: if the database does not exist
Please provide a description of the function:

def update_ddl(self, ddl_statements, operation_id=""):
    client = self._instance._client
    api = client.database_admin_api
    metadata = _metadata_with_prefix(self.name)

    future = api.update_database_ddl(
        self.name, ddl_statements, operation_id=operation_id, metadata=metadata
    )

    return future

Update DDL for this database.

Apply any configured schema from :attr:`ddl_statements`.

See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl

:type ddl_statements: Sequence[str]
:param ddl_statements: a list of DDL statements to use on this database

:type operation_id: str
:param operation_id: (optional) a string ID for the long-running operation

:rtype: :class:`google.api_core.operation.Operation`
:returns: an operation instance
:raises NotFound: if the database does not exist
Please provide a description of the function:

def drop(self):
    api = self._instance._client.database_admin_api
    metadata = _metadata_with_prefix(self.name)
    api.drop_database(self.name, metadata=metadata)

Drop this database.

See
https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase
Please provide a description of the function:

def execute_partitioned_dml(self, dml, params=None, param_types=None):
    if params is not None:
        if param_types is None:
            raise ValueError("Specify 'param_types' when passing 'params'.")
        params_pb = Struct(
            fields={key: _make_value_pb(value) for key, value in params.items()}
        )
    else:
        params_pb = None

    api = self.spanner_api

    txn_options = TransactionOptions(
        partitioned_dml=TransactionOptions.PartitionedDml()
    )

    metadata = _metadata_with_prefix(self.name)

    with SessionCheckout(self._pool) as session:
        txn = api.begin_transaction(session.name, txn_options, metadata=metadata)

        txn_selector = TransactionSelector(id=txn.id)

        restart = functools.partial(
            api.execute_streaming_sql,
            session.name,
            dml,
            transaction=txn_selector,
            params=params_pb,
            param_types=param_types,
            metadata=metadata,
        )
        iterator = _restart_on_unavailable(restart)

        result_set = StreamedResultSet(iterator)
        list(result_set)  # consume all partials

        return result_set.stats.row_count_lower_bound

Execute a partitionable DML statement.

:type dml: str
:param dml: DML statement

:type params: dict, {str -> column value}
:param params: values for parameter replacement. Keys must match
    the names used in ``dml``.

:type param_types: dict[str -> Union[dict, .types.Type]]
:param param_types:
    (Optional) maps explicit types for one or more param values;
    required if parameters are passed.

:rtype: int
:returns: Count of rows affected by the DML statement.
Please provide a description of the function:

def batch_snapshot(self, read_timestamp=None, exact_staleness=None):
    return BatchSnapshot(
        self, read_timestamp=read_timestamp, exact_staleness=exact_staleness
    )

Return an object which wraps a batch read / query.

:type read_timestamp: :class:`datetime.datetime`
:param read_timestamp: Execute all reads at the given timestamp.

:type exact_staleness: :class:`datetime.timedelta`
:param exact_staleness: Execute all reads at a timestamp that is
    ``exact_staleness`` old.

:rtype: :class:`~google.cloud.spanner_v1.database.BatchSnapshot`
:returns: new wrapper
Please provide a description of the function:

def run_in_transaction(self, func, *args, **kw):
    # Sanity check: Is there a transaction already running?
    # If there is, then raise a red flag. Otherwise, mark that this one
    # is running.
    if getattr(self._local, "transaction_running", False):
        raise RuntimeError("Spanner does not support nested transactions.")
    self._local.transaction_running = True

    # Check out a session and run the function in a transaction; once
    # done, flip the sanity check bit back.
    try:
        with SessionCheckout(self._pool) as session:
            return session.run_in_transaction(func, *args, **kw)
    finally:
        self._local.transaction_running = False

Perform a unit of work in a transaction, retrying on abort.

:type func: callable
:param func: takes a required positional argument, the transaction,
    and additional positional / keyword arguments as supplied
    by the caller.

:type args: tuple
:param args: additional positional arguments to be passed to ``func``.

:type kw: dict
:param kw: optional keyword arguments to be passed to ``func``.
    If passed, "timeout_secs" will be removed and used to
    override the default timeout.

:rtype: :class:`datetime.datetime`
:returns: timestamp of committed transaction
Please provide a description of the function:

def from_dict(cls, database, mapping):
    instance = cls(database)
    session = instance._session = database.session()
    session._session_id = mapping["session_id"]
    snapshot = instance._snapshot = session.snapshot()
    snapshot._transaction_id = mapping["transaction_id"]
    return instance

Reconstruct an instance from a mapping.

:type database: :class:`~google.cloud.spanner.database.Database`
:param database: database to use

:type mapping: mapping
:param mapping: serialized state of the instance

:rtype: :class:`BatchSnapshot`
Please provide a description of the function:

def to_dict(self):
    session = self._get_session()
    snapshot = self._get_snapshot()
    return {
        "session_id": session._session_id,
        "transaction_id": snapshot._transaction_id,
    }

Return state as a dictionary.

Result can be used to serialize the instance and reconstitute
it later using :meth:`from_dict`.

:rtype: dict
Please provide a description of the function:

def _get_session(self):
    if self._session is None:
        session = self._session = self._database.session()
        session.create()
    return self._session

Create session as needed.

.. note::

    Caller is responsible for cleaning up the session after
    all partitions have been processed.
Please provide a description of the function:

def _get_snapshot(self):
    if self._snapshot is None:
        self._snapshot = self._get_session().snapshot(
            read_timestamp=self._read_timestamp,
            exact_staleness=self._exact_staleness,
            multi_use=True,
        )
        self._snapshot.begin()
    return self._snapshot

Create snapshot if needed.
Please provide a description of the function:

def generate_read_batches(
    self,
    table,
    columns,
    keyset,
    index="",
    partition_size_bytes=None,
    max_partitions=None,
):
    partitions = self._get_snapshot().partition_read(
        table=table,
        columns=columns,
        keyset=keyset,
        index=index,
        partition_size_bytes=partition_size_bytes,
        max_partitions=max_partitions,
    )

    read_info = {
        "table": table,
        "columns": columns,
        "keyset": keyset._to_dict(),
        "index": index,
    }
    for partition in partitions:
        yield {"partition": partition, "read": read_info.copy()}

Start a partitioned batch read operation.

Uses the ``PartitionRead`` API request to initiate the partitioned
read. Returns a list of batch information needed to perform the
actual reads.

:type table: str
:param table: name of the table from which to fetch data

:type columns: list of str
:param columns: names of columns to be retrieved

:type keyset: :class:`~google.cloud.spanner_v1.keyset.KeySet`
:param keyset: keys / ranges identifying rows to be retrieved

:type index: str
:param index: (Optional) name of index to use, rather than the
    table's primary key

:type partition_size_bytes: int
:param partition_size_bytes:
    (Optional) desired size for each partition generated. The service
    uses this as a hint, the actual partition size may differ.

:type max_partitions: int
:param max_partitions:
    (Optional) desired maximum number of partitions generated. The
    service uses this as a hint, the actual number of partitions may
    differ.

:rtype: iterable of dict
:returns:
    mappings of information used to perform actual partitioned reads
    via :meth:`process_read_batch`.
Please provide a description of the function:

def process_read_batch(self, batch):
    kwargs = copy.deepcopy(batch["read"])
    keyset_dict = kwargs.pop("keyset")
    kwargs["keyset"] = KeySet._from_dict(keyset_dict)
    return self._get_snapshot().read(partition=batch["partition"], **kwargs)

Process a single, partitioned read.

:type batch: mapping
:param batch:
    one of the mappings returned from an earlier call to
    :meth:`generate_read_batches`.

:rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
:returns: a result set instance which can be used to consume rows.
Please provide a description of the function:

def generate_query_batches(
    self,
    sql,
    params=None,
    param_types=None,
    partition_size_bytes=None,
    max_partitions=None,
):
    partitions = self._get_snapshot().partition_query(
        sql=sql,
        params=params,
        param_types=param_types,
        partition_size_bytes=partition_size_bytes,
        max_partitions=max_partitions,
    )

    query_info = {"sql": sql}
    if params:
        query_info["params"] = params
        query_info["param_types"] = param_types

    for partition in partitions:
        yield {"partition": partition, "query": query_info}

Start a partitioned query operation.

Uses the ``PartitionQuery`` API request to start a partitioned
query operation. Returns a list of batch information needed to
perform the actual queries.

:type sql: str
:param sql: SQL query statement

:type params: dict, {str -> column value}
:param params: values for parameter replacement. Keys must match
    the names used in ``sql``.

:type param_types: dict[str -> Union[dict, .types.Type]]
:param param_types:
    (Optional) maps explicit types for one or more param values;
    required if parameters are passed.

:type partition_size_bytes: int
:param partition_size_bytes:
    (Optional) desired size for each partition generated. The service
    uses this as a hint, the actual partition size may differ.

:type max_partitions: int
:param max_partitions:
    (Optional) desired maximum number of partitions generated. The
    service uses this as a hint, the actual number of partitions may
    differ.

:rtype: iterable of dict
:returns:
    mappings of information used to perform actual partitioned queries
    via :meth:`process_query_batch`.
Please provide a description of the function:

def process(self, batch):
    if "query" in batch:
        return self.process_query_batch(batch)
    if "read" in batch:
        return self.process_read_batch(batch)
    raise ValueError("Invalid batch")

Process a single, partitioned query or read.

:type batch: mapping
:param batch:
    one of the mappings returned from an earlier call to
    :meth:`generate_query_batches` or :meth:`generate_read_batches`.

:rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet`
:returns: a result set instance which can be used to consume rows.
:raises ValueError: if batch does not contain either 'read' or 'query'
Please provide a description of the function:

def location_path(cls, project, location):
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}",
        project=project,
        location=location,
    )

Return a fully-qualified location string.

Please provide a description of the function:

def model_path(cls, project, location, model):
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/models/{model}",
        project=project,
        location=location,
        model=model,
    )

Return a fully-qualified model string.

Please provide a description of the function:

def model_evaluation_path(cls, project, location, model, model_evaluation):
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}",
        project=project,
        location=location,
        model=model,
        model_evaluation=model_evaluation,
    )

Return a fully-qualified model_evaluation string.

Please provide a description of the function:

def annotation_spec_path(cls, project, location, dataset, annotation_spec):
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}",
        project=project,
        location=location,
        dataset=dataset,
        annotation_spec=annotation_spec,
    )

Return a fully-qualified annotation_spec string.

Please provide a description of the function:

def table_spec_path(cls, project, location, dataset, table_spec):
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}",
        project=project,
        location=location,
        dataset=dataset,
        table_spec=table_spec,
    )

Return a fully-qualified table_spec string.

Please provide a description of the function:

def column_spec_path(cls, project, location, dataset, table_spec, column_spec):
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}/columnSpecs/{column_spec}",
        project=project,
        location=location,
        dataset=dataset,
        table_spec=table_spec,
        column_spec=column_spec,
    )

Return a fully-qualified column_spec string.
Please provide a description of the function:

def create_dataset(
    self,
    parent,
    dataset,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "create_dataset" not in self._inner_api_calls:
        self._inner_api_calls[
            "create_dataset"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.create_dataset,
            default_retry=self._method_configs["CreateDataset"].retry,
            default_timeout=self._method_configs["CreateDataset"].timeout,
            client_info=self._client_info,
        )

    request = service_pb2.CreateDatasetRequest(parent=parent, dataset=dataset)
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["create_dataset"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Creates a dataset.

Example:
    >>> from google.cloud import automl_v1beta1
    >>>
    >>> client = automl_v1beta1.AutoMlClient()
    >>>
    >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
    >>>
    >>> # TODO: Initialize `dataset`:
    >>> dataset = {}
    >>>
    >>> response = client.create_dataset(parent, dataset)

Args:
    parent (str): The resource name of the project to create the dataset for.
    dataset (Union[dict, ~google.cloud.automl_v1beta1.types.Dataset]): The dataset to create.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.automl_v1beta1.types.Dataset`
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.automl_v1beta1.types.Dataset` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def delete_dataset(
    self,
    name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "delete_dataset" not in self._inner_api_calls:
        self._inner_api_calls[
            "delete_dataset"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.delete_dataset,
            default_retry=self._method_configs["DeleteDataset"].retry,
            default_timeout=self._method_configs["DeleteDataset"].timeout,
            client_info=self._client_info,
        )

    request = service_pb2.DeleteDatasetRequest(name=name)
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    operation = self._inner_api_calls["delete_dataset"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
    return google.api_core.operation.from_gapic(
        operation,
        self.transport._operations_client,
        empty_pb2.Empty,
        metadata_type=proto_operations_pb2.OperationMetadata,
    )

Deletes a dataset and all of its contents. Returns empty response in the
``response`` field when it completes, and ``delete_details`` in the
``metadata`` field.

Example:
    >>> from google.cloud import automl_v1beta1
    >>>
    >>> client = automl_v1beta1.AutoMlClient()
    >>>
    >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')
    >>>
    >>> response = client.delete_dataset(name)
    >>>
    >>> def callback(operation_future):
    ...     # Handle result.
    ...     result = operation_future.result()
    >>>
    >>> response.add_done_callback(callback)
    >>>
    >>> # Handle metadata.
    >>> metadata = response.metadata()

Args:
    name (str): The resource name of the dataset to delete.
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def create_model(
    self,
    parent,
    model,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "create_model" not in self._inner_api_calls:
        self._inner_api_calls[
            "create_model"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.create_model,
            default_retry=self._method_configs["CreateModel"].retry,
            default_timeout=self._method_configs["CreateModel"].timeout,
            client_info=self._client_info,
        )

    request = service_pb2.CreateModelRequest(parent=parent, model=model)
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    operation = self._inner_api_calls["create_model"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
    return google.api_core.operation.from_gapic(
        operation,
        self.transport._operations_client,
        model_pb2.Model,
        metadata_type=proto_operations_pb2.OperationMetadata,
    )

Creates a model. Returns a Model in the ``response`` field when it
completes. When you create a model, several model evaluations are
created for it: a global evaluation, and one evaluation for each
annotation spec.

Example:
    >>> from google.cloud import automl_v1beta1
    >>>
    >>> client = automl_v1beta1.AutoMlClient()
    >>>
    >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
    >>>
    >>> # TODO: Initialize `model`:
    >>> model = {}
    >>>
    >>> response = client.create_model(parent, model)
    >>>
    >>> def callback(operation_future):
    ...     # Handle result.
    ...     result = operation_future.result()
    >>>
    >>> response.add_done_callback(callback)
    >>>
    >>> # Handle metadata.
    >>> metadata = response.metadata()

Args:
    parent (str): Resource name of the parent project where the model is being created.
    model (Union[dict, ~google.cloud.automl_v1beta1.types.Model]): The model to create.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.automl_v1beta1.types.Model`
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
    delay = initial
    while True:
        # Introduce jitter by yielding a delay that is uniformly distributed
        # to average out to the delay time.
        yield min(random.uniform(0.0, delay * 2.0), maximum)
        delay = delay * multiplier

Generates sleep intervals based on the exponential back-off algorithm.

This implements the `Truncated Exponential Back-off`_ algorithm.

.. _Truncated Exponential Back-off:
    https://cloud.google.com/storage/docs/exponential-backoff

Args:
    initial (float): The minimum amount of time to delay. This must
        be greater than 0.
    maximum (float): The maximum amount of time to delay.
    multiplier (float): The multiplier applied to the delay.

Yields:
    float: successive sleep intervals.
Please provide a description of the function:

def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
    if deadline is not None:
        deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
            seconds=deadline
        )
    else:
        deadline_datetime = None

    last_exc = None

    for sleep in sleep_generator:
        try:
            return target()

        # pylint: disable=broad-except
        # This function explicitly must deal with broad exceptions.
        except Exception as exc:
            if not predicate(exc):
                raise
            last_exc = exc
            if on_error is not None:
                on_error(exc)

        now = datetime_helpers.utcnow()
        if deadline_datetime is not None and deadline_datetime < now:
            six.raise_from(
                exceptions.RetryError(
                    "Deadline of {:.1f}s exceeded while calling {}".format(
                        deadline, target
                    ),
                    last_exc,
                ),
                last_exc,
            )

        _LOGGER.debug(
            "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
        )
        time.sleep(sleep)

    raise ValueError("Sleep generator stopped yielding sleep values.")

Call a function and retry if it fails.

This is the lowest-level retry helper. Generally, you'll use the
higher-level retry helper :class:`Retry`.

Args:
    target(Callable): The function to call and retry. This must be a
        nullary function - apply arguments with `functools.partial`.
    predicate (Callable[Exception]): A callable used to determine if an
        exception raised by the target should be considered retryable.
        It should return True to retry or False otherwise.
    sleep_generator (Iterable[float]): An infinite iterator that determines
        how long to sleep between retries.
    deadline (float): How long to keep retrying the target.
    on_error (Callable): A function to call while processing a retryable
        exception. Any error raised by this function will *not* be
        caught.

Returns:
    Any: the return value of the target function.

Raises:
    google.api_core.RetryError: If the deadline is exceeded while retrying.
    ValueError: If the sleep generator stops yielding values.
    Exception: If the target raises an exception that isn't retryable.
Please provide a description of the function:

def open(self):
    if self.is_active:
        raise ValueError("Can not open an already open stream.")

    request_generator = _RequestQueueGenerator(
        self._request_queue, initial_request=self._initial_request
    )
    call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)

    request_generator.call = call

    # TODO: api_core should expose the future interface for wrapped
    # callables as well.
    if hasattr(call, "_wrapped"):  # pragma: NO COVER
        call._wrapped.add_done_callback(self._on_call_done)
    else:
        call.add_done_callback(self._on_call_done)

    self._request_generator = request_generator
    self.call = call

Opens the stream.
Please provide a description of the function:

def close(self):
    if self.call is None:
        return

    self._request_queue.put(None)
    self.call.cancel()
    self._request_generator = None

Closes the stream.
Please provide a description of the function:

def send(self, request):
    if self.call is None:
        raise ValueError("Can not send() on an RPC that has never been open()ed.")

    # Don't use self.is_active(), as ResumableBidiRpc will overload it
    # to mean something semantically different.
    if self.call.is_active():
        self._request_queue.put(request)
    else:
        # calling next should cause the call to raise.
        next(self.call)

Queue a message to be sent on the stream.

Send is non-blocking.

If the underlying RPC has been closed, this will raise.

Args:
    request (protobuf.Message): The request to send.
Please provide a description of the function:

def _recoverable(self, method, *args, **kwargs):
    while True:
        try:
            return method(*args, **kwargs)

        except Exception as exc:
            with self._operational_lock:
                _LOGGER.debug("Call to retryable %r caused %s.", method, exc)

                if not self._should_recover(exc):
                    self.close()
                    _LOGGER.debug("Not retrying %r due to %s.", method, exc)
                    self._finalize(exc)
                    raise exc

                _LOGGER.debug("Re-opening stream from retryable %r.", method)
                self._reopen()

Wraps a method to recover the stream and retry on error.

If a retryable error occurs while making the call, then the stream will
be re-opened and the method will be retried. This happens indefinitely
so long as the error is a retryable one. If an error occurs while
re-opening the stream, then this method will raise immediately and
trigger finalization of this object.

Args:
    method (Callable[..., Any]): The method to call.
    args: The args to pass to the method.
    kwargs: The kwargs to pass to the method.
Please provide a description of the function:

def start(self):
    with self._operational_lock:
        ready = threading.Event()
        thread = threading.Thread(
            name=_BIDIRECTIONAL_CONSUMER_NAME,
            target=self._thread_main,
            args=(ready,),
        )
        thread.daemon = True
        thread.start()
        # Other parts of the code rely on `thread.is_alive` which
        # isn't sufficient to know if a thread is active, just that it may
        # soon be active. This can cause races. Further protect
        # against races by using a ready event and wait on it to be set.
        ready.wait()
        self._thread = thread
        _LOGGER.debug("Started helper thread %s", thread.name)

Start the background thread and begin consuming the stream.
Please provide a description of the function:

def stop(self):
    with self._operational_lock:
        self._bidi_rpc.close()

        if self._thread is not None:
            # Resume the thread to wake it up in case it is sleeping.
            self.resume()
            self._thread.join()

        self._thread = None

Stop consuming the stream and shutdown the background thread.
Please provide a description of the function:

def resume(self):
    with self._wake:
        self._paused = False
        self._wake.notify_all()

Resumes the response stream.
Please provide a description of the function:

def project_path(cls, user, project):
    return google.api_core.path_template.expand(
        "users/{user}/projects/{project}", user=user, project=project
    )

Return a fully-qualified project string.

Please provide a description of the function:

def fingerprint_path(cls, user, fingerprint):
    return google.api_core.path_template.expand(
        "users/{user}/sshPublicKeys/{fingerprint}",
        user=user,
        fingerprint=fingerprint,
    )

Return a fully-qualified fingerprint string.
Please provide a description of the function:

def delete_posix_account(
    self,
    name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "delete_posix_account" not in self._inner_api_calls:
        self._inner_api_calls[
            "delete_posix_account"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.delete_posix_account,
            default_retry=self._method_configs["DeletePosixAccount"].retry,
            default_timeout=self._method_configs["DeletePosixAccount"].timeout,
            client_info=self._client_info,
        )

    request = oslogin_pb2.DeletePosixAccountRequest(name=name)
    self._inner_api_calls["delete_posix_account"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Deletes a POSIX account.

Example:
    >>> from google.cloud import oslogin_v1
    >>>
    >>> client = oslogin_v1.OsLoginServiceClient()
    >>>
    >>> name = client.project_path('[USER]', '[PROJECT]')
    >>>
    >>> client.delete_posix_account(name)

Args:
    name (str): A reference to the POSIX account to delete. POSIX accounts are
        identified by the project ID they are associated with. A reference to
        the POSIX account is in format ``users/{user}/projects/{project}``.
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def import_ssh_public_key(
    self,
    parent,
    ssh_public_key,
    project_id=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "import_ssh_public_key" not in self._inner_api_calls:
        self._inner_api_calls[
            "import_ssh_public_key"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.import_ssh_public_key,
            default_retry=self._method_configs["ImportSshPublicKey"].retry,
            default_timeout=self._method_configs["ImportSshPublicKey"].timeout,
            client_info=self._client_info,
        )

    request = oslogin_pb2.ImportSshPublicKeyRequest(
        parent=parent, ssh_public_key=ssh_public_key, project_id=project_id
    )
    return self._inner_api_calls["import_ssh_public_key"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Adds an SSH public key and returns the profile information. Default POSIX
account information is set when no username and UID exist as part of the
login profile.

Example:
    >>> from google.cloud import oslogin_v1
    >>>
    >>> client = oslogin_v1.OsLoginServiceClient()
    >>>
    >>> parent = client.user_path('[USER]')
    >>>
    >>> # TODO: Initialize `ssh_public_key`:
    >>> ssh_public_key = {}
    >>>
    >>> response = client.import_ssh_public_key(parent, ssh_public_key)

Args:
    parent (str): The unique ID for the user in format ``users/{user}``.
    ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
    project_id (str): The project ID of the Google Cloud Platform project.
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.oslogin_v1.types.ImportSshPublicKeyResponse` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:

def update_ssh_public_key(
    self,
    name,
    ssh_public_key,
    update_mask=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    # Wrap the transport method to add retry and timeout logic.
    if "update_ssh_public_key" not in self._inner_api_calls:
        self._inner_api_calls[
            "update_ssh_public_key"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.update_ssh_public_key,
            default_retry=self._method_configs["UpdateSshPublicKey"].retry,
            default_timeout=self._method_configs["UpdateSshPublicKey"].timeout,
            client_info=self._client_info,
        )

    request = oslogin_pb2.UpdateSshPublicKeyRequest(
        name=name, ssh_public_key=ssh_public_key, update_mask=update_mask
    )
    return self._inner_api_calls["update_ssh_public_key"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )

Updates an SSH public key and returns the profile information. This method
supports patch semantics.

Example:
    >>> from google.cloud import oslogin_v1
    >>>
    >>> client = oslogin_v1.OsLoginServiceClient()
    >>>
    >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]')
    >>>
    >>> # TODO: Initialize `ssh_public_key`:
    >>> ssh_public_key = {}
    >>>
    >>> response = client.update_ssh_public_key(name, ssh_public_key)

Args:
    name (str): The fingerprint of the public key to update. Public keys are identified
        by their SHA-256 fingerprint. The fingerprint of the public key is in
        format ``users/{user}/sshPublicKeys/{fingerprint}``.
    ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
    update_mask (Union[dict, ~google.cloud.oslogin_v1.types.FieldMask]): Mask to control which fields get updated. Updates all if not present.

        If a dict is provided, it must be of the same form as the protobuf
        message :class:`~google.cloud.oslogin_v1.types.FieldMask`
    retry (Optional[google.api_core.retry.Retry]): A retry object used
        to retry requests. If ``None`` is specified, requests will not
        be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
        that is provided to the method.

Returns:
    A :class:`~google.cloud.oslogin_v1.types.SshPublicKey` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request
        failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due
        to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.
Please provide a description of the function:def _gc_rule_from_pb(gc_rule_pb): rule_name = gc_rule_pb.WhichOneof("rule") if rule_name is None: return None if rule_name == "max_num_versions": return MaxVersionsGCRule(gc_rule_pb.max_num_versions) elif rule_name == "max_age": max_age = _helpers._duration_pb_to_timedelta(gc_rule_pb.max_age) return MaxAgeGCRule(max_age) elif rule_name == "union": return GCRuleUnion([_gc_rule_from_pb(rule) for rule in gc_rule_pb.union.rules]) elif rule_name == "intersection": rules = [_gc_rule_from_pb(rule) for rule in gc_rule_pb.intersection.rules] return GCRuleIntersection(rules) else: raise ValueError("Unexpected rule name", rule_name)
[ "Convert a protobuf GC rule to a native object.\n\n :type gc_rule_pb: :class:`.table_v2_pb2.GcRule`\n :param gc_rule_pb: The GC rule to convert.\n\n :rtype: :class:`GarbageCollectionRule` or :data:`NoneType <types.NoneType>`\n :returns: An instance of one of the native rules defined\n in :module:`column_family` or :data:`None` if no values were\n set on the protobuf passed in.\n :raises: :class:`ValueError <exceptions.ValueError>` if the rule name\n is unexpected.\n " ]
Please provide a description of the function:def to_pb(self): max_age = _helpers._timedelta_to_duration_pb(self.max_age) return table_v2_pb2.GcRule(max_age=max_age)
[ "Converts the garbage collection rule to a protobuf.\n\n :rtype: :class:`.table_v2_pb2.GcRule`\n :returns: The converted current object.\n " ]
Please provide a description of the function:def to_pb(self): union = table_v2_pb2.GcRule.Union(rules=[rule.to_pb() for rule in self.rules]) return table_v2_pb2.GcRule(union=union)
[ "Converts the union into a single GC rule as a protobuf.\n\n :rtype: :class:`.table_v2_pb2.GcRule`\n :returns: The converted current object.\n " ]
Please provide a description of the function:def to_pb(self): intersection = table_v2_pb2.GcRule.Intersection( rules=[rule.to_pb() for rule in self.rules] ) return table_v2_pb2.GcRule(intersection=intersection)
[ "Converts the intersection into a single GC rule as a protobuf.\n\n :rtype: :class:`.table_v2_pb2.GcRule`\n :returns: The converted current object.\n " ]
Please provide a description of the function:def to_pb(self): if self.gc_rule is None: return table_v2_pb2.ColumnFamily() else: return table_v2_pb2.ColumnFamily(gc_rule=self.gc_rule.to_pb())
[ "Converts the column family to a protobuf.\n\n :rtype: :class:`.table_v2_pb2.ColumnFamily`\n :returns: The converted current object.\n " ]
Please provide a description of the function:def create(self):
    column_family = self.to_pb()
    modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification(
        id=self.column_family_id, create=column_family
    )

    client = self._table._instance._client
    # The only data the modification carries are the GC rule and the
    # column family ID, both already stored on this instance.
    client.table_admin_client.modify_column_families(
        self._table.name, [modification]
    )
[ "Create this column family.\n\n For example:\n\n .. literalinclude:: snippets_table.py\n :start-after: [START bigtable_create_column_family]\n :end-before: [END bigtable_create_column_family]\n\n " ]
Please provide a description of the function:def delete(self):
    modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification(
        id=self.column_family_id, drop=True
    )

    client = self._table._instance._client
    # The modification carries only the column family ID (already stored
    # on this instance) and the ``drop`` flag.
    client.table_admin_client.modify_column_families(
        self._table.name, [modification]
    )
[ "Delete this column family.\n\n For example:\n\n .. literalinclude:: snippets_table.py\n :start-after: [START bigtable_delete_column_family]\n :end-before: [END bigtable_delete_column_family]\n\n " ]
Please provide a description of the function:def _maybe_wrap_exception(exception): if isinstance(exception, grpc.RpcError): return exceptions.from_grpc_error(exception) return exception
[ "Wraps a gRPC exception class, if needed." ]
Please provide a description of the function:def close(self, reason=None):
    with self._closing:
        if self._closed:
            return

        # Stop consuming messages.
        if self.is_active:
            _LOGGER.debug("Stopping consumer.")
            self._consumer.stop()
        self._consumer = None

        self._rpc.close()
        self._rpc = None
        self._closed = True
        _LOGGER.debug("Finished stopping manager.")

        if reason:
            # Raise an exception if a reason is provided
            _LOGGER.debug("reason for closing: %s", reason)
            if isinstance(reason, Exception):
                raise reason
            raise RuntimeError(reason)
[ "Stop consuming messages and shutdown all helper threads.\n\n This method is idempotent. Additional calls will have no effect.\n\n Args:\n reason (Any): The reason to close this. If None, this is considered\n an \"intentional\" shutdown.\n " ]
Please provide a description of the function:def _on_rpc_done(self, future): _LOGGER.info("RPC termination has signaled manager shutdown.") future = _maybe_wrap_exception(future) thread = threading.Thread( name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} ) thread.daemon = True thread.start()
[ "Triggered whenever the underlying RPC terminates without recovery.\n\n This is typically triggered from one of two threads: the background\n consumer thread (when calling ``recv()`` produces a non-recoverable\n error) or the grpc management thread (when cancelling the RPC).\n\n This method is *non-blocking*. It will start another thread to deal\n with shutting everything down. This is to prevent blocking in the\n background consumer and preventing it from being ``joined()``.\n " ]
Please provide a description of the function:def for_document( cls, document_ref, snapshot_callback, snapshot_class_instance, reference_class_instance, ): return cls( document_ref, document_ref._client, { "documents": {"documents": [document_ref._document_path]}, "target_id": WATCH_TARGET_ID, }, document_watch_comparator, snapshot_callback, snapshot_class_instance, reference_class_instance, )
[ "\n Creates a watch snapshot listener for a document. snapshot_callback\n receives a DocumentChange object, but may also start to get\n targetChange and such soon\n\n Args:\n document_ref: Reference to Document\n snapshot_callback: callback to be called on snapshot\n snapshot_class_instance: instance of DocumentSnapshot to make\n snapshots with to pass to snapshot_callback\n reference_class_instance: instance of DocumentReference to make\n references\n\n " ]
Please provide a description of the function:def on_snapshot(self, proto): TargetChange = firestore_pb2.TargetChange target_changetype_dispatch = { TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change, TargetChange.ADD: self._on_snapshot_target_change_add, TargetChange.REMOVE: self._on_snapshot_target_change_remove, TargetChange.RESET: self._on_snapshot_target_change_reset, TargetChange.CURRENT: self._on_snapshot_target_change_current, } target_change = proto.target_change if str(target_change): target_change_type = target_change.target_change_type _LOGGER.debug("on_snapshot: target change: " + str(target_change_type)) meth = target_changetype_dispatch.get(target_change_type) if meth is None: _LOGGER.info( "on_snapshot: Unknown target change " + str(target_change_type) ) self.close( reason="Unknown target change type: %s " % str(target_change_type) ) else: try: meth(proto) except Exception as exc2: _LOGGER.debug("meth(proto) exc: " + str(exc2)) raise # NOTE: # in other implementations, such as node, the backoff is reset here # in this version bidi rpc is just used and will control this. elif str(proto.document_change): _LOGGER.debug("on_snapshot: document change") # No other target_ids can show up here, but we still need to see # if the targetId was in the added list or removed list. target_ids = proto.document_change.target_ids or [] removed_target_ids = proto.document_change.removed_target_ids or [] changed = False removed = False if WATCH_TARGET_ID in target_ids: changed = True if WATCH_TARGET_ID in removed_target_ids: removed = True if changed: _LOGGER.debug("on_snapshot: document change: CHANGED") # google.cloud.firestore_v1beta1.types.DocumentChange document_change = proto.document_change # google.cloud.firestore_v1beta1.types.Document document = document_change.document data = _helpers.decode_dict(document.fields, self._firestore) # Create a snapshot. As Document and Query objects can be # passed we need to get a Document Reference in a more manual # fashion than self._document_reference document_name = document.name db_str = self._firestore._database_string db_str_documents = db_str + "/documents/" if document_name.startswith(db_str_documents): document_name = document_name[len(db_str_documents) :] document_ref = self._firestore.document(document_name) snapshot = self.DocumentSnapshot( reference=document_ref, data=data, exists=True, read_time=None, create_time=document.create_time, update_time=document.update_time, ) self.change_map[document.name] = snapshot elif removed: _LOGGER.debug("on_snapshot: document change: REMOVED") document = proto.document_change.document self.change_map[document.name] = ChangeType.REMOVED # NB: document_delete and document_remove (as far as we, the client, # are concerned) are functionally equivalent elif str(proto.document_delete): _LOGGER.debug("on_snapshot: document change: DELETE") name = proto.document_delete.document self.change_map[name] = ChangeType.REMOVED elif str(proto.document_remove): _LOGGER.debug("on_snapshot: document change: REMOVE") name = proto.document_remove.document self.change_map[name] = ChangeType.REMOVED elif proto.filter: _LOGGER.debug("on_snapshot: filter update") if proto.filter.count != self._current_size(): # We need to remove all the current results. self._reset_docs() # The filter didn't match, so re-issue the query. # TODO: reset stream method? # self._reset_stream(); else: _LOGGER.debug("UNKNOWN TYPE. UHOH") self.close(reason=ValueError("Unknown listen response type: %s" % proto))
[ "\n Called everytime there is a response from listen. Collect changes\n and 'push' the changes in a batch to the customer when we receive\n 'current' from the listen response.\n\n Args:\n listen_response(`google.cloud.firestore_v1beta1.types.ListenResponse`):\n Callback method that receives a object to\n " ]
Please provide a description of the function:def push(self, read_time, next_resume_token): deletes, adds, updates = Watch._extract_changes( self.doc_map, self.change_map, read_time ) updated_tree, updated_map, appliedChanges = self._compute_snapshot( self.doc_tree, self.doc_map, deletes, adds, updates ) if not self.has_pushed or len(appliedChanges): # TODO: It is possible in the future we will have the tree order # on insert. For now, we sort here. key = functools.cmp_to_key(self._comparator) keys = sorted(updated_tree.keys(), key=key) self._snapshot_callback( keys, appliedChanges, datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc), ) self.has_pushed = True self.doc_tree = updated_tree self.doc_map = updated_map self.change_map.clear() self.resume_token = next_resume_token
[ "\n Assembles a new snapshot from the current set of changes and invokes\n the user's callback. Clears the current changes on completion.\n " ]
Please provide a description of the function:def _current_size(self): deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None) return len(self.doc_map) + len(adds) - len(deletes)
[ "\n Returns the current count of all documents, including the changes from\n the current changeMap.\n " ]
Please provide a description of the function:def _reset_docs(self): _LOGGER.debug("resetting documents") self.change_map.clear() self.resume_token = None # Mark each document as deleted. If documents are not deleted # they will be sent again by the server. for snapshot in self.doc_tree.keys(): name = snapshot.reference._document_path self.change_map[name] = ChangeType.REMOVED self.current = False
[ "\n Helper to clear the docs on RESET or filter mismatch.\n " ]
Please provide a description of the function:def build_api_url( cls, path, query_params=None, api_base_url=None, api_version=None ): url = cls.API_URL_TEMPLATE.format( api_base_url=(api_base_url or cls.API_BASE_URL), api_version=(api_version or cls.API_VERSION), path=path, ) query_params = query_params or {} if query_params: url += "?" + urlencode(query_params, doseq=True) return url
[ "Construct an API url given a few components, some optional.\n\n Typically, you shouldn't need to use this method.\n\n :type path: str\n :param path: The path to the resource (ie, ``'/b/bucket-name'``).\n\n :type query_params: dict or list\n :param query_params: A dictionary of keys and values (or list of\n key-value pairs) to insert into the query\n string of the URL.\n\n :type api_base_url: str\n :param api_base_url: The base URL for the API endpoint.\n Typically you won't have to provide this.\n\n :type api_version: str\n :param api_version: The version of the API to call.\n Typically you shouldn't provide this and instead\n use the default for the library.\n\n :rtype: str\n :returns: The URL assembled from the pieces provided.\n " ]
Please provide a description of the function:def _make_request( self, method, url, data=None, content_type=None, headers=None, target_object=None, ): headers = headers or {} headers.update(self._EXTRA_HEADERS) headers["Accept-Encoding"] = "gzip" if content_type: headers["Content-Type"] = content_type headers["User-Agent"] = self.USER_AGENT return self._do_request(method, url, headers, data, target_object)
[ "A low level method to send a request to the API.\n\n Typically, you shouldn't need to use this method.\n\n :type method: str\n :param method: The HTTP method to use in the request.\n\n :type url: str\n :param url: The URL to send the request to.\n\n :type data: str\n :param data: The data to send as the body of the request.\n\n :type content_type: str\n :param content_type: The proper MIME type of the data provided.\n\n :type headers: dict\n :param headers: (Optional) A dictionary of HTTP headers to send with\n the request. If passed, will be modified directly\n here with added headers.\n\n :type target_object: object\n :param target_object:\n (Optional) Argument to be used by library callers. This can allow\n custom behavior, for example, to defer an HTTP request and complete\n initialization of the object at a later time.\n\n :rtype: :class:`requests.Response`\n :returns: The HTTP response.\n " ]
Please provide a description of the function:def _do_request( self, method, url, headers, data, target_object ): # pylint: disable=unused-argument return self.http.request(url=url, method=method, headers=headers, data=data)
[ "Low-level helper: perform the actual API request over HTTP.\n\n Allows batch context managers to override and defer a request.\n\n :type method: str\n :param method: The HTTP method to use in the request.\n\n :type url: str\n :param url: The URL to send the request to.\n\n :type headers: dict\n :param headers: A dictionary of HTTP headers to send with the request.\n\n :type data: str\n :param data: The data to send as the body of the request.\n\n :type target_object: object\n :param target_object:\n (Optional) Unused ``target_object`` here but may be used by a\n superclass.\n\n :rtype: :class:`requests.Response`\n :returns: The HTTP response.\n " ]
Please provide a description of the function:def api_request( self, method, path, query_params=None, data=None, content_type=None, headers=None, api_base_url=None, api_version=None, expect_json=True, _target_object=None, ): url = self.build_api_url( path=path, query_params=query_params, api_base_url=api_base_url, api_version=api_version, ) # Making the executive decision that any dictionary # data will be sent properly as JSON. if data and isinstance(data, dict): data = json.dumps(data) content_type = "application/json" response = self._make_request( method=method, url=url, data=data, content_type=content_type, headers=headers, target_object=_target_object, ) if not 200 <= response.status_code < 300: raise exceptions.from_http_response(response) if expect_json and response.content: return response.json() else: return response.content
[ "Make a request over the HTTP transport to the API.\n\n You shouldn't need to use this method, but if you plan to\n interact with the API using these primitives, this is the\n correct one to use.\n\n :type method: str\n :param method: The HTTP method name (ie, ``GET``, ``POST``, etc).\n Required.\n\n :type path: str\n :param path: The path to the resource (ie, ``'/b/bucket-name'``).\n Required.\n\n :type query_params: dict or list\n :param query_params: A dictionary of keys and values (or list of\n key-value pairs) to insert into the query\n string of the URL.\n\n :type data: str\n :param data: The data to send as the body of the request. Default is\n the empty string.\n\n :type content_type: str\n :param content_type: The proper MIME type of the data provided. Default\n is None.\n\n :type headers: dict\n :param headers: extra HTTP headers to be sent with the request.\n\n :type api_base_url: str\n :param api_base_url: The base URL for the API endpoint.\n Typically you won't have to provide this.\n Default is the standard API base URL.\n\n :type api_version: str\n :param api_version: The version of the API to call. Typically\n you shouldn't provide this and instead use\n the default for the library. Default is the\n latest API version supported by\n google-cloud-python.\n\n :type expect_json: bool\n :param expect_json: If True, this method will try to parse the\n response as JSON and raise an exception if\n that cannot be done. Default is True.\n\n :type _target_object: :class:`object`\n :param _target_object:\n (Optional) Protected argument to be used by library callers. This\n can allow custom behavior, for example, to defer an HTTP request\n and complete initialization of the object at a later time.\n\n :raises ~google.cloud.exceptions.GoogleCloudError: if the response code\n is not 200 OK.\n :raises ValueError: if the response content type is not JSON.\n :rtype: dict or str\n :returns: The API response payload, either as a raw string or\n a dictionary if the response is valid JSON.\n " ]
Please provide a description of the function:def _build_label_filter(category, *args, **kwargs): terms = list(args) for key, value in six.iteritems(kwargs): if value is None: continue suffix = None if key.endswith( ("_prefix", "_suffix", "_greater", "_greaterequal", "_less", "_lessequal") ): key, suffix = key.rsplit("_", 1) if category == "resource" and key == "resource_type": key = "resource.type" else: key = ".".join((category, "label", key)) if suffix == "prefix": term = '{key} = starts_with("{value}")' elif suffix == "suffix": term = '{key} = ends_with("{value}")' elif suffix == "greater": term = "{key} > {value}" elif suffix == "greaterequal": term = "{key} >= {value}" elif suffix == "less": term = "{key} < {value}" elif suffix == "lessequal": term = "{key} <= {value}" else: term = '{key} = "{value}"' terms.append(term.format(key=key, value=value)) return " AND ".join(sorted(terms))
[ "Construct a filter string to filter on metric or resource labels." ]
Please provide a description of the function:def select_interval(self, end_time, start_time=None): new_query = copy.deepcopy(self) new_query._end_time = end_time new_query._start_time = start_time return new_query
[ "Copy the query and set the query time interval.\n\n Example::\n\n import datetime\n\n now = datetime.datetime.utcnow()\n query = query.select_interval(\n end_time=now,\n start_time=now - datetime.timedelta(minutes=5))\n\n As a convenience, you can alternatively specify the end time and\n an interval duration when you create the query initially.\n\n :type end_time: :class:`datetime.datetime`\n :param end_time: The end time (inclusive) of the time interval\n for which results should be returned, as a datetime object.\n\n :type start_time: :class:`datetime.datetime`\n :param start_time:\n (Optional) The start time (exclusive) of the time interval\n for which results should be returned, as a datetime object.\n If not specified, the interval is a point in time.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n " ]
Please provide a description of the function:def select_group(self, group_id): new_query = copy.deepcopy(self) new_query._filter.group_id = group_id return new_query
[ "Copy the query and add filtering by group.\n\n Example::\n\n query = query.select_group('1234567')\n\n :type group_id: str\n :param group_id: The ID of a group to filter by.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n " ]
Please provide a description of the function:def select_projects(self, *args): new_query = copy.deepcopy(self) new_query._filter.projects = args return new_query
[ "Copy the query and add filtering by monitored projects.\n\n This is only useful if the target project represents a Stackdriver\n account containing the specified monitored projects.\n\n Examples::\n\n query = query.select_projects('project-1')\n query = query.select_projects('project-1', 'project-2')\n\n :type args: tuple\n :param args: Project IDs limiting the resources to be included\n in the query.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n " ]
Please provide a description of the function:def select_resources(self, *args, **kwargs): new_query = copy.deepcopy(self) new_query._filter.select_resources(*args, **kwargs) return new_query
[ "Copy the query and add filtering by resource labels.\n\n Examples::\n\n query = query.select_resources(zone='us-central1-a')\n query = query.select_resources(zone_prefix='europe-')\n query = query.select_resources(resource_type='gce_instance')\n\n A keyword argument ``<label>=<value>`` ordinarily generates a filter\n expression of the form::\n\n resource.label.<label> = \"<value>\"\n\n However, by adding ``\"_prefix\"`` or ``\"_suffix\"`` to the keyword,\n you can specify a partial match.\n\n ``<label>_prefix=<value>`` generates::\n\n resource.label.<label> = starts_with(\"<value>\")\n\n ``<label>_suffix=<value>`` generates::\n\n resource.label.<label> = ends_with(\"<value>\")\n\n As a special case, ``\"resource_type\"`` is treated as a special\n pseudo-label corresponding to the filter object ``resource.type``.\n For example, ``resource_type=<value>`` generates::\n\n resource.type = \"<value>\"\n\n See the `defined resource types`_.\n\n .. note::\n\n The label ``\"instance_name\"`` is a metric label,\n not a resource label. You would filter on it using\n ``select_metrics(instance_name=...)``.\n\n :type args: tuple\n :param args: Raw filter expression strings to include in the\n conjunction. If just one is provided and no keyword arguments\n are provided, it can be a disjunction.\n\n :type kwargs: dict\n :param kwargs: Label filters to include in the conjunction as\n described above.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n\n .. _defined resource types:\n https://cloud.google.com/monitoring/api/v3/monitored-resources\n " ]
Please provide a description of the function:def select_metrics(self, *args, **kwargs): new_query = copy.deepcopy(self) new_query._filter.select_metrics(*args, **kwargs) return new_query
[ "Copy the query and add filtering by metric labels.\n\n Examples::\n\n query = query.select_metrics(instance_name='myinstance')\n query = query.select_metrics(instance_name_prefix='mycluster-')\n\n A keyword argument ``<label>=<value>`` ordinarily generates a filter\n expression of the form::\n\n metric.label.<label> = \"<value>\"\n\n However, by adding ``\"_prefix\"`` or ``\"_suffix\"`` to the keyword,\n you can specify a partial match.\n\n ``<label>_prefix=<value>`` generates::\n\n metric.label.<label> = starts_with(\"<value>\")\n\n ``<label>_suffix=<value>`` generates::\n\n metric.label.<label> = ends_with(\"<value>\")\n\n If the label's value type is ``INT64``, a similar notation can be\n used to express inequalities:\n\n ``<label>_less=<value>`` generates::\n\n metric.label.<label> < <value>\n\n ``<label>_lessequal=<value>`` generates::\n\n metric.label.<label> <= <value>\n\n ``<label>_greater=<value>`` generates::\n\n metric.label.<label> > <value>\n\n ``<label>_greaterequal=<value>`` generates::\n\n metric.label.<label> >= <value>\n\n :type args: tuple\n :param args: Raw filter expression strings to include in the\n conjunction. If just one is provided and no keyword arguments\n are provided, it can be a disjunction.\n\n :type kwargs: dict\n :param kwargs: Label filters to include in the conjunction as\n described above.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n " ]
Please provide a description of the function:def align(self, per_series_aligner, seconds=0, minutes=0, hours=0): new_query = copy.deepcopy(self) new_query._per_series_aligner = per_series_aligner new_query._alignment_period_seconds = seconds + 60 * (minutes + 60 * hours) return new_query
[ "Copy the query and add temporal alignment.\n\n If ``per_series_aligner`` is not :data:`Aligner.ALIGN_NONE`, each time\n series will contain data points only on the period boundaries.\n\n Example::\n\n from google.cloud.monitoring import enums\n query = query.align(\n enums.Aggregation.Aligner.ALIGN_MEAN, minutes=5)\n\n It is also possible to specify the aligner as a literal string::\n\n query = query.align('ALIGN_MEAN', minutes=5)\n\n :type per_series_aligner: str or\n :class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`\n :param per_series_aligner: The approach to be used to align\n individual time series. For example: :data:`Aligner.ALIGN_MEAN`.\n See\n :class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`\n and the descriptions of the `supported aligners`_.\n\n :type seconds: int\n :param seconds: The number of seconds in the alignment period.\n\n :type minutes: int\n :param minutes: The number of minutes in the alignment period.\n\n :type hours: int\n :param hours: The number of hours in the alignment period.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n\n .. _supported aligners:\n https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\\\n projects.timeSeries/list#Aligner\n " ]
Please provide a description of the function:def reduce(self, cross_series_reducer, *group_by_fields): new_query = copy.deepcopy(self) new_query._cross_series_reducer = cross_series_reducer new_query._group_by_fields = group_by_fields return new_query
[ "Copy the query and add cross-series reduction.\n\n Cross-series reduction combines time series by aggregating their\n data points.\n\n For example, you could request an aggregated time series for each\n combination of project and zone as follows::\n\n from google.cloud.monitoring import enums\n query = query.reduce(enums.Aggregation.Reducer.REDUCE_MEAN,\n 'resource.project_id', 'resource.zone')\n\n :type cross_series_reducer: str or\n :class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`\n :param cross_series_reducer:\n The approach to be used to combine time series. For example:\n :data:`Reducer.REDUCE_MEAN`. See\n :class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`\n and the descriptions of the `supported reducers`_.\n\n :type group_by_fields: strs\n :param group_by_fields:\n Fields to be preserved by the reduction. For example, specifying\n just ``\"resource.zone\"`` will result in one time series per zone.\n The default is to aggregate all of the time series into just one.\n\n :rtype: :class:`Query`\n :returns: The new query object.\n\n .. _supported reducers:\n https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\\\n projects.timeSeries/list#Reducer\n " ]
Please provide a description of the function:def iter(self, headers_only=False, page_size=None): if self._end_time is None: raise ValueError("Query time interval not specified.") params = self._build_query_params(headers_only, page_size) for ts in self._client.list_time_series(**params): yield ts
[ "Yield all time series objects selected by the query.\n\n The generator returned iterates over\n :class:`~google.cloud.monitoring_v3.types.TimeSeries` objects\n containing points ordered from oldest to newest.\n\n Note that the :class:`Query` object itself is an iterable, such that\n the following are equivalent::\n\n for timeseries in query:\n ...\n\n for timeseries in query.iter():\n ...\n\n :type headers_only: bool\n :param headers_only:\n Whether to omit the point data from the time series objects.\n\n :type page_size: int\n :param page_size:\n (Optional) The maximum number of points in each page of results\n from this request. Non-positive values are ignored. Defaults\n to a sensible value set by the API.\n\n :raises: :exc:`ValueError` if the query time interval has not been\n specified.\n " ]
Please provide a description of the function:def _build_query_params(self, headers_only=False, page_size=None): params = {"name": self._project_path, "filter_": self.filter} params["interval"] = types.TimeInterval() params["interval"].end_time.FromDatetime(self._end_time) if self._start_time: params["interval"].start_time.FromDatetime(self._start_time) if ( self._per_series_aligner or self._alignment_period_seconds or self._cross_series_reducer or self._group_by_fields ): params["aggregation"] = types.Aggregation( per_series_aligner=self._per_series_aligner, cross_series_reducer=self._cross_series_reducer, group_by_fields=self._group_by_fields, alignment_period={"seconds": self._alignment_period_seconds}, ) if headers_only: params["view"] = enums.ListTimeSeriesRequest.TimeSeriesView.HEADERS else: params["view"] = enums.ListTimeSeriesRequest.TimeSeriesView.FULL if page_size is not None: params["page_size"] = page_size return params
[ "Return key-value pairs for the list_time_series API call.\n\n :type headers_only: bool\n :param headers_only:\n Whether to omit the point data from the\n :class:`~google.cloud.monitoring_v3.types.TimeSeries` objects.\n\n :type page_size: int\n :param page_size:\n (Optional) The maximum number of points in each page of results\n from this request. Non-positive values are ignored. Defaults\n to a sensible value set by the API.\n " ]
Please provide a description of the function:def from_api_repr(cls, resource, client): project = cls(project_id=resource["projectId"], client=client) project.set_properties_from_api_repr(resource) return project
[ "Factory: construct a project given its API representation.\n\n :type resource: dict\n :param resource: project resource representation returned from the API\n\n :type client: :class:`google.cloud.resource_manager.client.Client`\n :param client: The Client used with this project.\n\n :rtype: :class:`google.cloud.resource_manager.project.Project`\n :returns: The project created.\n " ]
Please provide a description of the function:def set_properties_from_api_repr(self, resource): self.name = resource.get("name") self.number = resource["projectNumber"] self.labels = resource.get("labels", {}) self.status = resource["lifecycleState"] if "parent" in resource: self.parent = resource["parent"]
[ "Update specific properties from its API representation." ]
Please provide a description of the function:def create(self, client=None): client = self._require_client(client) data = {"projectId": self.project_id, "name": self.name, "labels": self.labels} resp = client._connection.api_request( method="POST", path="/projects", data=data ) self.set_properties_from_api_repr(resource=resp)
[ "API call: create the project via a ``POST`` request.\n\n See\n https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/create\n\n :type client: :class:`google.cloud.resource_manager.client.Client` or\n :data:`NoneType <types.NoneType>`\n :param client: the client to use. If not passed, falls back to\n the client stored on the current project.\n " ]
Please provide a description of the function:def update(self, client=None): client = self._require_client(client) data = {"name": self.name, "labels": self.labels, "parent": self.parent} resp = client._connection.api_request(method="PUT", path=self.path, data=data) self.set_properties_from_api_repr(resp)
[ "API call: update the project via a ``PUT`` request.\n\n See\n https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/update\n\n :type client: :class:`google.cloud.resource_manager.client.Client` or\n :data:`NoneType <types.NoneType>`\n :param client: the client to use. If not passed, falls back to\n the client stored on the current project.\n " ]
Please provide a description of the function:def delete(self, client=None, reload_data=False): client = self._require_client(client) client._connection.api_request(method="DELETE", path=self.path) # If the reload flag is set, reload the project. if reload_data: self.reload()
[ "API call: delete the project via a ``DELETE`` request.\n\n See\n https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/delete\n\n This actually changes the status (``lifecycleState``) from ``ACTIVE``\n to ``DELETE_REQUESTED``.\n Later (it's not specified when), the project will move into the\n ``DELETE_IN_PROGRESS`` state, which means the deleting has actually\n begun.\n\n :type client: :class:`google.cloud.resource_manager.client.Client` or\n :data:`NoneType <types.NoneType>`\n :param client: the client to use. If not passed, falls back to\n the client stored on the current project.\n\n :type reload_data: bool\n :param reload_data: Whether to reload the project with the latest\n state. If you want to get the updated status,\n you'll want this set to :data:`True` as the DELETE\n method doesn't send back the updated project.\n Default: :data:`False`.\n " ]
Please provide a description of the function:def _get_meaning(value_pb, is_list=False): meaning = None if is_list: # An empty list will have no values, hence no shared meaning # set among them. if len(value_pb.array_value.values) == 0: return None # We check among all the meanings, some of which may be None, # the rest which may be enum/int values. all_meanings = [ _get_meaning(sub_value_pb) for sub_value_pb in value_pb.array_value.values ] unique_meanings = set(all_meanings) if len(unique_meanings) == 1: # If there is a unique meaning, we preserve it. meaning = unique_meanings.pop() else: # We know len(value_pb.array_value.values) > 0. # If the meaning is not unique, just return all of them. meaning = all_meanings elif value_pb.meaning: # Simple field (int32). meaning = value_pb.meaning return meaning
[ "Get the meaning from a protobuf value.\n\n :type value_pb: :class:`.entity_pb2.Value`\n :param value_pb: The protobuf value to be checked for an\n associated meaning.\n\n :type is_list: bool\n :param is_list: Boolean indicating if the ``value_pb`` contains\n a list value.\n\n :rtype: int\n :returns: The meaning for the ``value_pb`` if one is set, else\n :data:`None`. For a list value, if there are disagreeing\n means it just returns a list of meanings. If all the\n list meanings agree, it just condenses them.\n " ]
Please provide a description of the function:def entity_from_protobuf(pb): key = None if pb.HasField("key"): # Message field (Key) key = key_from_protobuf(pb.key) entity_props = {} entity_meanings = {} exclude_from_indexes = [] for prop_name, value_pb in _property_tuples(pb): value = _get_value_from_value_pb(value_pb) entity_props[prop_name] = value # Check if the property has an associated meaning. is_list = isinstance(value, list) meaning = _get_meaning(value_pb, is_list=is_list) if meaning is not None: entity_meanings[prop_name] = (meaning, value) # Check if ``value_pb`` was excluded from index. Lists need to be # special-cased and we require all ``exclude_from_indexes`` values # in a list agree. if is_list and len(value) > 0: exclude_values = set( value_pb.exclude_from_indexes for value_pb in value_pb.array_value.values ) if len(exclude_values) != 1: raise ValueError( "For an array_value, subvalues must either " "all be indexed or all excluded from " "indexes." ) if exclude_values.pop(): exclude_from_indexes.append(prop_name) else: if value_pb.exclude_from_indexes: exclude_from_indexes.append(prop_name) entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes) entity.update(entity_props) entity._meanings.update(entity_meanings) return entity
[ "Factory method for creating an entity based on a protobuf.\n\n The protobuf should be one returned from the Cloud Datastore\n Protobuf API.\n\n :type pb: :class:`.entity_pb2.Entity`\n :param pb: The Protobuf representing the entity.\n\n :rtype: :class:`google.cloud.datastore.entity.Entity`\n :returns: The entity derived from the protobuf.\n " ]
Please provide a description of the function:def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False): if name not in entity._meanings: return meaning, orig_value = entity._meanings[name] # Only add the meaning back to the protobuf if the value is # unchanged from when it was originally read from the API. if orig_value is not value: return # For lists, we set meaning on each sub-element. if is_list: if not isinstance(meaning, list): meaning = itertools.repeat(meaning) val_iter = six.moves.zip(value_pb.array_value.values, meaning) for sub_value_pb, sub_meaning in val_iter: if sub_meaning is not None: sub_value_pb.meaning = sub_meaning else: value_pb.meaning = meaning
[ "Add meaning information (from an entity) to a protobuf.\n\n :type entity: :class:`google.cloud.datastore.entity.Entity`\n :param entity: The entity to be turned into a protobuf.\n\n :type name: str\n :param name: The name of the property.\n\n :type value: object\n :param value: The current value stored as property ``name``.\n\n :type value_pb: :class:`.entity_pb2.Value`\n :param value_pb: The protobuf value to add meaning / meanings to.\n\n :type is_list: bool\n :param is_list: (Optional) Boolean indicating if the ``value`` is\n a list value.\n " ]
Please provide a description of the function:def entity_to_protobuf(entity): entity_pb = entity_pb2.Entity() if entity.key is not None: key_pb = entity.key.to_protobuf() entity_pb.key.CopyFrom(key_pb) for name, value in entity.items(): value_is_list = isinstance(value, list) value_pb = _new_value_pb(entity_pb, name) # Set the appropriate value. _set_protobuf_value(value_pb, value) # Add index information to protobuf. if name in entity.exclude_from_indexes: if not value_is_list: value_pb.exclude_from_indexes = True for sub_value in value_pb.array_value.values: sub_value.exclude_from_indexes = True # Add meaning information to protobuf. _set_pb_meaning_from_entity( entity, name, value, value_pb, is_list=value_is_list ) return entity_pb
[ "Converts an entity into a protobuf.\n\n :type entity: :class:`google.cloud.datastore.entity.Entity`\n :param entity: The entity to be turned into a protobuf.\n\n :rtype: :class:`.entity_pb2.Entity`\n :returns: The protobuf representing the entity.\n " ]
Please provide a description of the function:def get_read_options(eventual, transaction_id): if transaction_id is None: if eventual: return datastore_pb2.ReadOptions( read_consistency=datastore_pb2.ReadOptions.EVENTUAL ) else: return datastore_pb2.ReadOptions() else: if eventual: raise ValueError("eventual must be False when in a transaction") else: return datastore_pb2.ReadOptions(transaction=transaction_id)
[ "Validate rules for read options, and assign to the request.\n\n Helper method for ``lookup()`` and ``run_query``.\n\n :type eventual: bool\n :param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG``\n consistency should be used.\n\n :type transaction_id: bytes\n :param transaction_id: A transaction identifier (may be null).\n\n :rtype: :class:`.datastore_pb2.ReadOptions`\n :returns: The read options corresponding to the inputs.\n :raises: :class:`ValueError` if ``eventual`` is ``True`` and the\n ``transaction_id`` is not ``None``.\n " ]