Skip to content

API Reference

Create

Bases: CreateInterface, RESTInterface

Source code in lqs/client/core/create.py
class Create(CreateInterface, RESTInterface):
    """Create-side REST client for the "lqs" service.

    Each private ``_<resource>`` method forwards its keyword params to
    ``self._create_resource`` (provided by ``RESTInterface``) together with
    the resource's endpoint path and the model class used to parse the
    response. Identifiers that belong in the URL path (``digestion_id``,
    ``log_id``, ...) and the optional ``lock_token`` are popped out of the
    params before the remainder is sent as the creation payload.

    NOTE(review): the keyword ``additiona_params`` used throughout looks
    like a typo of ``additional_params`` — confirm it matches the actual
    ``_create_resource`` signature before renaming it anywhere.
    """

    # Service identifier consumed by the RESTInterface machinery.
    service: str = "lqs"

    def __init__(self, app):
        super().__init__(app=app)

    def _api_key(self, **params):
        return self._create_resource("apiKeys", params, models.APIKeyDataResponse)

    def _digestion(self, **params):
        # lock_token is passed alongside the request, not in the creation payload.
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            "digestions",
            params,
            models.DigestionDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _digestion_part(self, **params):
        # digestion_id becomes part of the endpoint path.
        digestion_id = params.pop("digestion_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"digestions/{digestion_id}/parts",
            params,
            models.DigestionPartDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _digestion_topic(self, **params):
        digestion_id = params.pop("digestion_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"digestions/{digestion_id}/topics",
            params,
            models.DigestionTopicDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _hook(self, **params):
        # Hooks are scoped to a workflow; no lock token is involved.
        workflow_id = params.pop("workflow_id")
        return self._create_resource(
            f"workflows/{workflow_id}/hooks", params, models.HookDataResponse
        )

    def _group(self, **params):
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            "groups",
            params,
            models.GroupDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _ingestion(self, **params):
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            "ingestions",
            params,
            models.IngestionDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _ingestion_part(self, **params):
        ingestion_id = params.pop("ingestion_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"ingestions/{ingestion_id}/parts",
            params,
            models.IngestionPartDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _label(self, **params):
        return self._create_resource("labels", params, models.LabelDataResponse)

    def _log(self, **params):
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            "logs",
            params,
            models.LogDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _log_object(self, **params):
        log_id = params.pop("log_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"logs/{log_id}/objects",
            params,
            models.ObjectDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _log_object_part(self, **params):
        # Both log_id and object_key form the endpoint path.
        log_id = params.pop("log_id")
        object_key = params.pop("object_key")
        return self._create_resource(
            f"logs/{log_id}/objects/{object_key}/parts",
            params,
            models.ObjectPartDataResponse,
        )

    # Object-store-scoped object creation is intentionally unsupported here.
    def _object(self, **params):
        raise NotImplementedError

    def _object_part(self, **params):
        raise NotImplementedError

    def _object_store(self, **params):
        return self._create_resource(
            "objectStores", params, models.ObjectStoreDataResponse
        )

    def _query(self, **params):
        log_id = params.pop("log_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"logs/{log_id}/queries",
            params,
            models.QueryDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _record(self, **params):
        topic_id = params.pop("topic_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"topics/{topic_id}/records",
            params,
            models.RecordDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _role(self, **params):
        return self._create_resource("roles", params, models.RoleDataResponse)

    def _tag(self, **params):
        log_id = params.pop("log_id")
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            f"logs/{log_id}/tags",
            params,
            models.TagDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _topic(self, **params):
        lock_token = params.pop("lock_token", None)
        return self._create_resource(
            "topics",
            params,
            models.TopicDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _user(self, **params):
        return self._create_resource("users", params, models.UserDataResponse)

    def _workflow(self, **params):
        return self._create_resource("workflows", params, models.WorkflowDataResponse)

digestion(log_id, name=None, note=None, context=None, locked=False, workflow_id=None, workflow_context=None, state=ProcessState.ready, lock_token=None)

Creates a digestion.

Parameters:

Name Type Description Default
log_id UUID

The ID of the log to which the digestion should be added.

required
name optional

The name of the digestion.

None
context optional

The context to use for the digestion.

None
note optional

A note about the digestion.

None
locked optional

Whether the digestion is locked. Defaults to False.

False
workflow_id optional

The ID of the workflow to use for the digestion.

None
workflow_context optional

The context to use for the workflow.

None
state optional

The state of the digestion. Defaults to ProcessState.ready.

ready

Returns: A data response with the created digestion.

Source code in lqs/interface/core/create.py
def digestion(
    self,
    log_id: UUID,
    name: Optional[str] = None,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    locked: Optional[bool] = False,
    workflow_id: Optional[UUID] = None,
    workflow_context: Optional[dict] = None,
    state: ProcessState = ProcessState.ready,
    lock_token: Optional[str] = None,
):
    """
    Creates a digestion.

    Args:
        log_id: The ID of the log to which the digestion should be added.
        name (optional): The name of the digestion.
        note (optional): A note about the digestion.
        context (optional): The context to use for the digestion.
        locked (optional): Whether the digestion is locked. Defaults to False.
        workflow_id (optional): The ID of the workflow to use for the digestion.
        workflow_context (optional): The context to use for the workflow.
        state (optional): The state of the digestion. Defaults to ProcessState.ready.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created digestion.
    """

    return self._digestion(
        log_id=log_id,
        name=name,
        note=note,
        context=context,
        locked=locked,
        workflow_id=workflow_id,
        workflow_context=workflow_context,
        state=state,
        lock_token=lock_token,
    )

digestion_part(digestion_id, sequence, locked=False, workflow_id=None, workflow_context=None, state=ProcessState.ready, index=None, lock_token=None)

Creates a digestion part.

Parameters:

Name Type Description Default
digestion_id UUID

The ID of the digestion to which the digestion part should be added.

required
sequence int

The sequence of the digestion part.

required
locked optional

Whether the digestion part is locked. Defaults to False.

False
workflow_id optional

The ID of the workflow to use for the digestion part.

None
workflow_context optional

The context to use for the workflow.

None
state optional

The state of the digestion part. Defaults to ProcessState.ready.

ready
index optional

The index of the digestion part.

None

Returns: A data response with the created digestion part.

Source code in lqs/interface/core/create.py
def digestion_part(
    self,
    digestion_id: UUID,
    sequence: int,
    locked: Optional[bool] = False,
    workflow_id: Optional[UUID] = None,
    workflow_context: Optional[dict] = None,
    state: ProcessState = ProcessState.ready,
    index: Optional[List[models.DigestionPartIndex]] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a digestion part.

    Args:
        digestion_id: The ID of the digestion to which the digestion part should be added.
        sequence: The sequence of the digestion part.
        locked (optional): Whether the digestion part is locked. Defaults to False.
        workflow_id (optional): The ID of the workflow to use for the digestion part.
        workflow_context (optional): The context to use for the workflow.
        state (optional): The state of the digestion part. Defaults to ProcessState.ready.
        index (optional): The index of the digestion part.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created digestion part.
    """
    return self._digestion_part(
        digestion_id=digestion_id,
        sequence=sequence,
        locked=locked,
        workflow_id=workflow_id,
        workflow_context=workflow_context,
        state=state,
        index=index,
        lock_token=lock_token,
    )

digestion_topic(digestion_id, topic_id, start_time=None, end_time=None, frequency=None, query_data_filter=None, context_filter=None, lock_token=None)

Creates a digestion topic.

Parameters:

Name Type Description Default
digestion_id UUID

The ID of the digestion to which the digestion topic should be added.

required
topic_id UUID

The ID of the topic to be digested.

required
start_time optional

The start time of the digestion topic.

None
end_time optional

The end time of the digestion topic.

None
frequency optional

The frequency of the digestion topic.

None
query_data_filter optional

The data filter of the digestion topic.

None
context_filter optional

The context filter of the digestion topic.

None

Returns: A data response with the created digestion topic.

Source code in lqs/interface/core/create.py
def digestion_topic(
    self,
    digestion_id: UUID,
    topic_id: UUID,
    start_time: Optional[int] = None,
    end_time: Optional[int] = None,
    frequency: Optional[float] = None,
    query_data_filter: Optional[dict] = None,
    context_filter: Optional[dict] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a digestion topic.

    Args:
        digestion_id: The ID of the digestion to which the digestion topic should be added.
        topic_id: The ID of the topic to be digested.
        start_time (optional): The start time of the digestion topic.
        end_time (optional): The end time of the digestion topic.
        frequency (optional): The frequency of the digestion topic.
        query_data_filter (optional): The data filter of the digestion topic.
        context_filter (optional): The context filter of the digestion topic.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created digestion topic.
    """
    return self._digestion_topic(
        digestion_id=digestion_id,
        topic_id=topic_id,
        start_time=start_time,
        end_time=end_time,
        frequency=frequency,
        query_data_filter=query_data_filter,
        context_filter=context_filter,
        lock_token=lock_token,
    )

group(name, note=None, context=None, locked=False, default_workflow_id=None)

Creates a group.

Parameters:

Name Type Description Default
name str

The name of the group.

required
note optional

A note about the group.

None
context optional

The context to use for the group.

None
locked optional

Whether the group is locked. Defaults to False.

False
default_workflow_id optional

The ID of the default workflow for the group.

None

Returns: A data response with the created group.

Source code in lqs/interface/core/create.py
def group(
    self,
    name: str,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    locked: Optional[bool] = False,
    default_workflow_id: Optional[UUID] = None,
):
    """
    Create a new group.

    Args:
        name: The name of the group.
        note (optional): A note about the group.
        context (optional): The context to use for the group.
        locked (optional): Whether the group is locked. Defaults to False.
        default_workflow_id (optional): The ID of the default workflow for the group.
    Returns:
        A data response with the created group.
    """
    payload = {
        "name": name,
        "note": note,
        "context": context,
        "locked": locked,
        "default_workflow_id": default_workflow_id,
    }
    return self._group(**payload)

hook(workflow_id, trigger_process, trigger_state, name=None, note=None, context=None, managed=False, disabled=False, uri=None, secret=None)

Creates a hook.

Parameters:

Name Type Description Default
workflow_id UUID

The ID of the workflow to which the hook should be added.

required
trigger_process str

The process to trigger.

required
trigger_state str

The state to trigger.

required
name optional

The name of the hook.

None
note optional

A note about the hook.

None
context optional

The context to use for the hook.

None
managed optional

Whether the hook is managed. Defaults to False.

False
disabled optional

Whether the hook is disabled. Defaults to False.

False
uri optional

The URI of the hook.

None
secret optional

The secret of the hook.

None

Returns: A data response with the created hook.

Source code in lqs/interface/core/create.py
def hook(
    self,
    workflow_id: UUID,
    trigger_process: str,
    trigger_state: str,
    name: Optional[str] = None,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    managed: Optional[bool] = False,
    disabled: Optional[bool] = False,
    uri: Optional[str] = None,
    secret: Optional[str] = None,
):
    """
    Create a hook on the given workflow.

    Args:
        workflow_id: The ID of the workflow to which the hook should be added.
        trigger_process: The process to trigger.
        trigger_state: The state to trigger.
        name (optional): The name of the hook.
        note (optional): A note about the hook.
        context (optional): The context to use for the hook.
        managed (optional): Whether the hook is managed. Defaults to False.
        disabled (optional): Whether the hook is disabled. Defaults to False.
        uri (optional): The URI of the hook.
        secret (optional): The secret of the hook.
    Returns:
        A data response with the created hook.
    """
    hook_fields = dict(
        workflow_id=workflow_id,
        trigger_process=trigger_process,
        trigger_state=trigger_state,
        name=name,
        note=note,
        context=context,
        managed=managed,
        disabled=disabled,
        uri=uri,
        secret=secret,
    )
    return self._hook(**hook_fields)

ingestion(log_id, name=None, note=None, context=None, object_store_id=None, object_key=None, locked=False, workflow_id=None, workflow_context=None, state=ProcessState.ready, lock_token=None)

Creates an ingestion.

Parameters:

Name Type Description Default
log_id UUID

The ID of the log to which the ingestion should be added.

required
name optional

The name of the ingestion.

None
note optional

A note about the ingestion.

None
context optional

The context to use for the ingestion.

None
object_store_id optional

The ID of the object store to use for the ingestion.

None
object_key optional

The key of the object to use for the ingestion.

None
locked optional

Whether the ingestion is locked. Defaults to False.

False
workflow_id optional

The ID of the workflow to use for the ingestion.

None
workflow_context optional

The context to use for the workflow.

None
state optional

The state of the ingestion. Defaults to ProcessState.ready.

ready

Returns: A data response with the created ingestion.

Source code in lqs/interface/core/create.py
def ingestion(
    self,
    log_id: UUID,
    name: Optional[str] = None,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    object_store_id: Optional[UUID] = None,
    object_key: Optional[str] = None,
    locked: Optional[bool] = False,
    workflow_id: Optional[UUID] = None,
    workflow_context: Optional[dict] = None,
    state: ProcessState = ProcessState.ready,
    lock_token: Optional[str] = None,
):
    """
    Creates an ingestion.

    Args:
        log_id: The ID of the log to which the ingestion should be added.
        name (optional): The name of the ingestion.
        note (optional): A note about the ingestion.
        context (optional): The context to use for the ingestion.
        object_store_id (optional): The ID of the object store to use for the ingestion.
        object_key (optional): The key of the object to use for the ingestion.
        locked (optional): Whether the ingestion is locked. Defaults to False.
        workflow_id (optional): The ID of the workflow to use for the ingestion.
        workflow_context (optional): The context to use for the workflow.
        state (optional): The state of the ingestion. Defaults to ProcessState.ready.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created ingestion.
    """
    return self._ingestion(
        log_id=log_id,
        name=name,
        note=note,
        context=context,
        object_store_id=object_store_id,
        object_key=object_key,
        locked=locked,
        workflow_id=workflow_id,
        workflow_context=workflow_context,
        state=state,
        lock_token=lock_token,
    )

ingestion_part(ingestion_id, sequence, source=None, locked=False, workflow_id=None, workflow_context=None, state=ProcessState.ready, index=None, lock_token=None)

Creates an ingestion part.

Parameters:

Name Type Description Default
ingestion_id UUID

The ID of the ingestion to which the ingestion part should be added.

required
sequence int

The sequence of the ingestion part.

required
source optional

The source of the ingestion part.

None
locked optional

Whether the ingestion part is locked. Defaults to False.

False
workflow_id optional

The ID of the workflow to use for the ingestion part.

None
workflow_context optional

The context to use for the workflow.

None
state optional

The state of the ingestion part. Defaults to ProcessState.ready.

ready
index optional

The index of the ingestion part.

None

Returns: A data response with the created ingestion part.

Source code in lqs/interface/core/create.py
def ingestion_part(
    self,
    ingestion_id: UUID,
    sequence: int,
    source: Optional[str] = None,
    locked: Optional[bool] = False,
    workflow_id: Optional[UUID] = None,
    workflow_context: Optional[dict] = None,
    state: ProcessState = ProcessState.ready,
    index: Optional[List[models.IngestionPartIndex]] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates an ingestion part.

    Args:
        ingestion_id: The ID of the ingestion to which the ingestion part should be added.
        sequence: The sequence of the ingestion part.
        source (optional): The source of the ingestion part.
        locked (optional): Whether the ingestion part is locked. Defaults to False.
        workflow_id (optional): The ID of the workflow to use for the ingestion part.
        workflow_context (optional): The context to use for the workflow.
        state (optional): The state of the ingestion part. Defaults to ProcessState.ready.
        index (optional): The index of the ingestion part.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created ingestion part.
    """
    return self._ingestion_part(
        ingestion_id=ingestion_id,
        sequence=sequence,
        source=source,
        locked=locked,
        workflow_id=workflow_id,
        workflow_context=workflow_context,
        state=state,
        index=index,
        lock_token=lock_token,
    )

label(value, note=None)

Creates a label.

Parameters:

Name Type Description Default
value str

The value of the label.

required
note optional

A note about the label.

None

Returns: A data response with the created label.

Source code in lqs/interface/core/create.py
def label(self, value: str, note: Optional[str] = None):
    """
    Create a label with the given value.

    Args:
        value: The value of the label.
        note (optional): A note about the label.
    Returns:
        A data response with the created label.
    """
    return self._label(value=value, note=note)

log(group_id, name, note=None, context=None, locked=False, default_workflow_id=None, lock_token=None)

Creates a log.

Parameters:

Name Type Description Default
group_id UUID

The ID of the group to which the log should be added.

required
name str

The name of the log.

required
note optional

A note about the log.

None
context optional

The context to use for the log.

None
locked optional

Whether the log is locked. Defaults to False.

False
default_workflow_id optional

The ID of the default workflow for the log.

None

Returns: A data response with the created log.

Source code in lqs/interface/core/create.py
def log(
    self,
    group_id: UUID,
    name: str,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    locked: Optional[bool] = False,
    default_workflow_id: Optional[UUID] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a log.

    Args:
        group_id: The ID of the group to which the log should be added.
        name: The name of the log.
        note (optional): A note about the log.
        context (optional): The context to use for the log.
        locked (optional): Whether the log is locked. Defaults to False.
        default_workflow_id (optional): The ID of the default workflow for the log.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created log.
    """
    return self._log(
        group_id=group_id,
        name=name,
        note=note,
        context=context,
        locked=locked,
        default_workflow_id=default_workflow_id,
        lock_token=lock_token,
    )

log_object(key, log_id, content_type=None, lock_token=None)

Creates a log object.

Parameters:

Name Type Description Default
key str

The key of the log object.

required
log_id UUID

The ID of the log to which the log object should be added.

required
content_type optional

The content type of the log object.

None

Returns: A data response with the created log object.

Source code in lqs/interface/core/create.py
def log_object(
    self,
    key: str,
    log_id: UUID,
    content_type: Optional[str] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a log object.

    Args:
        key: The key of the log object.
        log_id: The ID of the log to which the log object should be added.
        content_type (optional): The content type of the log object.
        lock_token (optional): A lock token to pass along with the request.
    Returns:
        A data response with the created log object.
    """
    return self._log_object(
        key=key,
        log_id=log_id,
        content_type=content_type,
        lock_token=lock_token,
    )

log_object_part(object_key, size, log_id, part_number=None)

Creates a log object part.

Parameters:

Name Type Description Default
object_key str

The key of the log object to which the log object part should be added.

required
size int

The size of the log object part.

required
log_id UUID

The ID of the log to which the log object part should be added.

required
part_number optional

The part number of the log object part.

None

Returns: A data response with the created log object part.

Source code in lqs/interface/core/create.py
def log_object_part(
    self,
    object_key: str,
    size: int,
    log_id: UUID,
    part_number: Optional[int] = None,
):
    """
    Create a part of a log object.

    Args:
        object_key: The key of the log object to which the log object part should be added.
        size: The size of the log object part.
        log_id: The ID of the log to which the log object part should be added.
        part_number (optional): The part number of the log object part.
    Returns:
        A data response with the created log object part.
    """
    part_fields = {
        "object_key": object_key,
        "size": size,
        "log_id": log_id,
        "part_number": part_number,
    }
    return self._log_object_part(**part_fields)

object(key, object_store_id, content_type=None)

Creates an object.

Parameters:

Name Type Description Default
key str

The key of the object.

required
object_store_id UUID

The ID of the object store to which the object should be added.

required
content_type optional

The content type of the object.

None

Returns: A data response with the created object.

Source code in lqs/interface/core/create.py
def object(
    self,
    key: str,
    object_store_id: UUID,
    content_type: Optional[str] = None,
):
    """
    Create an object in an object store.

    Args:
        key: The key of the object.
        object_store_id: The ID of the object store to which the object should be added.
        content_type (optional): The content type of the object.
    Returns:
        A data response with the created object.
    """
    return self._object(
        key=key, object_store_id=object_store_id, content_type=content_type
    )

object_part(object_key, size, object_store_id, part_number=None)

Creates an object part.

Parameters:

Name Type Description Default
object_key str

The key of the object to which the object part should be added.

required
size int

The size of the object part.

required
object_store_id UUID

The ID of the object store to which the object part should be added.

required
part_number optional

The part number of the object part.

None

Returns: A data response with the created object part.

Source code in lqs/interface/core/create.py
def object_part(
    self,
    object_key: str,
    size: int,
    object_store_id: UUID,
    part_number: Optional[int] = None,
):
    """
    Create a part of an object.

    Args:
        object_key: The key of the object to which the object part should be added.
        size: The size of the object part.
        object_store_id: The ID of the object store to which the object part should be added.
        part_number (optional): The part number of the object part.
    Returns:
        A data response with the created object part.
    """
    part_fields = dict(
        object_key=object_key,
        size=size,
        object_store_id=object_store_id,
        part_number=part_number,
    )
    return self._object_part(**part_fields)

object_store(bucket_name, access_key_id=None, secret_access_key=None, region_name=None, endpoint_url=None, note=None, context=None, disabled=False)

Creates an object store.

Parameters:

Name Type Description Default
bucket_name str

The name of the bucket.

required
access_key_id optional

The access key ID of the object store.

None
secret_access_key optional

The secret access key of the object store.

None
region_name optional

The region name of the object store.

None
endpoint_url optional

The endpoint URL of the object store.

None
note optional

A note about the object store.

None
context optional

The context to use for the object store.

None
disabled optional

Whether the object store is disabled. Defaults to False.

False

Returns: A data response with the created object store.

Source code in lqs/interface/core/create.py
def object_store(
    self,
    bucket_name: str,
    access_key_id: Optional[str] = None,
    secret_access_key: Optional[str] = None,
    region_name: Optional[str] = None,
    endpoint_url: Optional[str] = None,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    disabled: Optional[bool] = False,
):
    """
    Register an object store backed by the given bucket.

    Args:
        bucket_name: The name of the bucket.
        access_key_id (optional): The access key ID of the object store.
        secret_access_key (optional): The secret access key of the object store.
        region_name (optional): The region name of the object store.
        endpoint_url (optional): The endpoint URL of the object store.
        note (optional): A note about the object store.
        context (optional): The context to use for the object store.
        disabled (optional): Whether the object store is disabled. Defaults to False.
    Returns:
        A data response with the created object store.
    """
    store_fields = {
        "bucket_name": bucket_name,
        "access_key_id": access_key_id,
        "secret_access_key": secret_access_key,
        "region_name": region_name,
        "endpoint_url": endpoint_url,
        "note": note,
        "context": context,
        "disabled": disabled,
    }
    return self._object_store(**store_fields)

query(log_id, name=None, note=None, context=None, statement=None, parameters=None)

Creates a query.

Parameters:

Name Type Description Default
log_id UUID

The ID of the log to which the query should be added.

required
name optional

The name of the query.

None
note optional

A note about the query.

None
context optional

The context to use for the query.

None
statement optional

The statement of the query.

None
parameters optional

The parameters of the query.

None

Returns: A data response with the created query.

Source code in lqs/interface/core/create.py
def query(
    self,
    log_id: UUID,
    name: Optional[str] = None,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    statement: Optional[str] = None,
    parameters: Optional[dict] = None,
):
    """
    Create a query against a log.

    Args:
        log_id: The ID of the log to which the query should be added.
        name (optional): The name of the query.
        note (optional): A note about the query.
        context (optional): The context to use for the query.
        statement (optional): The statement of the query.
        parameters (optional): The parameters of the query.
    Returns:
        A data response with the created query.
    """
    query_fields = {
        "log_id": log_id,
        "name": name,
        "note": note,
        "context": context,
        "statement": statement,
        "parameters": parameters,
    }
    return self._query(**query_fields)

record(timestamp, topic_id, note=None, context=None, locked=False, query_data=None, auxiliary_data=None, data_offset=None, data_length=None, chunk_compression=None, chunk_offset=None, chunk_length=None, source=None, lock_token=None)

Creates a record.

Parameters:

Name Type Description Default
timestamp int

The timestamp of the record.

required
topic_id UUID

The ID of the topic to which the record should be added.

required
note optional

A note about the record.

None
context optional

The context to use for the record.

None
locked optional

Whether the record is locked. Defaults to False.

False
query_data optional

A JSON representation of the record's message data which is queryable.

None
auxiliary_data optional

A JSON representation of the record's message data which is not queryable.

None
data_offset optional

The data offset of the record.

None
data_length optional

The data length of the record.

None
chunk_compression optional

The chunk compression of the record.

None
chunk_offset optional

The chunk offset of the record.

None
chunk_length optional

The chunk length of the record.

None
source optional

The source of the record.

None

Returns: A data response with the created record.

Source code in lqs/interface/core/create.py
def record(
    self,
    timestamp: int,
    topic_id: UUID,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    locked: Optional[bool] = False,
    query_data: Optional[dict] = None,
    auxiliary_data: Optional[dict] = None,
    data_offset: Optional[int] = None,
    data_length: Optional[int] = None,
    chunk_compression: Optional[str] = None,
    chunk_offset: Optional[int] = None,
    chunk_length: Optional[int] = None,
    source: Optional[str] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a record.

    Args:
        timestamp: The timestamp of the record.
        topic_id: The ID of the topic to which the record should be added.
        note (optional): A note about the record.
        context (optional): The context to use for the record.
        locked (optional): Whether the record is locked. Defaults to False.
        query_data (optional): A JSON representation of the record's message data which is queryable.
        auxiliary_data (optional): A JSON representation of the record's message data which is not queryable.
        data_offset (optional): The data offset of the record.
        data_length (optional): The data length of the record.
        chunk_compression (optional): The chunk compression of the record.
        chunk_offset (optional): The chunk offset of the record.
        chunk_length (optional): The chunk length of the record.
        source (optional): The source of the record.
        lock_token (optional): The lock token to use when creating the record.
    Returns:
        A data response with the created record.
    """
    # Assemble the creation payload once, then hand it to the implementation hook.
    payload = {
        "timestamp": timestamp,
        "topic_id": topic_id,
        "note": note,
        "context": context,
        "locked": locked,
        "query_data": query_data,
        "auxiliary_data": auxiliary_data,
        "data_offset": data_offset,
        "data_length": data_length,
        "chunk_compression": chunk_compression,
        "chunk_offset": chunk_offset,
        "chunk_length": chunk_length,
        "source": source,
        "lock_token": lock_token,
    }
    return self._record(**payload)

tag(label_id, log_id, topic_id=None, note=None, context=None, start_time=None, end_time=None, lock_token=None)

Creates a tag.

Parameters:

Name Type Description Default
label_id UUID

The ID of the label to which the tag should be added.

required
log_id UUID

The ID of the log to which the tag should be added.

required
topic_id optional

The ID of the topic to which the tag should be added.

None
note optional

A note about the tag.

None
context optional

The context to use for the tag.

None
start_time optional

The start time of the tag.

None
end_time optional

The end time of the tag.

None
lock_token optional

The lock token to use when creating the tag.

None

Returns: A data response with the created tag.

Source code in lqs/interface/core/create.py
def tag(
    self,
    label_id: UUID,
    log_id: UUID,
    topic_id: Optional[UUID] = None,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    start_time: Optional[int] = None,
    end_time: Optional[int] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a tag.

    Args:
        label_id: The ID of the label to which the tag should be added.
        log_id: The ID of the log to which the tag should be added.
        topic_id (optional): The ID of the topic to which the tag should be added.
        note (optional): A note about the tag.
        context (optional): The context to use for the tag.
        start_time (optional): The start time of the tag.
        end_time (optional): The end time of the tag.
        lock_token (optional): The lock token to use when creating the tag.
    Returns:
        A data response with the created tag.
    """
    # Forward every field to the implementation hook as keyword arguments.
    payload = {
        "label_id": label_id,
        "log_id": log_id,
        "topic_id": topic_id,
        "note": note,
        "context": context,
        "start_time": start_time,
        "end_time": end_time,
        "lock_token": lock_token,
    }
    return self._tag(**payload)

topic(log_id, name, note=None, context=None, associated_topic_id=None, locked=False, strict=False, type_name=None, type_encoding=None, type_data=None, type_schema=None, lock_token=None)

Creates a topic.

Parameters:

Name Type Description Default
log_id UUID

The ID of the log to which the topic should be added.

required
name str

The name of the topic.

required
note optional

A note about the topic.

None
context optional

The context to use for the topic.

None
associated_topic_id optional

The ID of the associated topic.

None
locked optional

Whether the topic is locked. Defaults to False.

False
strict optional

Whether the topic is strict. Defaults to False.

False
type_name optional

The type name of the topic.

None
type_encoding optional

The type encoding of the topic.

None
type_data optional

The type data of the topic.

None
type_schema optional

The type schema of the topic.

None
lock_token optional

The lock token to use when creating the topic.

None

Returns: A data response with the created topic.

Source code in lqs/interface/core/create.py
def topic(
    self,
    log_id: UUID,
    name: str,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    associated_topic_id: Optional[UUID] = None,
    locked: Optional[bool] = False,
    strict: Optional[bool] = False,
    type_name: Optional[str] = None,
    type_encoding: Optional[str] = None,
    type_data: Optional[str] = None,
    type_schema: Optional[dict] = None,
    lock_token: Optional[str] = None,
):
    """
    Creates a topic.

    Args:
        log_id: The ID of the log to which the topic should be added.
        name: The name of the topic.
        note (optional): A note about the topic.
        context (optional): The context to use for the topic.
        associated_topic_id (optional): The ID of the associated topic.
        locked (optional): Whether the topic is locked. Defaults to False.
        strict (optional): Whether the topic is strict. Defaults to False.
        type_name (optional): The type name of the topic.
        type_encoding (optional): The type encoding of the topic.
        type_data (optional): The type data of the topic.
        type_schema (optional): The type schema of the topic.
        lock_token (optional): The lock token to use when creating the topic.
    Returns:
        A data response with the created topic.
    """
    # Collect the creation fields and delegate to the implementation hook.
    payload = {
        "log_id": log_id,
        "name": name,
        "note": note,
        "context": context,
        "associated_topic_id": associated_topic_id,
        "locked": locked,
        "strict": strict,
        "type_name": type_name,
        "type_encoding": type_encoding,
        "type_data": type_data,
        "type_schema": type_schema,
        "lock_token": lock_token,
    }
    return self._topic(**payload)

workflow(name, note=None, context=None, default=False, disabled=False, managed=False, context_schema=None)

Creates a workflow.

Parameters:

Name Type Description Default
name str

The name of the workflow.

required
note optional

A note about the workflow.

None
context optional

The context to use for the workflow.

None
default optional

Whether the workflow is default. Defaults to False.

False
disabled optional

Whether the workflow is disabled. Defaults to False.

False
managed optional

Whether the workflow is managed. Defaults to False.

False
context_schema optional

The context schema of the workflow.

None

Returns: A data response with the created workflow.

Source code in lqs/interface/core/create.py
def workflow(
    self,
    name: str,
    note: Optional[str] = None,
    context: Optional[dict] = None,
    default: Optional[bool] = False,
    disabled: Optional[bool] = False,
    managed: Optional[bool] = False,
    context_schema: Optional[dict] = None,
):
    """
    Creates a workflow.

    Args:
        name: The name of the workflow.
        note (optional): A note about the workflow.
        context (optional): The context to use for the workflow.
        default (optional): Whether the workflow is default. Defaults to False.
        disabled (optional): Whether the workflow is disabled. Defaults to False.
        managed (optional): Whether the workflow is managed. Defaults to False.
        context_schema (optional): The context schema of the workflow.
    Returns:
        A data response with the created workflow.
    """
    # Bundle the creation fields and delegate to the implementation hook.
    payload = {
        "name": name,
        "note": note,
        "context": context,
        "default": default,
        "disabled": disabled,
        "managed": managed,
        "context_schema": context_schema,
    }
    return self._workflow(**payload)

List

Bases: ListInterface, RESTInterface

Source code in lqs/client/core/list.py
class List(ListInterface, RESTInterface):
    """REST client implementation of the list operations for LQS resources.

    Each private method builds the resource path (popping any path-level IDs
    out of the filter params first), appends the remaining params as a URL
    query string, and issues a GET that is parsed into the matching list
    response model.
    """

    service: str = "lqs"

    def __init__(self, app):
        super().__init__(app=app)

    def _api_key(self, **params):
        path = "apiKeys" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.APIKeyListResponse)

    def _digestion(self, **params):
        path = "digestions" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.DigestionListResponse)

    def _digestion_part(self, **params):
        # digestion_id is part of the path, not the query string.
        digestion_id = params.pop("digestion_id")
        path = f"digestions/{digestion_id}/parts" + self._get_url_param_string(
            params, []
        )
        return self._get_resource(
            path, response_model=models.DigestionPartListResponse
        )

    def _digestion_topic(self, **params):
        digestion_id = params.pop("digestion_id")
        path = f"digestions/{digestion_id}/topics" + self._get_url_param_string(
            params, []
        )
        return self._get_resource(
            path, response_model=models.DigestionTopicListResponse
        )

    def _group(self, **params):
        path = "groups" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.GroupListResponse)

    def _hook(self, **params):
        # Hooks are nested under their workflow.
        workflow_id = params.pop("workflow_id")
        path = f"workflows/{workflow_id}/hooks" + self._get_url_param_string(
            params, []
        )
        return self._get_resource(path, response_model=models.HookListResponse)

    def _ingestion(self, **params):
        path = "ingestions" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.IngestionListResponse)

    def _ingestion_part(self, **params):
        ingestion_id = params.pop("ingestion_id")
        path = f"ingestions/{ingestion_id}/parts" + self._get_url_param_string(
            params, []
        )
        return self._get_resource(
            path, response_model=models.IngestionPartListResponse
        )

    def _label(self, **params):
        path = "labels" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.LabelListResponse)

    def _log(self, **params):
        path = "logs" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.LogListResponse)

    def _log_object(self, **params):
        log_id = params.pop("log_id")
        path = f"logs/{log_id}/objects" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.ObjectListResponse)

    def _log_object_part(self, **params):
        # Both the log ID and the object key are path components.
        log_id = params.pop("log_id")
        object_key = params.pop("object_key")
        path = (
            f"logs/{log_id}/objects/{object_key}/parts"
            + self._get_url_param_string(params, [])
        )
        return self._get_resource(path, response_model=models.ObjectPartListResponse)

    def _object(self, **params):
        object_store_id = params.pop("object_store_id")
        path = (
            f"objectStores/{object_store_id}/objects"
            + self._get_url_param_string(params, [])
        )
        return self._get_resource(path, response_model=models.ObjectListResponse)

    def _object_part(self, **params):
        # Listing object parts is not supported through this client.
        raise NotImplementedError

    def _object_store(self, **params):
        path = "objectStores" + self._get_url_param_string(params, [])
        return self._get_resource(
            path, response_model=models.ObjectStoreListResponse
        )

    def _query(self, **params):
        log_id = params.pop("log_id")
        path = f"logs/{log_id}/queries" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.QueryListResponse)

    def _record(self, **params):
        # Records are nested under their topic.
        topic_id = params.pop("topic_id")
        path = f"topics/{topic_id}/records" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.RecordListResponse)

    def _role(self, **params):
        path = "roles" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.RoleListResponse)

    def _tag(self, **params):
        log_id = params.pop("log_id")
        path = f"logs/{log_id}/tags" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.TagListResponse)

    def _topic(self, **params):
        path = "topics" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.TopicListResponse)

    def _user(self, **params):
        path = "users" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.UserListResponse)

    def _workflow(self, **params):
        path = "workflows" + self._get_url_param_string(params, [])
        return self._get_resource(path, response_model=models.WorkflowListResponse)

digestion(id=None, group_id=None, log_id=None, workflow_id=None, workflow_id_null=None, state=None, name=None, name_like=None, progress_null=None, progress_gte=None, progress_lte=None, error_like=None, note_like=None, context_filter=None, workflow_context_filter=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists digestions.

Source code in lqs/interface/core/list.py
def digestion(
    self,
    id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    workflow_id: Optional[UUID] = None,
    workflow_id_null: Optional[bool] = None,
    state: Optional[str] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    progress_null: Optional[bool] = None,
    progress_gte: Optional[float] = None,
    progress_lte: Optional[float] = None,
    error_like: Optional[str] = None,
    note_like: Optional[str] = None,
    context_filter: Optional[str] = None,
    workflow_context_filter: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists digestions.
    """
    # Forward every filter to the implementation hook as keyword arguments.
    filters = {
        "id": id,
        "group_id": group_id,
        "log_id": log_id,
        "workflow_id": workflow_id,
        "workflow_id_null": workflow_id_null,
        "state": state,
        "name": name,
        "name_like": name_like,
        "progress_null": progress_null,
        "progress_gte": progress_gte,
        "progress_lte": progress_lte,
        "error_like": error_like,
        "note_like": note_like,
        "context_filter": context_filter,
        "workflow_context_filter": workflow_context_filter,
        "include_count": include_count,
        "offset": offset,
        "limit": limit,
        "order": order,
        "sort": sort,
        "created_by": created_by,
        "updated_by": updated_by,
        "deleted_by": deleted_by,
        "updated_by_null": updated_by_null,
        "deleted_by_null": deleted_by_null,
        "updated_at_null": updated_at_null,
        "deleted_at_null": deleted_at_null,
        "created_at_lte": created_at_lte,
        "updated_at_lte": updated_at_lte,
        "deleted_at_lte": deleted_at_lte,
        "created_at_gte": created_at_gte,
        "updated_at_gte": updated_at_gte,
        "deleted_at_gte": deleted_at_gte,
    }
    return self._digestion(**filters)

digestion_part(id=None, group_id=None, log_id=None, sequence=None, digestion_id=None, workflow_id=None, workflow_id_null=None, state=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists digestion parts.

Source code in lqs/interface/core/list.py
def digestion_part(
    self,
    id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    sequence: Optional[int] = None,
    digestion_id: Optional[UUID] = None,
    workflow_id: Optional[UUID] = None,
    workflow_id_null: Optional[bool] = None,
    state: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists digestion parts.
    """
    # Forward every filter to the implementation hook as keyword arguments.
    filters = {
        "id": id,
        "group_id": group_id,
        "log_id": log_id,
        "sequence": sequence,
        "digestion_id": digestion_id,
        "workflow_id": workflow_id,
        "workflow_id_null": workflow_id_null,
        "state": state,
        "include_count": include_count,
        "offset": offset,
        "limit": limit,
        "order": order,
        "sort": sort,
        "created_by": created_by,
        "updated_by": updated_by,
        "deleted_by": deleted_by,
        "updated_by_null": updated_by_null,
        "deleted_by_null": deleted_by_null,
        "updated_at_null": updated_at_null,
        "deleted_at_null": deleted_at_null,
        "created_at_lte": created_at_lte,
        "updated_at_lte": updated_at_lte,
        "deleted_at_lte": deleted_at_lte,
        "created_at_gte": created_at_gte,
        "updated_at_gte": updated_at_gte,
        "deleted_at_gte": deleted_at_gte,
    }
    return self._digestion_part(**filters)

digestion_topic(id=None, digestion_id=None, group_id=None, log_id=None, topic_id=None, start_time_null=None, start_time_gte=None, start_time_lte=None, end_time_null=None, end_time_gte=None, end_time_lte=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists digestion topics.

Source code in lqs/interface/core/list.py
def digestion_topic(
    self,
    id: Optional[UUID] = None,
    digestion_id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    topic_id: Optional[UUID] = None,
    start_time_null: Optional[bool] = None,
    start_time_gte: Optional[models.Int64] = None,
    start_time_lte: Optional[models.Int64] = None,
    end_time_null: Optional[bool] = None,
    end_time_gte: Optional[models.Int64] = None,
    end_time_lte: Optional[models.Int64] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists digestion topics.
    """
    # Forward every filter to the implementation hook as keyword arguments.
    filters = {
        "id": id,
        "group_id": group_id,
        "log_id": log_id,
        "digestion_id": digestion_id,
        "topic_id": topic_id,
        "start_time_null": start_time_null,
        "start_time_gte": start_time_gte,
        "start_time_lte": start_time_lte,
        "end_time_null": end_time_null,
        "end_time_gte": end_time_gte,
        "end_time_lte": end_time_lte,
        "include_count": include_count,
        "offset": offset,
        "limit": limit,
        "order": order,
        "sort": sort,
        "created_by": created_by,
        "updated_by": updated_by,
        "deleted_by": deleted_by,
        "updated_by_null": updated_by_null,
        "deleted_by_null": deleted_by_null,
        "updated_at_null": updated_at_null,
        "deleted_at_null": deleted_at_null,
        "created_at_lte": created_at_lte,
        "updated_at_lte": updated_at_lte,
        "deleted_at_lte": deleted_at_lte,
        "created_at_gte": created_at_gte,
        "updated_at_gte": updated_at_gte,
        "deleted_at_gte": deleted_at_gte,
    }
    return self._digestion_topic(**filters)

group(id=None, name=None, name_like=None, default_workflow_id=None, default_workflow_id_null=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists groups.

Source code in lqs/interface/core/list.py
def group(
    self,
    id: Optional[UUID] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    default_workflow_id: Optional[UUID] = None,
    default_workflow_id_null: Optional[bool] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists groups.
    """
    # Forward every filter to the implementation hook as keyword arguments.
    filters = {
        "id": id,
        "name": name,
        "name_like": name_like,
        "default_workflow_id": default_workflow_id,
        "default_workflow_id_null": default_workflow_id_null,
        "include_count": include_count,
        "offset": offset,
        "limit": limit,
        "order": order,
        "sort": sort,
        "created_by": created_by,
        "updated_by": updated_by,
        "deleted_by": deleted_by,
        "updated_by_null": updated_by_null,
        "deleted_by_null": deleted_by_null,
        "updated_at_null": updated_at_null,
        "deleted_at_null": deleted_at_null,
        "created_at_lte": created_at_lte,
        "updated_at_lte": updated_at_lte,
        "deleted_at_lte": deleted_at_lte,
        "created_at_gte": created_at_gte,
        "updated_at_gte": updated_at_gte,
        "deleted_at_gte": deleted_at_gte,
    }
    return self._group(**filters)

hook(id=None, workflow_id=None, trigger_process=None, trigger_state=None, name=None, name_like=None, note_like=None, managed=None, disabled=None, uri=None, uri_like=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists hooks.

Source code in lqs/interface/core/list.py
def hook(
    self,
    id: Optional[UUID] = None,
    workflow_id: Optional[UUID] = None,
    trigger_process: Optional[str] = None,
    trigger_state: Optional[str] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    note_like: Optional[str] = None,
    managed: Optional[bool] = None,
    disabled: Optional[bool] = None,
    uri: Optional[str] = None,
    uri_like: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists hooks.
    """
    # Forward every filter to the implementation hook as keyword arguments.
    filters = {
        "id": id,
        "workflow_id": workflow_id,
        "trigger_process": trigger_process,
        "trigger_state": trigger_state,
        "name": name,
        "name_like": name_like,
        "note_like": note_like,
        "uri": uri,
        "uri_like": uri_like,
        "managed": managed,
        "disabled": disabled,
        "include_count": include_count,
        "offset": offset,
        "limit": limit,
        "order": order,
        "sort": sort,
        "created_by": created_by,
        "updated_by": updated_by,
        "deleted_by": deleted_by,
        "updated_by_null": updated_by_null,
        "deleted_by_null": deleted_by_null,
        "updated_at_null": updated_at_null,
        "deleted_at_null": deleted_at_null,
        "created_at_lte": created_at_lte,
        "updated_at_lte": updated_at_lte,
        "deleted_at_lte": deleted_at_lte,
        "created_at_gte": created_at_gte,
        "updated_at_gte": updated_at_gte,
        "deleted_at_gte": deleted_at_gte,
    }
    return self._hook(**filters)

ingestion(id=None, group_id=None, log_id=None, object_store_id=None, name=None, name_like=None, object_key=None, object_key_like=None, workflow_id=None, workflow_id_null=None, workflow_context_filter=None, state=None, progress_null=None, progress_gte=None, progress_lte=None, error_like=None, note_like=None, context_filter=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists ingestions.

Source code in lqs/interface/core/list.py
def ingestion(
    self,
    id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    object_store_id: Optional[UUID] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    object_key: Optional[str] = None,
    object_key_like: Optional[str] = None,
    workflow_id: Optional[UUID] = None,
    workflow_id_null: Optional[bool] = None,
    workflow_context_filter: Optional[str] = None,
    state: Optional[str] = None,
    progress_null: Optional[bool] = None,
    progress_gte: Optional[float] = None,
    progress_lte: Optional[float] = None,
    error_like: Optional[str] = None,
    note_like: Optional[str] = None,
    context_filter: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists ingestions.
    """
    # Forward every filter to the implementation hook as keyword arguments.
    filters = {
        "id": id,
        "group_id": group_id,
        "log_id": log_id,
        "object_store_id": object_store_id,
        "name": name,
        "name_like": name_like,
        "object_key": object_key,
        "object_key_like": object_key_like,
        "workflow_id": workflow_id,
        "workflow_id_null": workflow_id_null,
        "workflow_context_filter": workflow_context_filter,
        "state": state,
        "progress_null": progress_null,
        "progress_gte": progress_gte,
        "progress_lte": progress_lte,
        "error_like": error_like,
        "note_like": note_like,
        "context_filter": context_filter,
        "include_count": include_count,
        "offset": offset,
        "limit": limit,
        "order": order,
        "sort": sort,
        "created_by": created_by,
        "updated_by": updated_by,
        "deleted_by": deleted_by,
        "updated_by_null": updated_by_null,
        "deleted_by_null": deleted_by_null,
        "updated_at_null": updated_at_null,
        "deleted_at_null": deleted_at_null,
        "created_at_lte": created_at_lte,
        "updated_at_lte": updated_at_lte,
        "deleted_at_lte": deleted_at_lte,
        "created_at_gte": created_at_gte,
        "updated_at_gte": updated_at_gte,
        "deleted_at_gte": deleted_at_gte,
    }
    return self._ingestion(**filters)

ingestion_part(id=None, group_id=None, log_id=None, ingestion_id=None, sequence=None, source=None, workflow_id=None, workflow_id_null=None, state=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists ingestion parts.

Source code in lqs/interface/core/list.py
def ingestion_part(
    self,
    id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    ingestion_id: Optional[UUID] = None,
    sequence: Optional[int] = None,
    source: Optional[str] = None,
    workflow_id: Optional[UUID] = None,
    workflow_id_null: Optional[bool] = None,
    state: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists ingestion parts.

    Every filter argument is forwarded unchanged, by keyword, to the
    underlying ``_ingestion_part`` implementation.
    """
    # Snapshot the call's arguments before any other local is created,
    # then drop ``self`` so only the filter keywords are forwarded.
    filters = dict(locals())
    del filters["self"]
    return self._ingestion_part(**filters)

label(id=None, value=None, value_like=None, note_like=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists labels.

Source code in lqs/interface/core/list.py
def label(
    self,
    id: Optional[UUID] = None,
    value: Optional[str] = None,
    value_like: Optional[str] = None,
    note_like: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists labels.

    All filter arguments are passed through, by keyword, to the
    underlying ``_label`` implementation.
    """
    # Capture arguments first (no other locals exist yet), strip
    # ``self``, and forward the rest verbatim.
    filters = dict(locals())
    del filters["self"]
    return self._label(**filters)

log(id=None, group_id=None, default_workflow_id=None, default_workflow_id_null=None, name=None, name_like=None, locked=None, note_like=None, context_filter=None, time_adjustment_null=None, time_adjustment_gte=None, time_adjustment_lte=None, start_time_null=None, start_time_gte=None, start_time_lte=None, end_time_null=None, end_time_gte=None, end_time_lte=None, record_size_gte=None, record_size_lte=None, record_count_gte=None, record_count_lte=None, object_size_gte=None, object_size_lte=None, object_count_gte=None, object_count_lte=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists logs.

Source code in lqs/interface/core/list.py
def log(
    self,
    id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    default_workflow_id: Optional[UUID] = None,
    default_workflow_id_null: Optional[bool] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    locked: Optional[bool] = None,
    note_like: Optional[str] = None,
    context_filter: Optional[str] = None,
    time_adjustment_null: Optional[bool] = None,
    time_adjustment_gte: Optional[models.Int64] = None,
    time_adjustment_lte: Optional[models.Int64] = None,
    start_time_null: Optional[bool] = None,
    start_time_gte: Optional[models.Int64] = None,
    start_time_lte: Optional[models.Int64] = None,
    end_time_null: Optional[bool] = None,
    end_time_gte: Optional[models.Int64] = None,
    end_time_lte: Optional[models.Int64] = None,
    record_size_gte: Optional[int] = None,
    record_size_lte: Optional[int] = None,
    record_count_gte: Optional[int] = None,
    record_count_lte: Optional[int] = None,
    object_size_gte: Optional[int] = None,
    object_size_lte: Optional[int] = None,
    object_count_gte: Optional[int] = None,
    object_count_lte: Optional[int] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists logs.

    Every filter argument is handed off unchanged, by keyword, to the
    underlying ``_log`` implementation.
    """
    # Grab the arguments before creating any other local, remove
    # ``self``, and forward everything else as keywords.
    filters = dict(locals())
    del filters["self"]
    return self._log(**filters)

log_object(log_id, processing=False, max_keys=None, prefix=None, start_after=None, delimiter=None, continuation_token=None)

Lists log objects.

Source code in lqs/interface/core/list.py
def log_object(
    self,
    log_id: UUID,
    processing: Optional[bool] = False,
    max_keys: Optional[int] = None,
    prefix: Optional[str] = None,
    start_after: Optional[str] = None,
    delimiter: Optional[str] = None,
    continuation_token: Optional[str] = None,
):
    """
    Lists log objects.

    All listing options are relayed, by keyword, to the underlying
    ``_log_object`` implementation.
    """
    # Collect the arguments up front, discard ``self``, and pass the
    # remainder straight through.
    options = dict(locals())
    del options["self"]
    return self._log_object(**options)

log_object_part(object_key, log_id, max_parts=None, part_number_marker=None)

Lists log object parts.

Source code in lqs/interface/core/list.py
def log_object_part(
    self,
    object_key: str,
    log_id: UUID,
    max_parts: Optional[int] = None,
    part_number_marker: Optional[int] = None,
):
    """
    Lists log object parts.

    Relays every argument, by keyword, to the underlying
    ``_log_object_part`` implementation.
    """
    # Take an argument snapshot before any other local appears, then
    # drop ``self`` and forward the rest.
    options = dict(locals())
    del options["self"]
    return self._log_object_part(**options)

object(object_store_id, processing=False, max_keys=None, prefix=None, start_after=None, delimiter=None, continuation_token=None)

Lists objects.

Source code in lqs/interface/core/list.py
def object(
    self,
    object_store_id: UUID,
    processing: Optional[bool] = False,
    max_keys: Optional[int] = None,
    prefix: Optional[str] = None,
    start_after: Optional[str] = None,
    delimiter: Optional[str] = None,
    continuation_token: Optional[str] = None,
):
    """
    Lists objects.

    All listing options are relayed, by keyword, to the underlying
    ``_object`` implementation.
    """
    # NOTE: the public name shadows the ``object`` builtin; it is kept
    # for API compatibility. Snapshot the arguments, drop ``self``,
    # and forward the rest verbatim.
    options = dict(locals())
    del options["self"]
    return self._object(**options)

object_part(object_key, object_store_id, max_parts=None, part_number_marker=None)

Lists object parts.

Source code in lqs/interface/core/list.py
def object_part(
    self,
    object_key: str,
    object_store_id: UUID,
    max_parts: Optional[int] = None,
    part_number_marker: Optional[int] = None,
):
    """
    Lists object parts.

    Relays every argument, by keyword, to the underlying
    ``_object_part`` implementation.
    """
    # Capture arguments first, strip ``self``, forward the remainder.
    options = dict(locals())
    del options["self"]
    return self._object_part(**options)

object_store(id=None, bucket_name=None, access_key_id=None, region_name=None, endpoint_url=None, note=None, note_like=None, disabled=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists object stores.

Source code in lqs/interface/core/list.py
def object_store(
    self,
    id: Optional[UUID] = None,
    bucket_name: Optional[str] = None,
    access_key_id: Optional[str] = None,
    region_name: Optional[str] = None,
    endpoint_url: Optional[str] = None,
    note: Optional[str] = None,
    note_like: Optional[str] = None,
    disabled: Optional[bool] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists object stores.

    All filter arguments are passed through, by keyword, to the
    underlying ``_object_store`` implementation.
    """
    # Snapshot arguments before other locals exist, remove ``self``,
    # and forward everything else.
    filters = dict(locals())
    del filters["self"]
    return self._object_store(**filters)

query(id=None, log_id=None, name=None, name_like=None, note_like=None, statement=None, statement_like=None, workflow_id=None, workflow_id_null=None, workflow_context_filter=None, context_filter=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists queries.

Source code in lqs/interface/core/list.py
def query(
    self,
    id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    note_like: Optional[str] = None,
    statement: Optional[str] = None,
    statement_like: Optional[str] = None,
    workflow_id: Optional[UUID] = None,
    workflow_id_null: Optional[bool] = None,
    workflow_context_filter: Optional[str] = None,
    context_filter: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists queries.

    Every filter argument is forwarded unchanged, by keyword, to the
    underlying ``_query`` implementation.
    """
    # Collect the arguments before any other local is bound, discard
    # ``self``, and pass the rest along.
    filters = dict(locals())
    del filters["self"]
    return self._query(**filters)

record(topic_id, timestamp=None, log_id=None, ingestion_id=None, workflow_id=None, workflow_id_null=None, error_like=None, note_like=None, source=None, query_data_filter=None, context_filter=None, altered=None, frequency=None, timestamp_gt=None, timestamp_lt=None, timestamp_gte=None, timestamp_lte=None, data_length_gte=None, data_length_lte=None, data_offset_gte=None, data_offset_lte=None, chunk_compression=None, chunk_offset_gte=None, chunk_offset_lte=None, chunk_length_gte=None, chunk_length_lte=None, include_auxiliary_data=False, include_raw_data=False, include_count=True, offset=0, limit=100, order='timestamp', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists records.

Source code in lqs/interface/core/list.py
def record(
    self,
    topic_id: UUID,
    timestamp: Optional[models.Int64] = None,
    log_id: Optional[UUID] = None,
    ingestion_id: Optional[UUID] = None,
    workflow_id: Optional[UUID] = None,
    workflow_id_null: Optional[bool] = None,
    error_like: Optional[str] = None,
    note_like: Optional[str] = None,
    source: Optional[str] = None,
    query_data_filter: Optional[str] = None,
    context_filter: Optional[str] = None,
    altered: Optional[bool] = None,
    frequency: Optional[float] = None,
    timestamp_gt: Optional[models.Int64] = None,
    timestamp_lt: Optional[models.Int64] = None,
    timestamp_gte: Optional[models.Int64] = None,
    timestamp_lte: Optional[models.Int64] = None,
    data_length_gte: Optional[int] = None,
    data_length_lte: Optional[int] = None,
    data_offset_gte: Optional[int] = None,
    data_offset_lte: Optional[int] = None,
    chunk_compression: Optional[str] = None,
    chunk_offset_gte: Optional[int] = None,
    chunk_offset_lte: Optional[int] = None,
    chunk_length_gte: Optional[int] = None,
    chunk_length_lte: Optional[int] = None,
    include_auxiliary_data: Optional[bool] = False,
    include_raw_data: Optional[bool] = False,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "timestamp",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists records.

    Every filter argument is passed through unchanged, by keyword, to
    the underlying ``_record`` implementation.
    """
    # Snapshot the arguments before creating any other local, remove
    # ``self``, and forward the remainder as keywords.
    filters = dict(locals())
    del filters["self"]
    return self._record(**filters)

tag(id=None, log_id=None, label_id=None, topic_id=None, note=None, note_like=None, context_filter=None, start_time_null=None, start_time_gte=None, start_time_lte=None, end_time_null=None, end_time_gte=None, end_time_lte=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists tags.

Source code in lqs/interface/core/list.py
def tag(
    self,
    id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    label_id: Optional[UUID] = None,
    topic_id: Optional[UUID] = None,
    note: Optional[str] = None,
    note_like: Optional[str] = None,
    context_filter: Optional[str] = None,
    start_time_null: Optional[bool] = None,
    start_time_gte: Optional[models.Int64] = None,
    start_time_lte: Optional[models.Int64] = None,
    end_time_null: Optional[bool] = None,
    end_time_gte: Optional[models.Int64] = None,
    end_time_lte: Optional[models.Int64] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists tags.

    All filter arguments are handed off unchanged, by keyword, to the
    underlying ``_tag`` implementation.
    """
    # Capture the arguments first (no other locals exist yet), strip
    # ``self``, and forward the rest verbatim.
    filters = dict(locals())
    del filters["self"]
    return self._tag(**filters)

topic(id=None, log_id=None, group_id=None, name=None, name_like=None, associated_topic_id=None, latched=None, strict=None, locked=None, context_filter=None, start_time_null=None, start_time_gte=None, start_time_lte=None, end_time_null=None, end_time_gte=None, end_time_lte=None, record_size_gte=None, record_size_lte=None, record_count_gte=None, record_count_lte=None, type_name=None, type_name_like=None, type_encoding=None, type_data=None, type_data_like=None, type_schema_filter=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists topics.

Source code in lqs/interface/core/list.py
def topic(
    self,
    id: Optional[UUID] = None,
    log_id: Optional[UUID] = None,
    group_id: Optional[UUID] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    associated_topic_id: Optional[UUID] = None,
    latched: Optional[bool] = None,
    strict: Optional[bool] = None,
    locked: Optional[bool] = None,
    context_filter: Optional[str] = None,
    start_time_null: Optional[bool] = None,
    start_time_gte: Optional[models.Int64] = None,
    start_time_lte: Optional[models.Int64] = None,
    end_time_null: Optional[bool] = None,
    end_time_gte: Optional[models.Int64] = None,
    end_time_lte: Optional[models.Int64] = None,
    record_size_gte: Optional[int] = None,
    record_size_lte: Optional[int] = None,
    record_count_gte: Optional[int] = None,
    record_count_lte: Optional[int] = None,
    type_name: Optional[str] = None,
    type_name_like: Optional[str] = None,
    type_encoding: Optional[models.TypeEncoding] = None,
    type_data: Optional[str] = None,
    type_data_like: Optional[str] = None,
    type_schema_filter: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists topics.

    Every filter argument is forwarded unchanged, by keyword, to the
    underlying ``_topic`` implementation.
    """
    # Snapshot arguments before any other local is bound, drop
    # ``self``, and pass the remainder along.
    filters = dict(locals())
    del filters["self"]
    return self._topic(**filters)

workflow(id=None, name=None, name_like=None, default=None, disabled=None, managed=None, context_schema_filter=None, include_count=True, offset=0, limit=100, order='created_at', sort='ASC', created_by=None, updated_by=None, deleted_by=None, updated_by_null=None, deleted_by_null=None, updated_at_null=None, deleted_at_null=None, created_at_lte=None, updated_at_lte=None, deleted_at_lte=None, created_at_gte=None, updated_at_gte=None, deleted_at_gte=None)

Lists workflows.

Source code in lqs/interface/core/list.py
def workflow(
    self,
    id: Optional[UUID] = None,
    name: Optional[str] = None,
    name_like: Optional[str] = None,
    default: Optional[bool] = None,
    disabled: Optional[bool] = None,
    managed: Optional[bool] = None,
    context_schema_filter: Optional[str] = None,
    include_count: Optional[bool] = True,
    offset: Optional[int] = 0,
    limit: Optional[int] = 100,
    order: Optional[str] = "created_at",
    sort: Optional[str] = "ASC",
    created_by: Optional[UUID] = None,
    updated_by: Optional[UUID] = None,
    deleted_by: Optional[UUID] = None,
    updated_by_null: Optional[bool] = None,
    deleted_by_null: Optional[bool] = None,
    updated_at_null: Optional[bool] = None,
    deleted_at_null: Optional[bool] = None,
    created_at_lte: Optional[datetime] = None,
    updated_at_lte: Optional[datetime] = None,
    deleted_at_lte: Optional[datetime] = None,
    created_at_gte: Optional[datetime] = None,
    updated_at_gte: Optional[datetime] = None,
    deleted_at_gte: Optional[datetime] = None,
):
    """
    Lists workflows.

    All filter arguments are relayed unchanged, by keyword, to the
    underlying ``_workflow`` implementation.
    """
    # Collect arguments up front, discard ``self``, forward the rest.
    filters = dict(locals())
    del filters["self"]
    return self._workflow(**filters)

Fetch

Bases: FetchInterface, RESTInterface

Source code in lqs/client/core/fetch.py
class Fetch(FetchInterface, RESTInterface):
    """REST-backed implementation of the fetch interface for the "lqs" service.

    Each ``_<resource>`` method pops the path parameters it needs from its
    keyword arguments, issues a GET for the corresponding resource path via
    ``RESTInterface._get_resource``, and parses the response into the matching
    ``models.*DataResponse`` model.
    """

    # Service name consumed by RESTInterface when building request URLs.
    service: str = "lqs"

    def __init__(self, app):
        super().__init__(app=app)

    def _api_key(self, **params):
        # Fetch a single API key by ID.
        api_key_id = params.pop("api_key_id")
        result = self._get_resource(
            f"apiKeys/{api_key_id}", response_model=models.APIKeyDataResponse
        )
        return result

    def _digestion(self, **params):
        # Fetch a single digestion by ID.
        digestion_id = params.pop("digestion_id")
        result = self._get_resource(
            f"digestions/{digestion_id}", response_model=models.DigestionDataResponse
        )
        return result

    def _digestion_part(self, **kwargs):
        # Fetch a digestion part nested under its parent digestion.
        # NOTE(review): uses "kwargs" where sibling methods use "params" —
        # cosmetic inconsistency only.
        digestion_id = kwargs.pop("digestion_id")
        digestion_part_id = kwargs.pop("digestion_part_id")
        result = self._get_resource(
            f"digestions/{digestion_id}/parts/{digestion_part_id}",
            response_model=models.DigestionPartDataResponse,
        )
        return result

    def _digestion_topic(self, **params):
        # Fetch a digestion topic nested under its parent digestion.
        digestion_id = params.pop("digestion_id")
        digestion_topic_id = params.pop("digestion_topic_id")
        result = self._get_resource(
            f"digestions/{digestion_id}/topics/{digestion_topic_id}",
            response_model=models.DigestionTopicDataResponse,
        )
        return result

    def _group(self, **params):
        # Fetch a single group by ID.
        group_id = params.pop("group_id")
        result = self._get_resource(
            f"groups/{group_id}", response_model=models.GroupDataResponse
        )
        return result

    def _hook(self, **params):
        # Fetch a hook nested under its parent workflow.
        workflow_id = params.pop("workflow_id")
        hook_id = params.pop("hook_id")
        result = self._get_resource(
            f"workflows/{workflow_id}/hooks/{hook_id}",
            response_model=models.HookDataResponse,
        )
        return result

    def _ingestion(self, **params):
        # Fetch a single ingestion by ID.
        ingestion_id = params.pop("ingestion_id")
        result = self._get_resource(
            f"ingestions/{ingestion_id}", response_model=models.IngestionDataResponse
        )
        return result

    def _ingestion_part(self, **params):
        # Fetch an ingestion part nested under its parent ingestion.
        ingestion_id = params.pop("ingestion_id")
        ingestion_part_id = params.pop("ingestion_part_id")
        result = self._get_resource(
            f"ingestions/{ingestion_id}/parts/{ingestion_part_id}",
            response_model=models.IngestionPartDataResponse,
        )
        return result

    def _label(self, **params):
        # Fetch a single label by ID.
        label_id = params.pop("label_id")
        result = self._get_resource(
            f"labels/{label_id}", response_model=models.LabelDataResponse
        )
        return result

    def _log(self, **params):
        # Fetch a single log by ID.
        log_id = params.pop("log_id")
        result = self._get_resource(
            f"logs/{log_id}", response_model=models.LogDataResponse
        )
        return result

    def _log_object(self, **params):
        # Fetch a log object's metadata, or its raw bytes when "redirect" is set.
        # NOTE(review): the query string is built from params *before*
        # redirect/offset/length are popped, so those keys are also serialized
        # into the URL — confirm the server expects/ignores them there.
        log_id = params.pop("log_id")
        object_key = params.pop("object_key")

        resource_path = (
            f"logs/{log_id}/objects/{object_key}"
            + self._get_url_param_string(params, [])
        )

        if params.get("redirect", False):
            offset = params.pop("offset", None)
            length = params.pop("length", None)
            headers = {}
            if offset is not None:
                if length is not None:
                    # Closed byte range: offset .. offset+length-1 (inclusive).
                    headers["Range"] = f"bytes={offset}-{offset + length - 1}"
                else:
                    if offset < 0:
                        # Negative offset -> HTTP suffix range (last |offset| bytes).
                        headers["Range"] = f"bytes={offset}"
                    else:
                        # Open-ended range starting at offset.
                        headers["Range"] = f"bytes={offset}-"
            elif length is not None:
                # Length only: first `length` bytes.
                headers["Range"] = f"bytes=0-{length - 1}"
            # Raw content request: no expected content type, bytes returned as-is.
            result = self._get_resource(
                resource_path, expected_content_type=None, additional_headers=headers
            )
        else:
            result = self._get_resource(
                resource_path, response_model=models.ObjectDataResponse
            )
        return result

    def _log_object_part(self, **params):
        # Fetch a single part of a (multipart) log object.
        log_id = params.pop("log_id")
        object_key = params.pop("object_key")
        part_number = params.pop("part_number")
        result = self._get_resource(
            f"logs/{log_id}/objects/{object_key}/parts/{part_number}",
            response_model=models.ObjectPartDataResponse,
        )
        return result

    def _me(self, **params):
        # Fetch the calling user's own profile; takes no parameters.
        result = self._get_resource("users/me", response_model=models.MeDataResponse)
        return result

    def _object(self, **params):
        # Fetch an object-store object's metadata, or raw bytes when "redirect"
        # is set. Mirrors _log_object, including the same pre-pop URL-building
        # order — see the NOTE there.
        object_store_id = params.pop("object_store_id")
        object_key = params.pop("object_key")

        resource_path = (
            f"objectStores/{object_store_id}/objects/{object_key}"
            + self._get_url_param_string(params, [])
        )

        if params.get("redirect", False):
            offset = params.pop("offset", None)
            length = params.pop("length", None)
            headers = {}
            if offset is not None:
                if length is not None:
                    # Closed byte range: offset .. offset+length-1 (inclusive).
                    headers["Range"] = f"bytes={offset}-{offset + length - 1}"
                else:
                    if offset < 0:
                        # Negative offset -> HTTP suffix range (last |offset| bytes).
                        headers["Range"] = f"bytes={offset}"
                    else:
                        # Open-ended range starting at offset.
                        headers["Range"] = f"bytes={offset}-"
            elif length is not None:
                # Length only: first `length` bytes.
                headers["Range"] = f"bytes=0-{length - 1}"
            result = self._get_resource(
                resource_path, expected_content_type=None, additional_headers=headers
            )
        else:
            result = self._get_resource(
                resource_path, response_model=models.ObjectDataResponse
            )
        return result

    def _object_part(self, **params):
        # Fetching individual object-store object parts is not supported.
        raise NotImplementedError

    def _object_store(self, **params):
        # Fetch a single object store by ID.
        object_store_id = params.pop("object_store_id")
        result = self._get_resource(
            f"objectStores/{object_store_id}",
            response_model=models.ObjectStoreDataResponse,
        )
        return result

    def _query(self, **params):
        # Fetch a query nested under its parent log.
        log_id = params.pop("log_id")
        query_id = params.pop("query_id")
        result = self._get_resource(
            f"logs/{log_id}/queries/{query_id}", response_model=models.QueryDataResponse
        )
        return result

    def _record(self, **params):
        # Fetch a record by topic and timestamp; remaining params (e.g. the
        # include_* flags) are serialized into the query string.
        topic_id = params.pop("topic_id")
        timestamp = params.pop("timestamp")
        url_params = self._get_url_param_string(params, [])
        result = self._get_resource(
            f"topics/{topic_id}/records/{timestamp}" + url_params,
            response_model=models.RecordDataResponse,
        )
        return result

    def _role(self, **params):
        # Fetch a single role by ID.
        role_id = params.pop("role_id")
        result = self._get_resource(
            f"roles/{role_id}", response_model=models.RoleDataResponse
        )
        return result

    def _tag(self, **params):
        # Fetch a tag nested under its parent log.
        log_id = params.pop("log_id")
        tag_id = params.pop("tag_id")
        result = self._get_resource(
            f"logs/{log_id}/tags/{tag_id}", response_model=models.TagDataResponse
        )
        return result

    def _topic(self, **params):
        # Fetch a single topic by ID.
        topic_id = params.pop("topic_id")
        result = self._get_resource(
            f"topics/{topic_id}", response_model=models.TopicDataResponse
        )
        return result

    def _user(self, **params):
        # Fetch a single user by ID.
        user_id = params.pop("user_id")
        result = self._get_resource(
            f"users/{user_id}", response_model=models.UserDataResponse
        )
        return result

    def _workflow(self, **params):
        # Fetch a single workflow by ID.
        workflow_id = params.pop("workflow_id")
        result = self._get_resource(
            f"workflows/{workflow_id}", response_model=models.WorkflowDataResponse
        )
        return result

digestion(digestion_id)

Fetches a digestion by ID.

Parameters:

Name Type Description Default
digestion_id UUID

The ID of the digestion to fetch.

required

Returns: A data response for the digestion.

Source code in lqs/interface/core/fetch.py
def digestion(self, digestion_id: UUID):
    """Fetch a single digestion.

    Args:
        digestion_id: Identifier of the digestion to retrieve.
    Returns:
        A data response for the digestion.
    """
    return self._digestion(digestion_id=digestion_id)

digestion_part(digestion_part_id, digestion_id=None)

Fetches a digestion part by ID.

Parameters:

Name Type Description Default
digestion_part_id UUID

The ID of the digestion part to fetch.

required
digestion_id Optional[UUID]

The ID of the digestion to which the digestion part belongs.

None

Returns: A data response for the digestion part.

Source code in lqs/interface/core/fetch.py
def digestion_part(
    self, digestion_part_id: UUID, digestion_id: Optional[UUID] = None
):
    """Fetch a digestion part by its ID.

    Args:
        digestion_part_id: Identifier of the digestion part to retrieve.
        digestion_id: Identifier of the parent digestion, if known.
    Returns:
        A data response for the digestion part.
    """
    lookup = {"digestion_id": digestion_id, "digestion_part_id": digestion_part_id}
    return self._digestion_part(**lookup)

digestion_topic(digestion_topic_id, digestion_id=None)

Fetches a digestion topic by ID.

Parameters:

Name Type Description Default
digestion_topic_id UUID

The ID of the digestion topic to fetch.

required
digestion_id Optional[UUID]

The ID of the digestion to which the digestion topic belongs.

None

Returns: A data response for the digestion topic.

Source code in lqs/interface/core/fetch.py
def digestion_topic(
    self, digestion_topic_id: UUID, digestion_id: Optional[UUID] = None
):
    """Fetch a digestion topic by its ID.

    Args:
        digestion_topic_id: Identifier of the digestion topic to retrieve.
        digestion_id: Identifier of the parent digestion, if known.
    Returns:
        A data response for the digestion topic.
    """
    lookup = {"digestion_id": digestion_id, "digestion_topic_id": digestion_topic_id}
    return self._digestion_topic(**lookup)

group(group_id)

Fetches a group by ID.

Parameters:

Name Type Description Default
group_id UUID

The ID of the group to fetch.

required

Returns: A data response for the group.

Source code in lqs/interface/core/fetch.py
def group(self, group_id: UUID):
    """Fetch a single group.

    Args:
        group_id: Identifier of the group to retrieve.
    Returns:
        A data response for the group.
    """
    return self._group(group_id=group_id)

hook(hook_id, workflow_id=None)

Fetches a hook by ID.

Parameters:

Name Type Description Default
hook_id UUID

The ID of the hook to fetch.

required
workflow_id Optional[UUID]

The ID of the workflow to which the hook belongs.

None

Returns: A data response for the hook.

Source code in lqs/interface/core/fetch.py
def hook(self, hook_id: UUID, workflow_id: Optional[UUID] = None):
    """Fetch a hook by its ID.

    Args:
        hook_id: Identifier of the hook to retrieve.
        workflow_id: Identifier of the parent workflow, if known.
    Returns:
        A data response for the hook.
    """
    return self._hook(workflow_id=workflow_id, hook_id=hook_id)

ingestion(ingestion_id)

Fetches an ingestion by ID.

Parameters:

Name Type Description Default
ingestion_id UUID

The ID of the ingestion to fetch.

required

Returns: A data response for the ingestion.

Source code in lqs/interface/core/fetch.py
def ingestion(self, ingestion_id: UUID):
    """Fetch a single ingestion.

    Args:
        ingestion_id: Identifier of the ingestion to retrieve.
    Returns:
        A data response for the ingestion.
    """
    return self._ingestion(ingestion_id=ingestion_id)

ingestion_part(ingestion_part_id, ingestion_id=None)

Fetches an ingestion part by ID.

Parameters:

Name Type Description Default
ingestion_part_id UUID

The ID of the ingestion part to fetch.

required
ingestion_id Optional[UUID]

The ID of the ingestion to which the ingestion part belongs.

None

Returns: A data response for the ingestion part.

Source code in lqs/interface/core/fetch.py
def ingestion_part(
    self, ingestion_part_id: UUID, ingestion_id: Optional[UUID] = None
):
    """Fetch an ingestion part by its ID.

    Args:
        ingestion_part_id: Identifier of the ingestion part to retrieve.
        ingestion_id: Identifier of the parent ingestion, if known.
    Returns:
        A data response for the ingestion part.
    """
    lookup = {"ingestion_id": ingestion_id, "ingestion_part_id": ingestion_part_id}
    return self._ingestion_part(**lookup)

label(label_id)

Fetches a label by ID.

Parameters:

Name Type Description Default
label_id UUID

The ID of the label to fetch.

required

Returns: A data response for the label.

Source code in lqs/interface/core/fetch.py
def label(self, label_id: UUID):
    """Fetch a single label.

    Args:
        label_id: Identifier of the label to retrieve.
    Returns:
        A data response for the label.
    """
    return self._label(label_id=label_id)

log(log_id)

Fetches a log by ID.

Parameters:

Name Type Description Default
log_id UUID

The ID of the log to fetch.

required

Returns: A data response for the log.

Source code in lqs/interface/core/fetch.py
def log(self, log_id: UUID):
    """Fetch a single log.

    Args:
        log_id: Identifier of the log to retrieve.
    Returns:
        A data response for the log.
    """
    return self._log(log_id=log_id)

log_object(object_key, log_id, redirect=False, offset=None, length=None)

Fetches an object by key.

Parameters:

Name Type Description Default
object_key str

The key of the object to fetch.

required
log_id UUID

The ID of the log to which the object belongs.

required
redirect Optional[bool]

Whether to redirect to the object store or return the object directly. Defaults to False.

False
offset Optional[int]

The offset from which to read the object.

None
length Optional[int]

The length of the object to read.

None

Returns: A data response for the object or the object itself as bytes if redirect is True.

Source code in lqs/interface/core/fetch.py
def log_object(
    self,
    object_key: str,
    log_id: UUID,
    redirect: Optional[bool] = False,
    offset: Optional[int] = None,
    length: Optional[int] = None,
) -> Union[models.ObjectDataResponse, bytes]:
    """Fetch a log object by key.

    Args:
        object_key: Key of the object to retrieve.
        log_id: Identifier of the log that owns the object.
        redirect: When True, return the object content rather than its
            metadata. Defaults to False.
        offset: Byte offset at which reading starts.
        length: Number of bytes to read.
    Returns:
        A data response describing the object, or the raw bytes when
        redirect is True.
    """
    call_args = {
        "log_id": log_id,
        "object_key": object_key,
        "redirect": redirect,
        "offset": offset,
        "length": length,
    }
    return self._log_object(**call_args)

log_object_part(object_key, part_number, log_id)

Fetches an object part by key and part number.

Parameters:

Name Type Description Default
object_key str

The key of the object to fetch.

required
part_number int

The part number of the object to fetch.

required
log_id UUID

The ID of the log to which the object belongs.

required

Returns: A data response for the object part.

Source code in lqs/interface/core/fetch.py
def log_object_part(self, object_key: str, part_number: int, log_id: UUID):
    """Fetch one part of a log object.

    Args:
        object_key: Key of the object whose part is retrieved.
        part_number: Part number to retrieve.
        log_id: Identifier of the log that owns the object.
    Returns:
        A data response for the object part.
    """
    return self._log_object_part(
        object_key=object_key, part_number=part_number, log_id=log_id
    )

object(object_key, object_store_id, redirect=False, offset=None, length=None)

Fetches an object by key.

Parameters:

Name Type Description Default
object_key str

The key of the object to fetch.

required
object_store_id UUID

The ID of the object store to which the object belongs.

required
redirect Optional[bool]

Whether to redirect to the object store or return the object directly. Defaults to False.

False
offset Optional[int]

The offset from which to read the object.

None
length Optional[int]

The length of the object to read.

None

Returns: A data response for the object or the object itself as bytes if redirect is True.

Source code in lqs/interface/core/fetch.py
def object(
    self,
    object_key: str,
    object_store_id: UUID,
    redirect: Optional[bool] = False,
    offset: Optional[int] = None,
    length: Optional[int] = None,
) -> Union[models.ObjectDataResponse, bytes]:
    """Fetch an object from an object store by key.

    Args:
        object_key: Key of the object to retrieve.
        object_store_id: Identifier of the object store that owns the object.
        redirect: When True, return the object content rather than its
            metadata. Defaults to False.
        offset: Byte offset at which reading starts.
        length: Number of bytes to read.
    Returns:
        A data response describing the object, or the raw bytes when
        redirect is True.
    """
    call_args = {
        "object_store_id": object_store_id,
        "object_key": object_key,
        "redirect": redirect,
        "offset": offset,
        "length": length,
    }
    return self._object(**call_args)

object_part(object_key, part_number, object_store_id)

Fetches an object part by key and part number.

Parameters:

Name Type Description Default
object_key str

The key of the object to fetch.

required
part_number int

The part number of the object to fetch.

required
object_store_id UUID

The ID of the object store to which the object belongs.

required

Returns: A data response for the object part.

Source code in lqs/interface/core/fetch.py
def object_part(self, object_key: str, part_number: int, object_store_id: UUID):
    """Fetch one part of an object-store object.

    Args:
        object_key: Key of the object whose part is retrieved.
        part_number: Part number to retrieve.
        object_store_id: Identifier of the object store that owns the object.
    Returns:
        A data response for the object part.
    """
    return self._object_part(
        object_key=object_key,
        part_number=part_number,
        object_store_id=object_store_id,
    )

object_store(object_store_id)

Fetches an object store by ID.

Parameters:

Name Type Description Default
object_store_id UUID

The ID of the object store to fetch.

required

Returns: A data response for the object store.

Source code in lqs/interface/core/fetch.py
def object_store(self, object_store_id: UUID):
    """Fetch a single object store.

    Args:
        object_store_id: Identifier of the object store to retrieve.
    Returns:
        A data response for the object store.
    """
    return self._object_store(object_store_id=object_store_id)

query(query_id, log_id=None)

Fetches a query by ID.

Parameters:

Name Type Description Default
query_id UUID

The ID of the query to fetch.

required
log_id Optional[UUID]

The ID of the log to which the query belongs.

None

Returns: A data response for the query.

Source code in lqs/interface/core/fetch.py
def query(self, query_id: UUID, log_id: Optional[UUID] = None):
    """Fetch a query by its ID.

    Args:
        query_id: Identifier of the query to retrieve.
        log_id: Identifier of the parent log, if known.
    Returns:
        A data response for the query.
    """
    return self._query(log_id=log_id, query_id=query_id)

record(timestamp, topic_id, include_auxiliary_data=False, include_raw_data=False)

Fetches a record by timestamp and topic ID.

Parameters:

Name Type Description Default
timestamp Int64

The timestamp of the record to fetch.

required
topic_id UUID

The ID of the topic to which the record belongs.

required
include_auxiliary_data bool

Whether to include auxiliary data in the record. Defaults to False.

False
include_raw_data bool

Whether to include raw data in the record. Defaults to False.

False

Returns: A data response for the record.

Source code in lqs/interface/core/fetch.py
def record(
    self,
    timestamp: models.Int64,
    topic_id: UUID,
    include_auxiliary_data: bool = False,
    include_raw_data: bool = False,
):
    """Fetch a single record identified by topic and timestamp.

    Args:
        timestamp: Timestamp of the record to retrieve.
        topic_id: Identifier of the topic the record belongs to.
        include_auxiliary_data: Include auxiliary data in the response.
            Defaults to False.
        include_raw_data: Include raw data in the response. Defaults to False.
    Returns:
        A data response for the record.
    """
    options = {
        "timestamp": timestamp,
        "topic_id": topic_id,
        "include_auxiliary_data": include_auxiliary_data,
        "include_raw_data": include_raw_data,
    }
    return self._record(**options)

tag(tag_id, log_id=None)

Fetches a tag by ID.

Parameters:

Name Type Description Default
tag_id UUID

The ID of the tag to fetch.

required
log_id Optional[UUID]

The ID of the log to which the tag belongs.

None

Returns: A data response for the tag.

Source code in lqs/interface/core/fetch.py
def tag(self, tag_id: UUID, log_id: Optional[UUID] = None):
    """Fetch a tag by its ID.

    Args:
        tag_id: Identifier of the tag to retrieve.
        log_id: Identifier of the parent log, if known.
    Returns:
        A data response for the tag.
    """
    return self._tag(log_id=log_id, tag_id=tag_id)

topic(topic_id)

Fetches a topic by ID.

Parameters:

Name Type Description Default
topic_id UUID

The ID of the topic to fetch.

required

Returns: A data response for the topic.

Source code in lqs/interface/core/fetch.py
def topic(self, topic_id: UUID):
    """Fetch a single topic.

    Args:
        topic_id: Identifier of the topic to retrieve.
    Returns:
        A data response for the topic.
    """
    return self._topic(topic_id=topic_id)

workflow(workflow_id)

Fetches a workflow by ID.

Parameters:

Name Type Description Default
workflow_id UUID

The ID of the workflow to fetch.

required

Returns: A data response for the workflow.

Source code in lqs/interface/core/fetch.py
def workflow(self, workflow_id: UUID):
    """Fetch a single workflow.

    Args:
        workflow_id: Identifier of the workflow to retrieve.
    Returns:
        A data response for the workflow.
    """
    return self._workflow(workflow_id=workflow_id)

Update

Bases: UpdateInterface, RESTInterface

Source code in lqs/client/core/update.py
class Update(UpdateInterface, RESTInterface):
    """REST-backed implementation of the update interface for the "lqs" service.

    Each ``_<resource>`` method pops its path parameters and the request body
    ("data") from its keyword arguments, then issues an update via
    ``RESTInterface._update_resource``, parsing the response into the matching
    ``models.*DataResponse`` model.

    NOTE(review): the keyword ``additiona_params`` (sic) is spelled this way to
    match the ``_update_resource`` helper's signature — confirm before renaming.
    """

    # Service name consumed by RESTInterface when building request URLs.
    service: str = "lqs"

    def __init__(self, app):
        super().__init__(app=app)

    def _api_key(self, **params):
        # Update an API key by ID.
        api_key_id = params.pop("api_key_id")
        data = params.pop("data")
        return self._update_resource(
            f"apiKeys/{api_key_id}", data, models.APIKeyDataResponse
        )

    def _digestion(self, **params):
        # Update a digestion; an optional lock_token is forwarded as an
        # additional request parameter.
        digestion_id = params.pop("digestion_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"digestions/{digestion_id}",
            data,
            models.DigestionDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _digestion_part(self, **params):
        # Update a digestion part nested under its parent digestion.
        digestion_id = params.pop("digestion_id")
        digestion_part_id = params.pop("digestion_part_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"digestions/{digestion_id}/parts/{digestion_part_id}",
            data,
            models.DigestionPartDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _digestion_topic(self, **params):
        # Update a digestion topic nested under its parent digestion.
        digestion_id = params.pop("digestion_id")
        digestion_topic_id = params.pop("digestion_topic_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"digestions/{digestion_id}/topics/{digestion_topic_id}",
            data,
            models.DigestionTopicDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _group(self, **params):
        # Update a group by ID.
        group_id = params.pop("group_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"groups/{group_id}",
            data,
            models.GroupDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _hook(self, **params):
        # Update a hook nested under its parent workflow (no lock token).
        workflow_id = params.pop("workflow_id")
        hook_id = params.pop("hook_id")
        data = params.pop("data")
        return self._update_resource(
            f"workflows/{workflow_id}/hooks/{hook_id}", data, models.HookDataResponse
        )

    def _ingestion(self, **params):
        # Update an ingestion by ID.
        ingestion_id = params.pop("ingestion_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"ingestions/{ingestion_id}",
            data,
            models.IngestionDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _ingestion_part(self, **params):
        # Update an ingestion part nested under its parent ingestion.
        ingestion_id = params.pop("ingestion_id")
        ingestion_part_id = params.pop("ingestion_part_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"ingestions/{ingestion_id}/parts/{ingestion_part_id}",
            data,
            models.IngestionPartDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _label(self, **params):
        # Update a label by ID (no lock token).
        label_id = params.pop("label_id")
        data = params.pop("data")
        return self._update_resource(
            f"labels/{label_id}", data, models.LabelDataResponse
        )

    def _log(self, **params):
        # Update a log by ID.
        log_id = params.pop("log_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"logs/{log_id}",
            data,
            models.LogDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _log_object(self, **params):
        # Update a log object's metadata by key.
        log_id = params.pop("log_id")
        object_key = params.pop("object_key")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"logs/{log_id}/objects/{object_key}",
            data,
            models.ObjectDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _object(self, **params):
        # Updating object-store objects directly is not supported.
        raise NotImplementedError

    def _object_store(self, **params):
        # Update an object store by ID (no lock token).
        object_store_id = params.pop("object_store_id")
        data = params.pop("data")
        return self._update_resource(
            f"objectStores/{object_store_id}", data, models.ObjectStoreDataResponse
        )

    def _query(self, **params):
        # Update a query nested under its parent log.
        log_id = params.pop("log_id")
        query_id = params.pop("query_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"logs/{log_id}/queries/{query_id}",
            data,
            models.QueryDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _record(self, **params):
        # Update a record identified by topic and timestamp.
        topic_id = params.pop("topic_id")
        timestamp = params.pop("timestamp")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"topics/{topic_id}/records/{timestamp}",
            data,
            models.RecordDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _role(self, **params):
        # Update a role by ID (no lock token).
        role_id = params.pop("role_id")
        data = params.pop("data")
        return self._update_resource(f"roles/{role_id}", data, models.RoleDataResponse)

    def _tag(self, **params):
        # Update a tag nested under its parent log.
        log_id = params.pop("log_id")
        tag_id = params.pop("tag_id")
        lock_token = params.pop("lock_token", None)
        data = params.pop("data")
        return self._update_resource(
            f"logs/{log_id}/tags/{tag_id}",
            data,
            models.TagDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _topic(self, **params):
        # Update a topic by ID.
        topic_id = params.pop("topic_id")
        data = params.pop("data")
        lock_token = params.pop("lock_token", None)
        return self._update_resource(
            f"topics/{topic_id}",
            data,
            models.TopicDataResponse,
            additiona_params={"lock_token": lock_token},
        )

    def _user(self, **params):
        # Update a user by ID (no lock token).
        user_id = params.pop("user_id")
        data = params.pop("data")
        return self._update_resource(f"users/{user_id}", data, models.UserDataResponse)

    def _workflow(self, **params):
        # Update a workflow by ID (no lock token).
        workflow_id = params.pop("workflow_id")
        data = params.pop("data")
        return self._update_resource(
            f"workflows/{workflow_id}", data, models.WorkflowDataResponse
        )

digestion(digestion_id, data, lock_token=None)

Update a digestion.

Parameters:

Name Type Description Default
digestion_id UUID

The id of the digestion to update.

required
data DigestionUpdateRequest

The data to update the digestion with.

required
lock_token Optional[str]

The lock token used to authorize the update when the digestion is locked.

None

Returns: A data response containing the updated digestion.

Source code in lqs/interface/core/update.py
def digestion(
    self,
    digestion_id: UUID,
    data: models.DigestionUpdateRequest,
    lock_token: Optional[str] = None,
):
    """
    Update a digestion.

    Args:
        digestion_id: The id of the digestion to update.
        data: The data to update the digestion with.
        lock_token: Optional lock token forwarded with the request;
            presumably required to modify a locked resource — confirm.
    Returns:
        A data response containing the updated digestion.
    """
    # The update payload is normalized via _process_data before sending.
    return self._digestion(
        digestion_id=digestion_id,
        data=self._process_data(data),
        lock_token=lock_token,
    )

digestion_part(digestion_part_id, data, digestion_id=None, lock_token=None)

Update a digestion part.

Parameters:

Name Type Description Default
digestion_part_id UUID

The id of the digestion part to update.

required
data DigestionPartUpdateRequest

The data to update the digestion part with.

required
digestion_id Optional[UUID]

The id of the digestion to which the digestion part belongs.

None

Returns: A data response containing the updated digestion part.

Source code in lqs/interface/core/update.py
def digestion_part(
    self,
    digestion_part_id: UUID,
    data: models.DigestionPartUpdateRequest,
    digestion_id: Optional[UUID] = None,
    lock_token: Optional[str] = None,
):
    """
    Update a digestion part.

    Args:
        digestion_part_id: The id of the digestion part to update.
        data: The data to update the digestion part with.
        digestion_id (optional): The id of the digestion to which the digestion part belongs.
        lock_token (optional): Lock token forwarded with the request;
            presumably required to modify a locked resource — confirm.
    Returns:
        A data response containing the updated digestion part.
    """
    # The update payload is normalized via _process_data before sending.
    return self._digestion_part(
        digestion_id=digestion_id,
        digestion_part_id=digestion_part_id,
        data=self._process_data(data),
        lock_token=lock_token,
    )

digestion_topic(digestion_topic_id, data, digestion_id=None, lock_token=None)

Update a digestion topic.

Parameters:

Name Type Description Default
digestion_topic_id UUID

The id of the digestion topic to update.

required
data DigestionTopicUpdateRequest

The data to update the digestion topic with.

required
digestion_id Optional[UUID]

The id of the digestion to which the digestion topic belongs.

None

Returns: A data response containing the updated digestion topic.

Source code in lqs/interface/core/update.py
def digestion_topic(
    self,
    digestion_topic_id: UUID,
    data: models.DigestionTopicUpdateRequest,
    digestion_id: Optional[UUID] = None,
    lock_token: Optional[str] = None,
):
    """
    Update a digestion topic.

    Args:
        digestion_topic_id: The id of the digestion topic to update.
        data: The data to update the digestion topic with.
        digestion_id (optional): The id of the digestion to which the digestion topic belongs.
        lock_token (optional): Lock token forwarded with the request;
            presumably required to modify a locked resource — confirm.
    Returns:
        A data response containing the updated digestion topic.
    """
    # The update payload is normalized via _process_data before sending.
    return self._digestion_topic(
        digestion_id=digestion_id,
        digestion_topic_id=digestion_topic_id,
        data=self._process_data(data),
        lock_token=lock_token,
    )