Skip to content

Commit

Permalink
Update SDK to version 0.5.11
Browse files Browse the repository at this point in the history
  • Loading branch information
Roboto-Bot-O committed Aug 1, 2024
1 parent e2b90ea commit c757303
Show file tree
Hide file tree
Showing 11 changed files with 237 additions and 42 deletions.
35 changes: 27 additions & 8 deletions src/roboto/domain/datasets/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

import pathspec

from ...association import Association
from ...auth import Permissions
from ...env import RobotoEnv
from ...exceptions import (
Expand Down Expand Up @@ -418,6 +419,9 @@ def remove_tags(
"""Remove each tag in this sequence if it exists"""
self.update(metadata_changeset=MetadataChangeset(remove_tags=tags))

def to_association(self) -> Association:
    """Return an :class:`Association` reference pointing at this dataset."""
    association = Association.dataset(self.dataset_id)
    return association

def to_dict(self) -> dict[str, typing.Any]:
    """Serialize this dataset's underlying record to a JSON-compatible dict."""
    record = self.__record
    return record.model_dump(mode="json")

Expand Down Expand Up @@ -465,18 +469,18 @@ def rename_directory(self, old_path: str, new_path: str) -> DirectoryRecord:
def upload_directory(
self,
directory_path: pathlib.Path,
include_patterns: typing.Optional[list[str]] = None,
exclude_patterns: typing.Optional[list[str]] = None,
delete_after_upload: bool = False,
) -> None:
"""
Upload everything, recursively, in directory, ignoring files that match any of the ignore patterns.
`exclude_patterns` is a list of gitignore-style patterns.
See https://git-scm.com/docs/gitignore#_pattern_format.
Uploads all files and directories recursively from the specified directory path. You can use
`include_patterns` and `exclude_patterns` to control what files and directories are uploaded, and can
use `delete_after_upload` to clean up your local filesystem after the uploads succeed.
Example:
>>> from roboto.domain import datasets
>>> dataset = datasets.Dataset(...)
>>> from roboto import Dataset
>>> dataset = Dataset(...)
>>> dataset.upload_directory(
... pathlib.Path("/path/to/directory"),
... exclude_patterns=[
Expand All @@ -486,12 +490,21 @@ def upload_directory(
... "**/*.log",
... ],
... )
Notes:
- Both `include_patterns` and `exclude_patterns` follow the 'gitignore' pattern format described
in https://git-scm.com/docs/gitignore#_pattern_format.
- If both `include_patterns` and `exclude_patterns` are provided, files matching
`exclude_patterns` will be excluded even if they match `include_patterns`.
"""
include_spec: typing.Optional[pathspec.PathSpec] = excludespec_from_patterns(
include_patterns
)
exclude_spec: typing.Optional[pathspec.PathSpec] = excludespec_from_patterns(
exclude_patterns
)
all_files = self.__list_directory_files(
directory_path, exclude_spec=exclude_spec
directory_path, include_spec=include_spec, exclude_spec=exclude_spec
)
file_destination_paths = {
path: os.path.relpath(path, directory_path) for path in all_files
Expand Down Expand Up @@ -609,13 +622,19 @@ def _flush_manifest_item_completions(
def __list_directory_files(
    self,
    directory_path: pathlib.Path,
    include_spec: typing.Optional[pathspec.PathSpec] = None,
    exclude_spec: typing.Optional[pathspec.PathSpec] = None,
) -> collections.abc.Iterable[pathlib.Path]:
    """Recursively collect files under ``directory_path``, filtered by pattern specs.

    Args:
        directory_path: Root directory to walk.
        include_spec: If given, only files matching this spec are kept.
        exclude_spec: If given, files matching this spec are dropped, even if
            they also match ``include_spec``.

    Returns:
        Set of paths (rooted at ``directory_path``) of files that survived filtering.
    """
    all_files = set()

    for root, _, files in os.walk(directory_path):
        for file in files:
            # Match patterns against the path relative to the walk root rather
            # than the bare filename: gitignore-style patterns that contain
            # directory components (e.g. "logs/**" or "sub/*.log") can only
            # match when the directory part is visible to the matcher.
            # Bare-filename patterns such as "*.log" still match at any depth
            # under gitignore semantics, so this remains backward-compatible.
            relative_path = os.path.relpath(os.path.join(root, file), directory_path)

            should_include = include_spec is None or include_spec.match_file(
                relative_path
            )
            should_exclude = exclude_spec is not None and exclude_spec.match_file(
                relative_path
            )

            if should_include and not should_exclude:
                all_files.add(pathlib.Path(root, file))

    return all_files
Expand Down
4 changes: 3 additions & 1 deletion src/roboto/domain/events/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,14 @@
from .operations import (
CreateEventRequest,
QueryEventsForAssociationsRequest,
UpdateEventRequest,
)
from .record import EventRecord

# Public API of the events domain package. Kept sorted; the rendered list
# previously contained a duplicate "Event" entry, removed here.
__all__ = [
    "CreateEventRequest",
    "Event",
    "EventRecord",
    "QueryEventsForAssociationsRequest",
    "UpdateEventRequest",
]
42 changes: 42 additions & 0 deletions src/roboto/domain/events/event.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,16 @@

from ...association import Association
from ...http import RobotoClient
from ...sentinels import NotSet, NotSetType
from ...time import to_epoch_nanoseconds
from ...updates import (
MetadataChangeset,
StrSequence,
)
from .operations import (
CreateEventRequest,
QueryEventsForAssociationsRequest,
UpdateEventRequest,
)
from .record import EventRecord

Expand Down Expand Up @@ -123,5 +129,41 @@ def record(self) -> EventRecord:
def delete(self) -> None:
    """Permanently delete this event via the Roboto service."""
    url = f"v1/events/id/{self.event_id}"
    self.__roboto_client.delete(url)

def put_metadata(self, metadata: dict[str, typing.Any]) -> "Event":
    """Upsert the given key/value pairs into this event's metadata.

    Returns this event (updated) so calls can be chained.
    """
    changeset = MetadataChangeset(put_fields=metadata)
    return self.update(metadata_changeset=changeset)

def put_tags(self, tags: list[str]) -> "Event":
    """Add each of the given tags to this event if not already present.

    Returns this event (updated) so calls can be chained.
    """
    changeset = MetadataChangeset(put_tags=tags)
    return self.update(metadata_changeset=changeset)

def remove_metadata(
    self,
    metadata: StrSequence,
) -> "Event":
    """Remove each of the named metadata fields from this event, if present.

    Returns this event (updated) so calls can be chained.
    """
    changeset = MetadataChangeset(remove_fields=metadata)
    return self.update(metadata_changeset=changeset)

def remove_tags(
    self,
    tags: StrSequence,
) -> "Event":
    """Remove each of the given tags from this event, if present.

    Returns this event (updated) so calls can be chained.
    """
    changeset = MetadataChangeset(remove_tags=tags)
    return self.update(metadata_changeset=changeset)

def set_description(self, description: typing.Optional[str]) -> "Event":
    """Set (or, when ``None``, clear) this event's human-readable description."""
    updated = self.update(description=description)
    return updated

def to_dict(self) -> dict[str, typing.Any]:
    """Serialize this event's underlying record to a JSON-compatible dict."""
    record = self.__record
    return record.model_dump(mode="json")

def update(
    self,
    description: typing.Optional[typing.Union[str, NotSetType]] = NotSet,
    metadata_changeset: typing.Optional[MetadataChangeset] = None,
) -> "Event":
    """Apply the given changes to this event and refresh the cached record.

    Arguments left at their defaults (``NotSet`` / ``None``) are not modified.
    Returns this event so calls can be chained.
    """
    payload = UpdateEventRequest(
        description=description,
        metadata_changeset=metadata_changeset,
    )

    # NOTE(review): this endpoint path has a leading slash ("/v1/...") while
    # delete() uses "v1/..." — presumably RobotoClient normalizes both forms;
    # confirm against the HTTP client before unifying.
    response = self.__roboto_client.put(
        f"/v1/events/id/{self.event_id}", data=payload
    )
    self.__record = response.to_record(EventRecord)

    return self
23 changes: 23 additions & 0 deletions src/roboto/domain/events/operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
import pydantic

from ...association import Association
from ...sentinels import NotSet, NotSetType
from ...updates import MetadataChangeset


class CreateEventRequest(pydantic.BaseModel):
Expand Down Expand Up @@ -61,3 +63,24 @@ class QueryEventsForAssociationsRequest(pydantic.BaseModel):

page_token: typing.Optional[str] = None
"""Token to use to fetch the next page of results, use None for the first page."""


class UpdateEventRequest(pydantic.BaseModel):
    """
    Request payload for the Update Event operation. Allows any of the mutable fields of an event to be changed.

    Fields left at their defaults are not modified by the update.
    """

    description: typing.Optional[typing.Union[str, NotSetType]] = NotSet
    """
    An optional human-readable description of the event.
    The ``NotSet`` default presumably distinguishes "leave unchanged" from an
    explicit ``None`` (clear the description) — confirm against the service.
    """

    metadata_changeset: typing.Optional[MetadataChangeset] = None
    """
    Metadata and tag changes to make for this event
    """

    # This is required to get NotSet/NotSetType to serialize appropriately.
    model_config = pydantic.ConfigDict(
        extra="forbid", json_schema_extra=NotSetType.openapi_schema_modifier
    )
3 changes: 3 additions & 0 deletions src/roboto/domain/files/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,9 @@ def put_metadata(self, metadata: dict[str, typing.Any]) -> "File":
def put_tags(self, tags: list[str]) -> "File":
    """Add each of the given tags to this file if not already present.

    Returns this file (updated) so calls can be chained.
    """
    changeset = MetadataChangeset(put_tags=tags)
    return self.update(metadata_changeset=changeset)

def to_association(self) -> Association:
    """Return an :class:`Association` reference pointing at this file."""
    association = Association.file(self.file_id)
    return association

def to_dict(self) -> dict[str, Any]:
    """Serialize this file's underlying record to a JSON-compatible dict."""
    record = self.__record
    return record.model_dump(mode="json")

Expand Down
34 changes: 22 additions & 12 deletions src/roboto/domain/topics/record.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,18 +84,33 @@ class MessagePathRecord(pydantic.BaseModel):
Path to a typed attribute within individual datum records contained within a Topic.
"""

canonical_data_type: CanonicalDataType
"""Normalized data type, used primarily internally by the Roboto Platform."""

created: datetime.datetime
created_by: str

data_type: str
"""
'Native'/framework-specific data type of the attribute at this path.
E.g. "float32", "uint8[]", "geometry_msgs/Pose", "string".
"""

message_path: str
"""
Dot-delimited path to the attribute within the datum record.
"""

data_type: str
metadata: collections.abc.Mapping[str, typing.Any] = pydantic.Field(
default_factory=dict,
)
"""
'Native'/framework-specific data type of the attribute at this path.
E.g. "float32", "uint8[]", "geometry_msgs/Pose", "string".
Key-value pairs to associate with this metadata for discovery and search, e.g.
`{ 'min': '0.71', 'max': '1.77' }`
"""

canonical_data_type: CanonicalDataType
modified: datetime.datetime
modified_by: str

representations: collections.abc.MutableSequence[RepresentationRecord] = (
pydantic.Field(default_factory=list)
Expand All @@ -104,17 +119,12 @@ class MessagePathRecord(pydantic.BaseModel):
Zero to many Representations of this MessagePath.
"""

topic_id: int
"""Internal identifier for Topic with which this MessagePath is associated."""

topic_message_path_id: int
"""Internal identifier for this MessagePath, joined to a particular Topic."""

metadata: collections.abc.Mapping[str, typing.Any] = pydantic.Field(
default_factory=dict,
)
"""
Key-value pairs to associate with this metadata for discovery and search, e.g.
`{ 'min': '0.71', 'max': '1.77' }`
"""


class TopicRecord(pydantic.BaseModel):
"""
Expand Down
3 changes: 3 additions & 0 deletions src/roboto/domain/topics/topic.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,6 +238,9 @@ def set_default_representation(
self.__refresh()
return representation_record

def to_association(self) -> Association:
    """Return an :class:`Association` reference pointing at this topic."""
    topic_id = self.record.topic_id
    return Association.topic(topic_id)

def update(
self,
end_time: typing.Union[typing.Optional[int], NotSetType] = NotSet,
Expand Down
35 changes: 30 additions & 5 deletions src/roboto/upload_agent/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def configure_subcommand(args: argparse.Namespace) -> None:
configure()


def run(auto_create_upload_configs: bool) -> None:
def run(auto_create_upload_configs: bool, merge_uploads: bool) -> None:
if not agent_config_file.is_file():
logger.error(
f"No upload agent config file found at {agent_config_file}. Please run "
Expand Down Expand Up @@ -140,7 +140,9 @@ def run(auto_create_upload_configs: bool) -> None:
if auto_create_upload_configs:
upload_agent.create_upload_configs()

uploaded_datasets = upload_agent.process_uploads()
uploaded_datasets = upload_agent.process_uploads(
merge_uploads=merge_uploads
)
except filelock.Timeout:
logger.info(
"Roboto upload agent appears to already be running, nothing to do. If you don't think this is correct, "
Expand All @@ -152,7 +154,9 @@ def run(auto_create_upload_configs: bool) -> None:
logger.info("Uploaded %d datasets", len(uploaded_datasets))


def run_forever(scan_period_seconds: int, auto_create_upload_configs: bool) -> None:
def run_forever(
scan_period_seconds: int, auto_create_upload_configs: bool, merge_uploads: bool
) -> None:
print(
"Starting roboto-agent in run forever mode, press Ctrl+C to stop.",
file=sys.stdout,
Expand All @@ -161,7 +165,10 @@ def run_forever(scan_period_seconds: int, auto_create_upload_configs: bool) -> N
try:
while True:
logger.info("Running upload agent")
run(auto_create_upload_configs=auto_create_upload_configs)
run(
auto_create_upload_configs=auto_create_upload_configs,
merge_uploads=merge_uploads,
)
logger.info(
f"Run completed, sleeping for {scan_period_seconds} seconds before next attempt."
)
Expand All @@ -175,9 +182,13 @@ def run_subcommand(args: argparse.Namespace) -> None:
run_forever(
scan_period_seconds=30,
auto_create_upload_configs=args.auto_create_upload_configs,
merge_uploads=args.merge_uploads,
)
else:
run(auto_create_upload_configs=args.auto_create_upload_configs)
run(
auto_create_upload_configs=args.auto_create_upload_configs,
merge_uploads=args.merge_uploads,
)


def main():
Expand Down Expand Up @@ -211,6 +222,20 @@ def main():
+ "and sleeps between runs.",
action="store_true",
)
run_parser.add_argument(
"-m",
"--merge-uploads",
action="store_true",
help=(
"If set, all uploads will be merged into a single dataset. If combined with "
"--auto-create-upload-configs, this will allow you to set many disparate output locations as your "
"search paths, and still unite everything under a single dataset. "
"Any tags/metadata/description set in .roboto_upload.json files will be applied sequentially as updates "
"to the created dataset. If there are collisions, like multiple different descriptions or multiple "
"metadata values for the same key, the last one encountered will be used, and the traversal order will "
"be non-deterministic."
),
)
run_parser.add_argument(
"-a",
"--auto-create-upload-configs",
Expand Down
Loading

0 comments on commit c757303

Please sign in to comment.