From 1ac65fa0bfcdd01b151b98134b842364311059fd Mon Sep 17 00:00:00 2001 From: Sohambutala Date: Wed, 1 May 2024 14:57:52 -0700 Subject: [PATCH 1/2] Renamed `echoflow` to `echodataflow` --- .github/workflows/main.yaml | 2 +- .github/workflows/python-publish.yml | 2 +- Dockerfile | 8 +- README.md | 18 +- ...orker.sh => deploy_echodataflow_worker.sh} | 18 +- docs/configuration/blocks.md | 42 +- docs/configuration/datastore.md | 18 +- docs/configuration/pipeline.md | 30 +- docs/setup.md | 6 +- docs/source/_config.yml | 6 +- docs/source/aws/datastore.yaml | 4 +- docs/source/aws/datastoreconfiguration.md | 2 +- docs/source/aws/intro.md | 6 +- docs/source/aws/notebook.ipynb | 54 +- docs/source/aws/pipeline.yaml | 16 +- docs/source/aws/pipelineconfiguration.md | 20 +- docs/source/aws/prereq.md | 12 +- docs/source/aws/setup.md | 46 +- docs/source/conf.py | 6 +- docs/source/configuration/blocks.md | 42 +- docs/source/configuration/datastore.md | 16 +- docs/source/configuration/pipeline.md | 30 +- docs/source/gettingstarted.md | 44 +- docs/source/index.md | 6 +- docs/source/local/datastore.yaml | 2 +- docs/source/local/datastoreconfiguration.md | 6 +- .../echoflow_compute_TS_output.json | 2 +- .../echoflow_open_raw_output.json | 2 +- docs/source/local/intro.md | 6 +- docs/source/local/notebook.ipynb | 108 +-- docs/source/local/pipeline.yaml | 8 +- docs/source/local/pipelineconfiguration.md | 12 +- docs/source/local/setup.md | 36 +- {echoflow => echodataflow}/__init__.py | 20 +- .../aspects/__init__.py | 0 .../aspects/echodataflow_aspect.py | 26 +- .../aspects/singleton_echodataflow.py | 44 +- {echoflow => echodataflow}/config/__init__.py | 0 .../config/datastore.yaml | 12 +- .../config/logging_config.yaml | 10 +- .../config/pipeline.yaml | 36 +- .../echodataflow_cli.py | 170 ++-- {echoflow => echodataflow}/models/__init__.py | 0 .../models/datastore.py | 10 +- .../models/db_log_model.py | 4 +- .../models/echodataflow_config.py | 26 +- .../models/output_model.py | 4 +- {echoflow => echodataflow}/models/pipeline.py | 4 +- .../rule_engine/__init__.py | 0 .../rule_engine/dependency_engine.py | 0 .../rule_engine/echodataflow_rules.txt | 5 + {echoflow => echodataflow}/stages/__init__.py | 0 .../stages/docker_trigger.py | 4 +- .../stages/echodataflow.py | 122 +-- .../stages/echodataflow_trigger.py | 34 +- .../stages/subflows/__init__.py | 0 .../stages/subflows/add_depth.py | 30 +- .../stages/subflows/add_location.py | 30 +- .../stages/subflows/apply_mask.py | 32 +- .../stages/subflows/combine_echodata.py | 28 +- .../stages/subflows/compute_MVBS.py | 28 +- .../stages/subflows/compute_SV.py | 30 +- .../stages/subflows/compute_TS.py | 30 +- .../stages/subflows/frequency_differencing.py | 32 +- .../stages/subflows/initialization_flow.py | 20 +- .../stages/subflows/open_raw.py | 30 +- .../tests/AWS/AWSEchoflowDemo.ipynb | 144 +-- .../tests/AWS/datastore.yaml | 2 +- .../tests/AWS/pipeline.yaml | 16 +- {echoflow => echodataflow}/tests/__init__.py | 0 .../tests/offline/EK60_SH1707_Shimada.txt | 10 + .../tests/offline/LocalEchoflowDemo.ipynb | 868 ++++++++++++++++++ .../tests/offline/datastore.yaml | 4 +- .../Bell_M._Shimada-SH1707-EK60.json | 1 + .../echodataflow_apply_mask_output.json | 1 + .../echodataflow_combine_echodata_output.json | 1 + .../echodataflow_compute_MVBS_output.json | 1 + .../echodataflow_compute_SV_output.json | 1 + ...ataflow_frequency_differencing_output.json | 1 + .../echodataflow_open_raw_output.json | 1 + echodataflow/tests/offline/pipeline.yaml | 40 + {echoflow => 
echodataflow}/utils/__init__.py | 0 .../utils/config_utils.py | 20 +- .../utils/databse_utils.py | 4 +- .../utils/file_utils.py | 24 +- .../utils/function_utils.py | 0 {echoflow => echodataflow}/utils/log_util.py | 30 +- .../utils/rest_utils.py | 0 echoflow/rule_engine/echoflow_rules.txt | 5 - .../tests/offline/LocalEchoflowDemo.ipynb | 868 ------------------ echoflow/tests/offline/pipeline.yaml | 36 - environment.yml | 4 +- .../echoflow_cli.py | 82 +- .../datastore.py | 6 +- .../_sources/aws/datastoreconfiguration.md | 4 +- jupyterbook/_build/html/_sources/aws/intro.md | 6 +- .../_build/html/_sources/aws/notebook.ipynb | 54 +- .../_sources/aws/pipelineconfiguration.md | 22 +- .../_build/html/_sources/aws/prereq.md | 12 +- jupyterbook/_build/html/_sources/aws/setup.md | 46 +- .../html/_sources/configuration/blocks.md | 42 +- .../html/_sources/configuration/datastore.md | 16 +- .../html/_sources/configuration/pipeline.md | 30 +- .../_build/html/_sources/gettingstarted.md | 44 +- jupyterbook/_build/html/_sources/intro.md | 6 +- .../_sources/local/datastoreconfiguration.md | 8 +- .../_build/html/_sources/local/intro.md | 6 +- .../_build/html/_sources/local/notebook.ipynb | 108 +-- .../_sources/local/pipelineconfiguration.md | 14 +- .../_build/html/_sources/local/setup.md | 36 +- .../html/aws/datastoreconfiguration.html | 30 +- jupyterbook/_build/html/aws/intro.html | 32 +- jupyterbook/_build/html/aws/notebook.html | 46 +- .../html/aws/pipelineconfiguration.html | 44 +- jupyterbook/_build/html/aws/prereq.html | 38 +- jupyterbook/_build/html/aws/setup.html | 86 +- .../_build/html/configuration/blocks.html | 86 +- .../_build/html/configuration/datastore.html | 40 +- .../_build/html/configuration/pipeline.html | 50 +- jupyterbook/_build/html/genindex.html | 22 +- jupyterbook/_build/html/gettingstarted.html | 88 +- jupyterbook/_build/html/intro.html | 36 +- .../html/local/datastoreconfiguration.html | 32 +- jupyterbook/_build/html/local/intro.html | 36 +- jupyterbook/_build/html/local/notebook.html | 48 +- .../html/local/pipelineconfiguration.html | 34 +- jupyterbook/_build/html/local/setup.html | 78 +- jupyterbook/_build/html/search.html | 22 +- jupyterbook/_build/html/searchindex.js | 2 +- .../_build/jupyter_execute/aws/notebook.ipynb | 18 +- .../jupyter_execute/local/notebook.ipynb | 20 +- jupyterbook/_config.yml | 6 +- jupyterbook/aws/datastore.yaml | 4 +- jupyterbook/aws/datastoreconfiguration.md | 2 +- jupyterbook/aws/intro.md | 6 +- jupyterbook/aws/notebook.ipynb | 54 +- jupyterbook/aws/pipeline.yaml | 16 +- jupyterbook/aws/pipelineconfiguration.md | 20 +- jupyterbook/aws/prereq.md | 12 +- jupyterbook/aws/setup.md | 46 +- jupyterbook/configuration/blocks.md | 42 +- jupyterbook/configuration/datastore.md | 16 +- jupyterbook/configuration/pipeline.md | 30 +- jupyterbook/gettingstarted.md | 44 +- jupyterbook/intro.md | 6 +- jupyterbook/local/datastore.yaml | 2 +- jupyterbook/local/datastoreconfiguration.md | 6 +- .../echoflow_compute_TS_output.json | 2 +- .../echoflow_open_raw_output.json | 2 +- jupyterbook/local/intro.md | 6 +- jupyterbook/local/notebook.ipynb | 108 +-- jupyterbook/local/pipeline.yaml | 8 +- jupyterbook/local/pipelineconfiguration.md | 12 +- jupyterbook/local/setup.md | 36 +- notebooks/ConfigurationDemo.ipynb | 18 +- notebooks/FlowDemo.ipynb | 22 +- notebooks/aop_test.ipynb | 2 +- notebooks/database_test copy.ipynb | 10 +- notebooks/logging_demo.ipynb | 22 +- notebooks/s3_demo.ipynb | 20 +- pyproject.toml | 2 +- setup.cfg | 6 +- 162 files changed, 2789 insertions(+), 2768 
deletions(-) rename deployment/{deploy_echoflow_worker.sh => deploy_echodataflow_worker.sh} (69%) rename {echoflow => echodataflow}/__init__.py (68%) rename {echoflow => echodataflow}/aspects/__init__.py (100%) rename echoflow/aspects/echoflow_aspect.py => echodataflow/aspects/echodataflow_aspect.py (82%) rename echoflow/aspects/singleton_echoflow.py => echodataflow/aspects/singleton_echodataflow.py (83%) rename {echoflow => echodataflow}/config/__init__.py (100%) rename {echoflow => echodataflow}/config/datastore.yaml (69%) rename {echoflow => echodataflow}/config/logging_config.yaml (86%) rename {echoflow => echodataflow}/config/pipeline.yaml (79%) rename echoflow/echoflow_cli.py => echodataflow/echodataflow_cli.py (77%) rename {echoflow => echodataflow}/models/__init__.py (100%) rename {echoflow => echodataflow}/models/datastore.py (96%) rename {echoflow => echodataflow}/models/db_log_model.py (97%) rename echoflow/models/echoflow_config.py => echodataflow/models/echodataflow_config.py (72%) rename {echoflow => echodataflow}/models/output_model.py (92%) rename {echoflow => echodataflow}/models/pipeline.py (96%) rename {echoflow => echodataflow}/rule_engine/__init__.py (100%) rename {echoflow => echodataflow}/rule_engine/dependency_engine.py (100%) create mode 100644 echodataflow/rule_engine/echodataflow_rules.txt rename {echoflow => echodataflow}/stages/__init__.py (100%) rename {echoflow => echodataflow}/stages/docker_trigger.py (88%) rename echoflow/stages/echoflow.py => echodataflow/stages/echodataflow.py (81%) rename echoflow/stages/echoflow_trigger.py => echodataflow/stages/echodataflow_trigger.py (86%) rename {echoflow => echodataflow}/stages/subflows/__init__.py (100%) rename {echoflow => echodataflow}/stages/subflows/add_depth.py (89%) rename {echoflow => echodataflow}/stages/subflows/add_location.py (89%) rename {echoflow => echodataflow}/stages/subflows/apply_mask.py (88%) rename {echoflow => echodataflow}/stages/subflows/combine_echodata.py (92%) rename {echoflow => echodataflow}/stages/subflows/compute_MVBS.py (90%) rename {echoflow => echodataflow}/stages/subflows/compute_SV.py (85%) rename {echoflow => echodataflow}/stages/subflows/compute_TS.py (85%) rename {echoflow => echodataflow}/stages/subflows/frequency_differencing.py (87%) rename {echoflow => echodataflow}/stages/subflows/initialization_flow.py (92%) rename {echoflow => echodataflow}/stages/subflows/open_raw.py (88%) rename {echoflow => echodataflow}/tests/AWS/AWSEchoflowDemo.ipynb (98%) rename {echoflow => echodataflow}/tests/AWS/datastore.yaml (92%) rename {echoflow => echodataflow}/tests/AWS/pipeline.yaml (51%) rename {echoflow => echodataflow}/tests/__init__.py (100%) create mode 100644 echodataflow/tests/offline/EK60_SH1707_Shimada.txt create mode 100644 echodataflow/tests/offline/LocalEchoflowDemo.ipynb rename {echoflow => echodataflow}/tests/offline/datastore.yaml (89%) create mode 100644 echodataflow/tests/offline/echodataflow-output/json_metadata/Bell_M._Shimada-SH1707-EK60.json create mode 100644 echodataflow/tests/offline/echodataflow-output/json_metadata/echodataflow_apply_mask_output.json create mode 100644 echodataflow/tests/offline/echodataflow-output/json_metadata/echodataflow_combine_echodata_output.json create mode 100644 echodataflow/tests/offline/echodataflow-output/json_metadata/echodataflow_compute_MVBS_output.json create mode 100644 echodataflow/tests/offline/echodataflow-output/json_metadata/echodataflow_compute_SV_output.json create mode 100644 
echodataflow/tests/offline/echodataflow-output/json_metadata/echodataflow_frequency_differencing_output.json create mode 100644 echodataflow/tests/offline/echodataflow-output/json_metadata/echodataflow_open_raw_output.json create mode 100644 echodataflow/tests/offline/pipeline.yaml rename {echoflow => echodataflow}/utils/__init__.py (100%) rename {echoflow => echodataflow}/utils/config_utils.py (96%) rename {echoflow => echodataflow}/utils/databse_utils.py (99%) rename {echoflow => echodataflow}/utils/file_utils.py (97%) rename {echoflow => echodataflow}/utils/function_utils.py (100%) rename {echoflow => echodataflow}/utils/log_util.py (75%) rename {echoflow => echodataflow}/utils/rest_utils.py (100%) delete mode 100644 echoflow/rule_engine/echoflow_rules.txt delete mode 100644 echoflow/tests/offline/LocalEchoflowDemo.ipynb delete mode 100644 echoflow/tests/offline/pipeline.yaml diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index e8c0e50..44e73c3 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -18,7 +18,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - - name: Install echoflow + - name: Install echodataflow run: pip install .[all] - name: Run unit tests env: diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index ea7ce86..69afc33 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -6,7 +6,7 @@ # separate terms of service, privacy policy, and support # documentation. -name: echoflow-dev +name: echodataflow-dev on: release: diff --git a/Dockerfile b/Dockerfile index b93db02..269a545 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,15 +4,15 @@ FROM prefecthq/prefect:2-python3.10 # Add our requirements.txt file to the image and install dependencies COPY requirements.txt . -# Update setuptools and pip before installing echoflow +# Update setuptools and pip before installing echodataflow RUN pip install --upgrade pip setuptools -RUN pip install --no-cache-dir --trusted-host pypi.python.org echoflow +RUN pip install --no-cache-dir --trusted-host pypi.python.org echodataflow # RUN pip install -r requirements.txt --trusted-host pypi.python.org --no-cache-dir -RUN echoflow init +RUN echodataflow init # Run our flow script when the container starts -CMD ["python", "-m", "echoflow.docker_trigger.py"] +CMD ["python", "-m", "echodataflow.stages.docker_trigger"] EXPOSE 4200 \ No newline at end of file diff --git a/README.md b/README.md index 5defeae..3d6ee84 100644 --- a/README.md +++ b/README.md @@ -14,15 +14,15 @@ This guide will walk you through the initial steps to set up and run your Echoda To keep your Echodataflow environment isolated, it's recommended to create a virtual environment using Conda or Python's built-in `venv` module. Here's an example using Conda: ```bash -conda create --name echoflow-env -conda activate echoflow-env +conda create --name echodataflow-env +conda activate echodataflow-env ``` Or, using Python's venv: ```bash -python -m venv echoflow-env -source echoflow-env/bin/activate # On Windows, use `echoflow-env\Scripts\activate` +python -m venv echodataflow-env +source echodataflow-env/bin/activate # On Windows, use `echodataflow-env\Scripts\activate` ``` ## 2.
Clone the Project @@ -48,7 +48,7 @@ To kickstart your journey with Echodataflow and Prefect, follow these simple ini Begin by initializing Echodataflow with the following command: ```bash -echoflow init +echodataflow init ``` This command sets up the groundwork for your Echodataflow environment, preparing it for seamless usage. @@ -64,7 +64,7 @@ prefect cloud login - If you don't have a Prefect Cloud account yet, you can use a local Prefect profile. This is especially useful for those who are just starting out and want to explore Prefect without an account. ```bash -prefect profiles create echoflow-local +prefect profile create echodataflow-local ``` The initialization process will ensure that both Echodataflow and Prefect are properly set up and ready for you to dive into your cloud-based workflows. @@ -79,13 +79,13 @@ Open the [pipeline.yaml](./docs/configuration/pipeline.md) file. This YAML confi Customize the [datastore.yaml](./docs/configuration/datastore.md) file to define the source and destination for your pipeline's data. This is where Echodataflow will fetch and store data as it executes the pipeline. ## 8. Execute the Pipeline -You're now ready to execute your Echodataflow pipeline! Use the echoflow_start function, which is a central piece of Echodataflow, to kick off your pipeline. Import this function from Echodataflow and provide the paths or URLs of the configuration files. You can also pass additional options or storage options as needed. Here's an example: +You're now ready to execute your Echodataflow pipeline! Use the echodataflow_start function, which is a central piece of Echodataflow, to kick off your pipeline. Import this function from Echodataflow and provide the paths or URLs of the configuration files. You can also pass additional options or storage options as needed. Here's an example: Customize the paths, block name, storage type, and options based on your requirements. ```python -from echoflow import echoflow_start, StorageType, load_block +from echodataflow import echodataflow_start, StorageType, load_block dataset_config = # url or path of datastore.yaml pipeline_config = # url or path of pipeline.yaml @@ -94,7 +94,7 @@ logfile_config = # url or path of logging.yaml (Optional) aws = load_block(name="<block_name>", type=<StorageType>) options = {"storage_options_override": False} # Enabling this assigns the block for universal use, avoiding the need for repetitive configurations when employing a single credential block throughout the application.
-data = echoflow_start(dataset_config=dataset_config, pipeline_config=pipeline_config, logging_config=logfile_config, storage_options=aws, options=options) +data = echodataflow_start(dataset_config=dataset_config, pipeline_config=pipeline_config, logging_config=logfile_config, storage_options=aws, options=options) ``` ## License diff --git a/deployment/deploy_echoflow_worker.sh b/deployment/deploy_echodataflow_worker.sh similarity index 69% rename from deployment/deploy_echoflow_worker.sh rename to deployment/deploy_echodataflow_worker.sh index efbc433..a321b57 100644 --- a/deployment/deploy_echoflow_worker.sh +++ b/deployment/deploy_echodataflow_worker.sh @@ -1,19 +1,19 @@ #!/bin/bash # Step 1: Create a Python Virtual Environment -python3 -m venv $HOME/env/echoflow-prod -source $HOME/env/echoflow-prod/bin/activate +python3 -m venv $HOME/env/echodataflow-prod +source $HOME/env/echodataflow-prod/bin/activate -# Step 2: Clone the Echoflow Repository +# Step 2: Clone the Echodataflow Repository cd $HOME/ -git clone https://github.com/OSOceanAcoustics/echoflow.git -cd $HOME/echoflow +git clone https://github.com/OSOceanAcoustics/echodataflow.git +cd $HOME/echodataflow # Step 3: Checkout the Dev Branch and Update (Optional) - Skip if using Prod/main branch git checkout dev git pull origin dev -# Step 4: Install the Echoflow Project in Editable Mode +# Step 4: Install the Echodataflow Project in Editable Mode pip install -e . # Step 5: Log in to Prefect Cloud and Set Your API Key - Change to step 5b if using prefect locally @@ -22,7 +22,7 @@ read prefectKey prefect cloud login -k $prefectKey # Step 5b: Setup prefect locally -# prefect profile create echoflow-local +# prefect profile create echodataflow-local # Step 6: Set Up the Prefect Worker as a Systemd Service echo "Enter Work Pool Name: " @@ -36,7 +36,7 @@ Description=Prefect-Worker [Service] User=$(whoami) -WorkingDirectory=$HOME/echoflow +WorkingDirectory=$HOME/echodataflow ExecStart=$(which prefect) agent start --pool $workPool Restart=always @@ -53,4 +53,4 @@ sudo systemctl enable prefect-worker.service # Step 8: Start the Prefect Worker Service sudo systemctl start prefect-worker.service -echo "Setup completed. The Echoflow worker is now running. Send tasks to $workPool using Prefect UI or CLI." \ No newline at end of file +echo "Setup completed. The Echodataflow worker is now running. Send tasks to $workPool using Prefect UI or CLI." \ No newline at end of file diff --git a/docs/configuration/blocks.md b/docs/configuration/blocks.md index b471f77..07c008d 100644 --- a/docs/configuration/blocks.md +++ b/docs/configuration/blocks.md @@ -1,62 +1,62 @@ -# Echoflow Configuration and Credential Blocks +# Echodataflow Configuration and Credential Blocks -Echoflow leverages the concept of "blocks" from Prefect, which serve as containers for storing various types of data, including credentials and sensitive information. Currently, Echoflow supports two types of blocks: Azure Cosmos DB Credentials Block and AWS Credentials Block. These blocks allow you to securely store sensitive data while benefiting from Prefect's robust integration capabilities. +Echodataflow leverages the concept of "blocks" from Prefect, which serve as containers for storing various types of data, including credentials and sensitive information. Currently, Echodataflow supports two types of blocks: Azure Cosmos DB Credentials Block and AWS Credentials Block. 
These blocks allow you to securely store sensitive data while benefiting from Prefect's robust integration capabilities. For a deeper understanding of blocks, you can refer to the [Prefect documentation](https://docs.prefect.io/2.11.5/concepts/blocks/). -## Types of Blocks in Echoflow +## Types of Blocks in Echodataflow -In the context of Echoflow, there are two main categories of blocks: +In the context of Echodataflow, there are two main categories of blocks: -### 1. Echoflow Configuration Blocks +### 1. Echodataflow Configuration Blocks -These blocks serve as repositories for references to credential blocks, as well as repositories for the various Prefect profiles that have been established using Echoflow's functions. +These blocks serve as repositories for references to credential blocks, as well as repositories for the various Prefect profiles that have been established using Echodataflow's functions. ### 2. Credential Blocks -Credential blocks store sensitive information, such as authentication keys and tokens, securely. Echoflow integrates with Prefect's capabilities to ensure that sensitive data is protected. +Credential blocks store sensitive information, such as authentication keys and tokens, securely. Echodataflow integrates with Prefect's capabilities to ensure that sensitive data is protected. ## Creating Credential Blocks -Credential blocks can be conveniently created using an `.ini` file. By leveraging Prefect's integration, Echoflow ensures that the credentials stored in these blocks are handled securely. To create a credential block, you can follow these steps: +Credential blocks can be conveniently created using an `.ini` file. By leveraging Prefect's integration, Echodataflow ensures that the credentials stored in these blocks are handled securely. To create a credential block, you can follow these steps: -1. Open the `credentials.ini` file, which is located under the `.echoflow` directory in your home directory. +1. Open the `credentials.ini` file, which is located under the `.echodataflow` directory in your home directory. ```bash # Terminal command -cd ~/.echoflow +cd ~/.echodataflow ``` 2. Place the necessary credential information within the `credentials.ini` file. ```bash # Terminal command nano credentials.ini # Or use any of your favourite editors ``` -3. Store the updated `.ini` file in the `.echoflow` directory, which resides in your home directory. -4. Utilize [echoflow load-credentials](../../echoflow/stages/subflows/echoflow.py#load_credential_configuration) command to generate a new credential block, leveraging the content from the `.ini` file. +3. Store the updated `.ini` file in the `.echodataflow` directory, which resides in your home directory. +4. Utilize the [echodataflow load-credentials](../../echodataflow/stages/subflows/echodataflow.py#load_credential_configuration) command to generate a new credential block, leveraging the content from the `.ini` file. ```bash -echoflow load-credentials +echodataflow load-credentials ``` -5. Add the name of the block in pipeline or datastore yaml configuration files under `storage_options` section with the appropriate storage type (refer [StorageType](../../echoflow/config/models/datastore.py#StorageType)). +5. Add the name of the block in the pipeline or datastore YAML configuration files under the `storage_options` section with the appropriate storage type (refer to [StorageType](../../echodataflow/config/models/datastore.py#StorageType)).
```yaml # Example storage_options: - block_name: echoflow-aws-credentials # Name of the block containing credentials + block_name: echodataflow-aws-credentials # Name of the block containing credentials type: AWS # Specify the storage type using StorageType enum ``` Providing the block name and storage type ensures that the correct block is used for storage operations and makes the chosen storage type explicit. -Once a credential block is created, it can be managed through the Prefect Dashboard. Additionally, if needed, you can use the `echoflow load-credentials` command with the `--sync` argument to ensure your blocks stay up-to-date with any changes made in the Prefect UI. This ensures that your configurations remain accurate and aligned across the application. **It is highly recommended to create new blocks whenever possible, as modifying existing blocks can lead to data loss or conflicts.** +Once a credential block is created, it can be managed through the Prefect Dashboard. Additionally, if needed, you can use the `echodataflow load-credentials` command with the `--sync` argument to ensure your blocks stay up-to-date with any changes made in the Prefect UI. This ensures that your configurations remain accurate and aligned across the application. **It is highly recommended to create new blocks whenever possible, as modifying existing blocks can lead to data loss or conflicts.** -## Considerations When Using `echoflow load-credentials` +## Considerations When Using `echodataflow load-credentials` -When utilizing the `echoflow load-credentials` command, be aware of the following considerations: +When utilizing the `echodataflow load-credentials` command, be aware of the following considerations: -- **Overwriting Values**: When using `echoflow load-credentials`, all the values from the `.ini` file will be written to the credential block, potentially overwriting existing values. Exercise caution when using this command to prevent unintentional data loss. +- **Overwriting Values**: When using `echodataflow load-credentials`, all the values from the `.ini` file will be written to the credential block, potentially overwriting existing values. Exercise caution when using this command to prevent unintentional data loss. - **Creating New Blocks**: To maintain data integrity and security, it's advised to create new blocks rather than modifying existing ones. If editing an existing block becomes necessary, it should be done through the Prefect Dashboard. -- **Sync Argument**: The `--sync` argument is available in the `echoflow load-credentials` command. When set, this option syncs the credential block updates with the Prefect UI. This feature facilitates the seamless management of blocks through the dashboard, enhancing collaboration and control over credentials. +- **Sync Argument**: The `--sync` argument is available in the `echodataflow load-credentials` command. When set, this option syncs the credential block updates with the Prefect UI. This feature facilitates the seamless management of blocks through the dashboard, enhancing collaboration and control over credentials. -By adhering to these guidelines, you can ensure the secure management of sensitive information while effectively configuring and utilizing Echoflow within your projects. +By adhering to these guidelines, you can ensure the secure management of sensitive information while effectively configuring and utilizing Echodataflow within your projects.
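For orientation, the block named in the `storage_options` example above is what the README snippet earlier in this patch fetches in code via `load_block` before launching a run. A minimal sketch under those assumptions (the `StorageType.AWS` member is inferred from the `type: AWS` comment above, and the config paths are placeholders, not confirmed by this patch):

```python
from echodataflow import echodataflow_start, StorageType, load_block

# Fetch the credential block referenced under storage_options
# (block name taken from the YAML example above; AWS member assumed).
aws = load_block(name="echodataflow-aws-credentials", type=StorageType.AWS)

# Per the README comment, enabling storage_options_override assigns this
# single block for universal use instead of repeating it per stage.
options = {"storage_options_override": True}

data = echodataflow_start(
    dataset_config="datastore.yaml",   # placeholder path
    pipeline_config="pipeline.yaml",   # placeholder path
    storage_options=aws,
    options=options,
)
```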
# Configuration File Explanation: credentials.ini diff --git a/docs/configuration/datastore.md b/docs/configuration/datastore.md index 73e8975..66b3b8e 100644 --- a/docs/configuration/datastore.md +++ b/docs/configuration/datastore.md @@ -1,10 +1,10 @@ -# Echoflow Run Configuration Documentation +# Echodataflow Run Configuration Documentation -This document provides detailed explanations for the keys used in the provided YAML configuration used to define an Echoflow run. +This document provides detailed explanations for the keys used in the YAML configuration that defines an Echodataflow run. ## Run Details -- `name`: This key specifies the name of the Echoflow run. It is used to identify and label the execution of the Echoflow process. +- `name`: This key specifies the name of the Echodataflow run. It is used to identify and label the execution of the Echodataflow process. - `sonar_model`: This key indicates the model of the sonar used for data collection during the run. - `raw_regex`: This key indicates the regex to be used while parsing the source directory to match the files to be processed. @@ -25,13 +25,13 @@ This document provides detailed explanations for the keys used in the provided Y ## Notes -- The provided configuration serves as a structured setup for executing an Echoflow run, allowing customization through the specified keys. +- The provided configuration serves as a structured setup for executing an Echodataflow run, allowing customization through the specified keys. - Dynamic placeholders like `ship_name`, `survey_name`, and `sonar_model` are replaced with actual values based on the context. Example: ```yaml -name: Bell_M._Shimada-SH1707-EK60 # Name of the Echoflow Run +name: Bell_M._Shimada-SH1707-EK60 # Name of the Echodataflow Run sonar_model: EK60 # Sonar Model raw_regex: (.*)-?D(?P<date>\w{1,8})-T(?P
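The YAML example above is cut off mid-pattern. Purely as an illustration of how such a `raw_regex` matches raw files, here is a sketch in which the `time` group's pattern and the sample filename are assumptions modeled on common EK60 `D<date>-T<time>` naming, not taken from this patch:

```python
import re

# The date group mirrors the truncated example above; the time group and the
# sample filename are assumed for illustration.
raw_regex = r"(.*)-?D(?P<date>\w{1,8})-T(?P<time>\w{1,8})"

filename = "Bell_M._Shimada-SH1707-EK60-D20170728-T004612.raw"
match = re.search(raw_regex, filename)
if match:
    print(match.group("date"))  # 20170728
    print(match.group("time"))  # 004612
```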