diff --git a/CNAME b/CNAME new file mode 100644 index 00000000..aff0750e --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +clouddrift.org diff --git a/_autosummary/clouddrift.adapters.gdp.cast_float64_variables_to_float32.html b/_autosummary/clouddrift.adapters.gdp.cast_float64_variables_to_float32.html new file mode 100644 index 00000000..b6c774c5 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.cast_float64_variables_to_float32.html @@ -0,0 +1,587 @@ + + + + + + + + + + + clouddrift.adapters.gdp.cast_float64_variables_to_float32 — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.cast_float64_variables_to_float32

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.cast_float64_variables_to_float32#

+
+
+clouddrift.adapters.gdp.cast_float64_variables_to_float32(ds: Dataset, variables_to_skip: list[str] = ['time', 'lat', 'lon']) Dataset[source]#
+

Cast all float64 variables except variables_to_skip to float32. +Extra precision from float64 is not needed and takes up memory and disk +space.

+
+

Parameters#

+
+
dsxr.Dataset

Dataset to modify

+
+
variables_to_skiplist[str]

List of variables to skip; default is [“time”, “lat”, “lon”].

+
+
+
+
+

Returns#

+
+
dsxr.Dataset

Modified dataset

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.cut_str.html b/_autosummary/clouddrift.adapters.gdp.cut_str.html new file mode 100644 index 00000000..3d7e8c16 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.cut_str.html @@ -0,0 +1,585 @@ + + + + + + + + + + + clouddrift.adapters.gdp.cut_str — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.cut_str

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.cut_str#

+
+
+clouddrift.adapters.gdp.cut_str(value: str, max_length: int) chararray[source]#
+

Cut a string to a specific length and return it as a numpy chararray.

+
+

Parameters#

+
+
valuestr

String to cut

+
+
max_lengthint

Length of the output

+
+
+
+
+

Returns#

+
+
outnp.chararray

String with max_length characters

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.decode_date.html b/_autosummary/clouddrift.adapters.gdp.decode_date.html new file mode 100644 index 00000000..e245a3ff --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.decode_date.html @@ -0,0 +1,587 @@ + + + + + + + + + + + clouddrift.adapters.gdp.decode_date — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.decode_date

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.decode_date#

+
+
+clouddrift.adapters.gdp.decode_date(t)[source]#
+

The date format is specified as ‘seconds since 1970-01-01 00:00:00’ but +the missing values are stored as -1e+34 which is not supported by the +default parsing mechanism in xarray.

+

This function returns replaced the missing value by NaN and returns a +datetime instance.

+
+

Parameters#

+
+
tarray

Array of time values

+
+
+
+
+

Returns#

+
+
outdatetime

Datetime instance with the missing value replaced by NaN

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.drogue_presence.html b/_autosummary/clouddrift.adapters.gdp.drogue_presence.html new file mode 100644 index 00000000..6401babf --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.drogue_presence.html @@ -0,0 +1,585 @@ + + + + + + + + + + + clouddrift.adapters.gdp.drogue_presence — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.drogue_presence

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.drogue_presence#

+
+
+clouddrift.adapters.gdp.drogue_presence(lost_time, time) bool[source]#
+

Create drogue status from the drogue lost time and the trajectory time.

+
+

Parameters#

+
+
lost_time

Timestamp of the drogue loss (or NaT)

+
+
time

Observation time

+
+
+
+
+

Returns#

+
+
outbool

True if drogues and False otherwise

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.fetch_netcdf.html b/_autosummary/clouddrift.adapters.gdp.fetch_netcdf.html new file mode 100644 index 00000000..346ca435 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.fetch_netcdf.html @@ -0,0 +1,578 @@ + + + + + + + + + + + clouddrift.adapters.gdp.fetch_netcdf — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.fetch_netcdf

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.fetch_netcdf#

+
+
+clouddrift.adapters.gdp.fetch_netcdf(url: str, file: str)[source]#
+

Download and save the file from the given url, if not already downloaded.

+
+

Parameters#

+
+
urlstr

URL from which to download the file.

+
+
filestr

Name of the file to save.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.fill_values.html b/_autosummary/clouddrift.adapters.gdp.fill_values.html new file mode 100644 index 00000000..1562f0f3 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.fill_values.html @@ -0,0 +1,579 @@ + + + + + + + + + + + clouddrift.adapters.gdp.fill_values — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.fill_values

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.fill_values#

+
+
+clouddrift.adapters.gdp.fill_values(var, default=nan)[source]#
+

Change fill values (-1e+34, inf, -inf) in var array to the value +specified by default.

+
+

Parameters#

+
+
vararray

Array to fill

+
+
defaultfloat

Default value to use for fill values

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.get_gdp_metadata.html b/_autosummary/clouddrift.adapters.gdp.get_gdp_metadata.html new file mode 100644 index 00000000..4a14fa6d --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.get_gdp_metadata.html @@ -0,0 +1,576 @@ + + + + + + + + + + + clouddrift.adapters.gdp.get_gdp_metadata — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.get_gdp_metadata

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.get_gdp_metadata#

+
+
+clouddrift.adapters.gdp.get_gdp_metadata() DataFrame[source]#
+

Download and parse GDP metadata and return it as a Pandas DataFrame.

+
+

Returns#

+
+
dfpd.DataFrame

Sorted list of drifters as a pandas DataFrame.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.html b/_autosummary/clouddrift.adapters.gdp.html new file mode 100644 index 00000000..76ee6593 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.html @@ -0,0 +1,824 @@ + + + + + + + + + + + clouddrift.adapters.gdp — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.adapters.gdp#

+

This module provides functions and metadata to convert the Global Drifter +Program (GDP) data to a clouddrift.RaggedArray instance. The functions +defined in this module are common to both hourly (clouddrift.adapters.gdp1h) +and six-hourly (clouddrift.adapters.gdp6h) GDP modules.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

cast_float64_variables_to_float32(ds[, ...])

Cast all float64 variables except variables_to_skip to float32.

cut_str(value, max_length)

Cut a string to a specific length and return it as a numpy chararray.

decode_date(t)

The date format is specified as 'seconds since 1970-01-01 00:00:00' but the missing values are stored as -1e+34 which is not supported by the default parsing mechanism in xarray.

drogue_presence(lost_time, time)

Create drogue status from the drogue lost time and the trajectory time.

fetch_netcdf(url, file)

Download and save the file from the given url, if not already downloaded.

fill_values(var[, default])

Change fill values (-1e+34, inf, -inf) in var array to the value specified by default.

get_gdp_metadata()

Download and parse GDP metadata and return it as a Pandas DataFrame.

order_by_date(df, idx)

From the previously sorted DataFrame of directory files, return the unique set of drifter IDs sorted by their start date (the date of the first quality-controlled data point).

parse_directory_file(filename)

Read a GDP directory file that contains metadata of drifter releases.

rowsize(index, **kwargs)

str_to_float(value[, default])

Convert a string to float, while returning the value of default if the string is not convertible to a float, or if it's a NaN.

+
+
+clouddrift.adapters.gdp.cast_float64_variables_to_float32(ds: Dataset, variables_to_skip: list[str] = ['time', 'lat', 'lon']) Dataset[source]#
+

Cast all float64 variables except variables_to_skip to float32. +Extra precision from float64 is not needed and takes up memory and disk +space.

+
+

Parameters#

+
+
dsxr.Dataset

Dataset to modify

+
+
variables_to_skiplist[str]

List of variables to skip; default is [“time”, “lat”, “lon”].

+
+
+
+
+

Returns#

+
+
dsxr.Dataset

Modified dataset

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.cut_str(value: str, max_length: int) chararray[source]#
+

Cut a string to a specific length and return it as a numpy chararray.

+
+

Parameters#

+
+
valuestr

String to cut

+
+
max_lengthint

Length of the output

+
+
+
+
+

Returns#

+
+
outnp.chararray

String with max_length characters

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.decode_date(t)[source]#
+

The date format is specified as ‘seconds since 1970-01-01 00:00:00’ but +the missing values are stored as -1e+34 which is not supported by the +default parsing mechanism in xarray.

+

This function returns replaced the missing value by NaN and returns a +datetime instance.

+
+

Parameters#

+
+
tarray

Array of time values

+
+
+
+
+

Returns#

+
+
outdatetime

Datetime instance with the missing value replaced by NaN

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.drogue_presence(lost_time, time) bool[source]#
+

Create drogue status from the drogue lost time and the trajectory time.

+
+

Parameters#

+
+
lost_time

Timestamp of the drogue loss (or NaT)

+
+
time

Observation time

+
+
+
+
+

Returns#

+
+
outbool

True if drogues and False otherwise

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.fetch_netcdf(url: str, file: str)[source]#
+

Download and save the file from the given url, if not already downloaded.

+
+

Parameters#

+
+
urlstr

URL from which to download the file.

+
+
filestr

Name of the file to save.

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.fill_values(var, default=nan)[source]#
+

Change fill values (-1e+34, inf, -inf) in var array to the value +specified by default.

+
+

Parameters#

+
+
vararray

Array to fill

+
+
defaultfloat

Default value to use for fill values

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.get_gdp_metadata() DataFrame[source]#
+

Download and parse GDP metadata and return it as a Pandas DataFrame.

+
+

Returns#

+
+
dfpd.DataFrame

Sorted list of drifters as a pandas DataFrame.

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.order_by_date(df: DataFrame, idx: list[int]) ndarray[int][source]#
+

From the previously sorted DataFrame of directory files, return the +unique set of drifter IDs sorted by their start date (the date of the first +quality-controlled data point).

+
+

Parameters#

+
+
idxlist

List of drifters to include in the ragged array

+
+
+
+
+

Returns#

+
+
idxlist

Unique set of drifter IDs sorted by their start date.

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.parse_directory_file(filename: str) DataFrame[source]#
+

Read a GDP directory file that contains metadata of drifter releases.

+
+

Parameters#

+
+
filenamestr

Name of the directory file to parse.

+
+
+
+
+

Returns#

+
+
dfpd.DataFrame

List of drifters from a single directory file as a pandas DataFrame.

+
+
+
+
+ +
+
+clouddrift.adapters.gdp.str_to_float(value: str, default: float = nan) float[source]#
+

Convert a string to float, while returning the value of default if the +string is not convertible to a float, or if it’s a NaN.

+
+

Parameters#

+
+
valuestr

String to convert to float

+
+
defaultfloat

Default value to return if the string is not convertible to float

+
+
+
+
+

Returns#

+
+
outfloat

Float value of the string, or default if the string is not convertible to float.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.order_by_date.html b/_autosummary/clouddrift.adapters.gdp.order_by_date.html new file mode 100644 index 00000000..70888286 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.order_by_date.html @@ -0,0 +1,585 @@ + + + + + + + + + + + clouddrift.adapters.gdp.order_by_date — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.order_by_date

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.order_by_date#

+
+
+clouddrift.adapters.gdp.order_by_date(df: DataFrame, idx: list[int]) ndarray[int][source]#
+

From the previously sorted DataFrame of directory files, return the +unique set of drifter IDs sorted by their start date (the date of the first +quality-controlled data point).

+
+

Parameters#

+
+
idxlist

List of drifters to include in the ragged array

+
+
+
+
+

Returns#

+
+
idxlist

Unique set of drifter IDs sorted by their start date.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.parse_directory_file.html b/_autosummary/clouddrift.adapters.gdp.parse_directory_file.html new file mode 100644 index 00000000..96e7a2c1 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.parse_directory_file.html @@ -0,0 +1,583 @@ + + + + + + + + + + + clouddrift.adapters.gdp.parse_directory_file — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.parse_directory_file

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.parse_directory_file#

+
+
+clouddrift.adapters.gdp.parse_directory_file(filename: str) DataFrame[source]#
+

Read a GDP directory file that contains metadata of drifter releases.

+
+

Parameters#

+
+
filenamestr

Name of the directory file to parse.

+
+
+
+
+

Returns#

+
+
dfpd.DataFrame

List of drifters from a single directory file as a pandas DataFrame.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.rowsize.html b/_autosummary/clouddrift.adapters.gdp.rowsize.html new file mode 100644 index 00000000..41914913 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.rowsize.html @@ -0,0 +1,568 @@ + + + + + + + + + + + clouddrift.adapters.gdp.rowsize — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.rowsize

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.rowsize#

+
+
+clouddrift.adapters.gdp.rowsize(index: int, **kwargs) int[source]#
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp.str_to_float.html b/_autosummary/clouddrift.adapters.gdp.str_to_float.html new file mode 100644 index 00000000..74fd4cf4 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp.str_to_float.html @@ -0,0 +1,586 @@ + + + + + + + + + + + clouddrift.adapters.gdp.str_to_float — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp.str_to_float

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp.str_to_float#

+
+
+clouddrift.adapters.gdp.str_to_float(value: str, default: float = nan) float[source]#
+

Convert a string to float, while returning the value of default if the +string is not convertible to a float, or if it’s a NaN.

+
+

Parameters#

+
+
valuestr

String to convert to float

+
+
defaultfloat

Default value to return if the string is not convertible to float

+
+
+
+
+

Returns#

+
+
outfloat

Float value of the string, or default if the string is not convertible to float.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp1h.download.html b/_autosummary/clouddrift.adapters.gdp1h.download.html new file mode 100644 index 00000000..8173f0fa --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp1h.download.html @@ -0,0 +1,590 @@ + + + + + + + + + + + clouddrift.adapters.gdp1h.download — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp1h.download

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp1h.download#

+
+
+clouddrift.adapters.gdp1h.download(drifter_ids: list = None, n_random_id: int = None, url: str = 'https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/hourly_product/v2.01/', tmp_path: str = None)[source]#
+

Download individual NetCDF files from the AOML server.

+
+

Parameters#

+
+
drifter_idslist

List of drifter to retrieve (Default: all)

+
+
n_random_idint

Randomly select n_random_id drifter IDs to download (Default: None)

+
+
urlstr

URL from which to download the data (Default: GDP_DATA_URL). Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL.

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp on Linux)

+
+
+
+
+

Returns#

+
+
outlist

List of retrieved drifters

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp1h.html b/_autosummary/clouddrift.adapters.gdp1h.html new file mode 100644 index 00000000..d4a57604 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp1h.html @@ -0,0 +1,702 @@ + + + + + + + + + + + clouddrift.adapters.gdp1h — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp1h

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp1h#

+

This module provides functions and metadata that can be used to convert the +hourly Global Drifter Program (GDP) data to a clouddrift.RaggedArray +instance.

+

Functions

+ + + + + + + + + + + + +

download([drifter_ids, n_random_id, url, ...])

Download individual NetCDF files from the AOML server.

preprocess(index, **kwargs)

Extract and preprocess the Lagrangian data and attributes.

to_raggedarray([drifter_ids, n_random_id, ...])

Download and process individual GDP hourly files and return a RaggedArray instance with the data.

+
+
+clouddrift.adapters.gdp1h.download(drifter_ids: list = None, n_random_id: int = None, url: str = 'https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/hourly_product/v2.01/', tmp_path: str = None)[source]#
+

Download individual NetCDF files from the AOML server.

+
+

Parameters#

+
+
drifter_idslist

List of drifter to retrieve (Default: all)

+
+
n_random_idint

Randomly select n_random_id drifter IDs to download (Default: None)

+
+
urlstr

URL from which to download the data (Default: GDP_DATA_URL). Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL.

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp on Linux)

+
+
+
+
+

Returns#

+
+
outlist

List of retrieved drifters

+
+
+
+
+ +
+
+clouddrift.adapters.gdp1h.preprocess(index: int, **kwargs) Dataset[source]#
+

Extract and preprocess the Lagrangian data and attributes.

+

This function takes an identification number that can be used to create a +file or url pattern or select data from a Dataframe. It then preprocesses +the data and returns a clean Xarray Dataset.

+
+

Parameters#

+
+
indexint

Drifter’s identification number

+
+
+
+
+

Returns#

+
+
dsxr.Dataset

Xarray Dataset containing the data and attributes

+
+
+
+
+ +
+
+clouddrift.adapters.gdp1h.to_raggedarray(drifter_ids: list[int] | None = None, n_random_id: int | None = None, url: str | None = 'https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/hourly_product/v2.01/', tmp_path: str | None = None) RaggedArray[source]#
+

Download and process individual GDP hourly files and return a RaggedArray +instance with the data.

+
+

Parameters#

+
+
drifter_idslist[int], optional

List of drifters to retrieve (Default: all)

+
+
n_random_idlist[int], optional

Randomly select n_random_id drifter NetCDF files

+
+
urlstr, optional

URL from which to download the data (Default: GDP_DATA_URL). +Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL.

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp on Linux)

+
+
+
+
+

Returns#

+
+
outRaggedArray

A RaggedArray instance of the requested dataset

+
+
+
+
+

Examples#

+

Invoke to_raggedarray without any arguments to download all drifter data +from the 2.01 GDP feed:

+
>>> from clouddrift.adapters.gdp1h import to_raggedarray
+>>> ra = to_raggedarray()
+
+
+

To download a random sample of 100 drifters, for example for development +or testing, use the n_random_id argument:

+
>>> ra = to_raggedarray(n_random_id=100)
+
+
+

To download a specific list of drifters, use the drifter_ids argument:

+
>>> ra = to_raggedarray(drifter_ids=[44136, 54680, 83463])
+
+
+

To download the experimental 2.01 GDP feed, use the url argument to +specify the experimental feed URL:

+
>>> from clouddrift.adapters.gdp1h import GDP_DATA_URL_EXPERIMENTAL, to_raggedarray
+>>> ra = to_raggedarray(url=GDP_DATA_URL_EXPERIMENTAL)
+
+
+

Finally, to_raggedarray returns a RaggedArray instance which provides +a convenience method to emit a xarray.Dataset instance:

+
>>> ds = ra.to_xarray()
+
+
+

To write the ragged array dataset to a NetCDF file on disk, do

+
>>> ds.to_netcdf("gdp1h.nc", format="NETCDF4")
+
+
+

Alternatively, to write the ragged array to a Parquet file, first create +it as an Awkward Array:

+
>>> arr = ra.to_awkward()
+>>> arr.to_parquet("gdp1h.parquet")
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp1h.preprocess.html b/_autosummary/clouddrift.adapters.gdp1h.preprocess.html new file mode 100644 index 00000000..24f1243d --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp1h.preprocess.html @@ -0,0 +1,586 @@ + + + + + + + + + + + clouddrift.adapters.gdp1h.preprocess — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp1h.preprocess

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp1h.preprocess#

+
+
+clouddrift.adapters.gdp1h.preprocess(index: int, **kwargs) Dataset[source]#
+

Extract and preprocess the Lagrangian data and attributes.

+

This function takes an identification number that can be used to create a +file or url pattern or select data from a Dataframe. It then preprocesses +the data and returns a clean Xarray Dataset.

+
+

Parameters#

+
+
indexint

Drifter’s identification number

+
+
+
+
+

Returns#

+
+
dsxr.Dataset

Xarray Dataset containing the data and attributes

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp1h.to_raggedarray.html b/_autosummary/clouddrift.adapters.gdp1h.to_raggedarray.html new file mode 100644 index 00000000..bb3ad5dd --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp1h.to_raggedarray.html @@ -0,0 +1,631 @@ + + + + + + + + + + + clouddrift.adapters.gdp1h.to_raggedarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp1h.to_raggedarray

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp1h.to_raggedarray#

+
+
+clouddrift.adapters.gdp1h.to_raggedarray(drifter_ids: list[int] | None = None, n_random_id: int | None = None, url: str | None = 'https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/hourly_product/v2.01/', tmp_path: str | None = None) RaggedArray[source]#
+

Download and process individual GDP hourly files and return a RaggedArray +instance with the data.

+
+

Parameters#

+
+
drifter_idslist[int], optional

List of drifters to retrieve (Default: all)

+
+
n_random_idlist[int], optional

Randomly select n_random_id drifter NetCDF files

+
+
urlstr, optional

URL from which to download the data (Default: GDP_DATA_URL). +Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL.

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp on Linux)

+
+
+
+
+

Returns#

+
+
outRaggedArray

A RaggedArray instance of the requested dataset

+
+
+
+
+

Examples#

+

Invoke to_raggedarray without any arguments to download all drifter data +from the 2.01 GDP feed:

+
>>> from clouddrift.adapters.gdp1h import to_raggedarray
+>>> ra = to_raggedarray()
+
+
+

To download a random sample of 100 drifters, for example for development +or testing, use the n_random_id argument:

+
>>> ra = to_raggedarray(n_random_id=100)
+
+
+

To download a specific list of drifters, use the drifter_ids argument:

+
>>> ra = to_raggedarray(drifter_ids=[44136, 54680, 83463])
+
+
+

To download the experimental 2.01 GDP feed, use the url argument to +specify the experimental feed URL:

+
>>> from clouddrift.adapters.gdp1h import GDP_DATA_URL_EXPERIMENTAL, to_raggedarray
+>>> ra = to_raggedarray(url=GDP_DATA_URL_EXPERIMENTAL)
+
+
+

Finally, to_raggedarray returns a RaggedArray instance which provides +a convenience method to emit a xarray.Dataset instance:

+
>>> ds = ra.to_xarray()
+
+
+

To write the ragged array dataset to a NetCDF file on disk, do

+
>>> ds.to_netcdf("gdp1h.nc", format="NETCDF4")
+
+
+

Alternatively, to write the ragged array to a Parquet file, first create +it as an Awkward Array:

+
>>> arr = ra.to_awkward()
+>>> arr.to_parquet("gdp1h.parquet")
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp6h.download.html b/_autosummary/clouddrift.adapters.gdp6h.download.html new file mode 100644 index 00000000..d3eec6c2 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp6h.download.html @@ -0,0 +1,590 @@ + + + + + + + + + + + clouddrift.adapters.gdp6h.download — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp6h.download

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp6h.download#

+
+
+clouddrift.adapters.gdp6h.download(drifter_ids: list = None, n_random_id: int = None, url: str = 'https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/6h/', tmp_path: str = '/tmp/clouddrift/gdp6h')[source]#
+

Download individual NetCDF files from the AOML server.

+
+

Parameters#

+
+
drifter_idslist

List of drifter to retrieve (Default: all)

+
+
n_random_idint

Randomly select n_random_id drifter IDs to download (Default: None)

+
+
urlstr

URL from which to download the data (Default: GDP_DATA_URL). Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL.

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp6h on Linux)

+
+
+
+
+

Returns#

+
+
outlist

List of retrieved drifters

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp6h.html b/_autosummary/clouddrift.adapters.gdp6h.html new file mode 100644 index 00000000..d32789b4 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp6h.html @@ -0,0 +1,693 @@ + + + + + + + + + + + clouddrift.adapters.gdp6h — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp6h

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp6h#

+

This module provides functions and metadata that can be used to convert the +6-hourly Global Drifter Program (GDP) data to a clouddrift.RaggedArray +instance.

+

Functions

+ + + + + + + + + + + + +

download([drifter_ids, n_random_id, url, ...])

Download individual NetCDF files from the AOML server.

preprocess(index, **kwargs)

Extract and preprocess the Lagrangian data and attributes.

to_raggedarray([drifter_ids, n_random_id, ...])

Download and process individual GDP 6-hourly files and return a RaggedArray instance with the data.

+
+
+clouddrift.adapters.gdp6h.download(drifter_ids: list = None, n_random_id: int = None, url: str = 'https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/6h/', tmp_path: str = '/tmp/clouddrift/gdp6h')[source]#
+

Download individual NetCDF files from the AOML server.

+
+

Parameters#

+
+
drifter_idslist

List of drifter to retrieve (Default: all)

+
+
n_random_idint

Randomly select n_random_id drifter IDs to download (Default: None)

+
+
urlstr

URL from which to download the data (Default: GDP_DATA_URL). Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL.

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp6h on Linux)

+
+
+
+
+

Returns#

+
+
outlist

List of retrieved drifters

+
+
+
+
+ +
+
+clouddrift.adapters.gdp6h.preprocess(index: int, **kwargs) Dataset[source]#
+

Extract and preprocess the Lagrangian data and attributes.

+

This function takes an identification number that can be used to create a +file or url pattern or select data from a Dataframe. It then preprocesses +the data and returns a clean Xarray Dataset.

+
+

Parameters#

+
+
indexint

Drifter’s identification number

+
+
+
+
+

Returns#

+
+
dsxr.Dataset

Xarray Dataset containing the data and attributes

+
+
+
+
+ +
+
+clouddrift.adapters.gdp6h.to_raggedarray(drifter_ids: list[int] | None = None, n_random_id: int | None = None, tmp_path: str | None = '/tmp/clouddrift/gdp6h') RaggedArray[source]#
+

Download and process individual GDP 6-hourly files and return a +RaggedArray instance with the data.

+
+

Parameters#

+
+
drifter_idslist[int], optional

List of drifters to retrieve (Default: all)

+
+
n_random_idlist[int], optional

Randomly select n_random_id drifter NetCDF files

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp6h on Linux)

+
+
+
+
+

Returns#

+
+
outRaggedArray

A RaggedArray instance of the requested dataset

+
+
+
+
+

Examples#

+

Invoke to_raggedarray without any arguments to download all drifter data +from the 6-hourly GDP feed:

+
>>> from clouddrift.adapters.gdp6h import to_raggedarray
+>>> ra = to_raggedarray()
+
+
+

To download a random sample of 100 drifters, for example for development +or testing, use the n_random_id argument:

+
>>> ra = to_raggedarray(n_random_id=100)
+
+
+

To download a specific list of drifters, use the drifter_ids argument:

+
>>> ra = to_raggedarray(drifter_ids=[54375, 114956, 126934])
+
+
+

Finally, to_raggedarray returns a RaggedArray instance which provides +a convenience method to emit a xarray.Dataset instance:

+
>>> ds = ra.to_xarray()
+
+
+

To write the ragged array dataset to a NetCDF file on disk, do

+
>>> ds.to_netcdf("gdp6h.nc", format="NETCDF4")
+
+
+

Alternatively, to write the ragged array to a Parquet file, first create +it as an Awkward Array:

+
>>> arr = ra.to_awkward()
+>>> arr.to_parquet("gdp6h.parquet")
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp6h.preprocess.html b/_autosummary/clouddrift.adapters.gdp6h.preprocess.html new file mode 100644 index 00000000..d02ec5a0 --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp6h.preprocess.html @@ -0,0 +1,586 @@ + + + + + + + + + + + clouddrift.adapters.gdp6h.preprocess — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp6h.preprocess

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp6h.preprocess#

+
+
+clouddrift.adapters.gdp6h.preprocess(index: int, **kwargs) Dataset[source]#
+

Extract and preprocess the Lagrangian data and attributes.

+

This function takes an identification number that can be used to create a +file or url pattern or select data from a Dataframe. It then preprocesses +the data and returns a clean Xarray Dataset.

+
+

Parameters#

+
+
indexint

Drifter’s identification number

+
+
+
+
+

Returns#

+
+
dsxr.Dataset

Xarray Dataset containing the data and attributes

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.gdp6h.to_raggedarray.html b/_autosummary/clouddrift.adapters.gdp6h.to_raggedarray.html new file mode 100644 index 00000000..fb3d3b2f --- /dev/null +++ b/_autosummary/clouddrift.adapters.gdp6h.to_raggedarray.html @@ -0,0 +1,622 @@ + + + + + + + + + + + clouddrift.adapters.gdp6h.to_raggedarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.gdp6h.to_raggedarray

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.gdp6h.to_raggedarray#

+
+
+clouddrift.adapters.gdp6h.to_raggedarray(drifter_ids: list[int] | None = None, n_random_id: int | None = None, tmp_path: str | None = '/tmp/clouddrift/gdp6h') RaggedArray[source]#
+

Download and process individual GDP 6-hourly files and return a +RaggedArray instance with the data.

+
+

Parameters#

+
+
drifter_idslist[int], optional

List of drifters to retrieve (Default: all)

+
+
n_random_idlist[int], optional

Randomly select n_random_id drifter NetCDF files

+
+
tmp_pathstr, optional

Path to the directory where the individual NetCDF files are stored +(default varies depending on operating system; /tmp/clouddrift/gdp6h on Linux)

+
+
+
+
+

Returns#

+
+
outRaggedArray

A RaggedArray instance of the requested dataset

+
+
+
+
+

Examples#

+

Invoke to_raggedarray without any arguments to download all drifter data +from the 6-hourly GDP feed:

+
>>> from clouddrift.adapters.gdp6h import to_raggedarray
+>>> ra = to_raggedarray()
+
+
+

To download a random sample of 100 drifters, for example for development +or testing, use the n_random_id argument:

+
>>> ra = to_raggedarray(n_random_id=100)
+
+
+

To download a specific list of drifters, use the drifter_ids argument:

+
>>> ra = to_raggedarray(drifter_ids=[54375, 114956, 126934])
+
+
+

Finally, to_raggedarray returns a RaggedArray instance which provides +a convenience method to emit a xarray.Dataset instance:

+
>>> ds = ra.to_xarray()
+
+
+

To write the ragged array dataset to a NetCDF file on disk, do

+
>>> ds.to_netcdf("gdp6h.nc", format="NETCDF4")
+
+
+

Alternatively, to write the ragged array to a Parquet file, first create +it as an Awkward Array:

+
>>> arr = ra.to_awkward()
+>>> arr.to_parquet("gdp6h.parquet")
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.glad.get_dataframe.html b/_autosummary/clouddrift.adapters.glad.get_dataframe.html new file mode 100644 index 00000000..d7439d31 --- /dev/null +++ b/_autosummary/clouddrift.adapters.glad.get_dataframe.html @@ -0,0 +1,569 @@ + + + + + + + + + + + clouddrift.adapters.glad.get_dataframe — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.glad.get_dataframe

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.glad.get_dataframe#

+
+
+clouddrift.adapters.glad.get_dataframe() DataFrame[source]#
+

Get the GLAD dataset as a pandas DataFrame.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.glad.html b/_autosummary/clouddrift.adapters.glad.html new file mode 100644 index 00000000..6895d6c0 --- /dev/null +++ b/_autosummary/clouddrift.adapters.glad.html @@ -0,0 +1,606 @@ + + + + + + + + + + + clouddrift.adapters.glad — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.glad

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.glad#

+

This module defines functions used to adapt the Grand LAgrangian Deployment +(GLAD) dataset as a ragged-array Xarray Dataset.

+

The dataset and its description are hosted at https://doi.org/10.7266/N7VD6WC8.

+
+

Example#

+
>>> from clouddrift.adapters import glad
+>>> ds = glad.to_xarray()
+
+
+
+
+

Reference#

+

Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012. Distributed by: Gulf of Mexico Research Initiative Information and Data Cooperative (GRIIDC), Harte Research Institute, Texas A&M University–Corpus Christi. doi:10.7266/N7VD6WC8

+
+

Functions

+ + + + + + + + + +

get_dataframe()

Get the GLAD dataset as a pandas DataFrame.

to_xarray()

Return the GLAD data as a ragged-array Xarray Dataset.

+
+
+clouddrift.adapters.glad.get_dataframe() DataFrame[source]#
+

Get the GLAD dataset as a pandas DataFrame.

+
+ +
+
+clouddrift.adapters.glad.to_xarray() Dataset[source]#
+

Return the GLAD data as a ragged-array Xarray Dataset.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.glad.to_xarray.html b/_autosummary/clouddrift.adapters.glad.to_xarray.html new file mode 100644 index 00000000..4b991930 --- /dev/null +++ b/_autosummary/clouddrift.adapters.glad.to_xarray.html @@ -0,0 +1,569 @@ + + + + + + + + + + + clouddrift.adapters.glad.to_xarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.glad.to_xarray

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.glad.to_xarray#

+
+
+clouddrift.adapters.glad.to_xarray() Dataset[source]#
+

Return the GLAD data as a ragged-array Xarray Dataset.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.mosaic.get_dataframes.html b/_autosummary/clouddrift.adapters.mosaic.get_dataframes.html new file mode 100644 index 00000000..7d6b0538 --- /dev/null +++ b/_autosummary/clouddrift.adapters.mosaic.get_dataframes.html @@ -0,0 +1,570 @@ + + + + + + + + + + + clouddrift.adapters.mosaic.get_dataframes — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.mosaic.get_dataframes

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.mosaic.get_dataframes#

+
+
+clouddrift.adapters.mosaic.get_dataframes() tuple[DataFrame, DataFrame][source]#
+

Get the MOSAiC data (obs dimension in the target Dataset) and metadata +(traj dimension in the target dataset ) as pandas DataFrames.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.mosaic.get_file_urls.html b/_autosummary/clouddrift.adapters.mosaic.get_file_urls.html new file mode 100644 index 00000000..d767094c --- /dev/null +++ b/_autosummary/clouddrift.adapters.mosaic.get_file_urls.html @@ -0,0 +1,569 @@ + + + + + + + + + + + clouddrift.adapters.mosaic.get_file_urls — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.mosaic.get_file_urls

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.mosaic.get_file_urls#

+
+
+clouddrift.adapters.mosaic.get_file_urls(xml: str) list[str][source]#
+

Pass the MOSAiC XML string and return the list of filenames and URLs.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.mosaic.get_repository_metadata.html b/_autosummary/clouddrift.adapters.mosaic.get_repository_metadata.html new file mode 100644 index 00000000..386b8118 --- /dev/null +++ b/_autosummary/clouddrift.adapters.mosaic.get_repository_metadata.html @@ -0,0 +1,570 @@ + + + + + + + + + + + clouddrift.adapters.mosaic.get_repository_metadata — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.mosaic.get_repository_metadata

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.mosaic.get_repository_metadata#

+
+
+clouddrift.adapters.mosaic.get_repository_metadata() str[source]#
+

Get the MOSAiC repository metadata as an XML string. +Pass this string to other get_* functions to extract the data you need.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.mosaic.html b/_autosummary/clouddrift.adapters.mosaic.html new file mode 100644 index 00000000..509d910a --- /dev/null +++ b/_autosummary/clouddrift.adapters.mosaic.html @@ -0,0 +1,632 @@ + + + + + + + + + + + clouddrift.adapters.mosaic — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.mosaic

+ +
+ +
+
+ + + + +
+ +
+

clouddrift.adapters.mosaic#

+

This module defines functions used to adapt the MOSAiC sea-ice drift dataset as +a ragged-array dataset.

+

The dataset is hosted at https://doi.org/10.18739/A2KP7TS83.

+

Reference: Angela Bliss, Jennifer Hutchings, Philip Anderson, Philipp Anhaus, +Hans Jakob Belter, Jørgen Berge, Vladimir Bessonov, Bin Cheng, Sylvia Cole, +Dave Costa, Finlo Cottier, Christopher J Cox, Pedro R De La Torre, Dmitry V Divine, +Gilbert Emzivat, Ying-Chih Fang, Steven Fons, Michael Gallagher, Maxime Geoffrey, +Mats A Granskog, … Guangyu Zuo. (2022). Sea ice drift tracks from the Distributed +Network of autonomous buoys deployed during the Multidisciplinary drifting Observatory +for the Study of Arctic Climate (MOSAiC) expedition 2019 - 2021. Arctic Data Center. +doi:10.18739/A2KP7TS83.

+
+

Example#

+
>>> from clouddrift.adapters import mosaic
+>>> ds = mosaic.to_xarray()
+
+
+
+

Functions

+ + + + + + + + + + + + + + + +

get_dataframes()

Get the MOSAiC data (obs dimension in the target Dataset) and metadata (traj dimension in the target dataset ) as pandas DataFrames.

get_file_urls(xml)

Pass the MOSAiC XML string and return the list of filenames and URLs.

get_repository_metadata()

Get the MOSAiC repository metadata as an XML string.

to_xarray()

Return the MOSAiC data as an ragged-array Xarray Dataset.

+
+
+clouddrift.adapters.mosaic.get_dataframes() tuple[DataFrame, DataFrame][source]#
+

Get the MOSAiC data (obs dimension in the target Dataset) and metadata +(traj dimension in the target dataset ) as pandas DataFrames.

+
+ +
+
+clouddrift.adapters.mosaic.get_file_urls(xml: str) list[str][source]#
+

Pass the MOSAiC XML string and return the list of filenames and URLs.

+
+ +
+
+clouddrift.adapters.mosaic.get_repository_metadata() str[source]#
+

Get the MOSAiC repository metadata as an XML string. +Pass this string to other get_* functions to extract the data you need.

+
+ +
+
+clouddrift.adapters.mosaic.to_xarray()[source]#
+

Return the MOSAiC data as an ragged-array Xarray Dataset.

+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.mosaic.to_xarray.html b/_autosummary/clouddrift.adapters.mosaic.to_xarray.html new file mode 100644 index 00000000..8b987951 --- /dev/null +++ b/_autosummary/clouddrift.adapters.mosaic.to_xarray.html @@ -0,0 +1,569 @@ + + + + + + + + + + + clouddrift.adapters.mosaic.to_xarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.mosaic.to_xarray

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.mosaic.to_xarray#

+
+
+clouddrift.adapters.mosaic.to_xarray()[source]#
+

Return the MOSAiC data as an ragged-array Xarray Dataset.

+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.subsurface_floats.download.html b/_autosummary/clouddrift.adapters.subsurface_floats.download.html new file mode 100644 index 00000000..ab31261f --- /dev/null +++ b/_autosummary/clouddrift.adapters.subsurface_floats.download.html @@ -0,0 +1,568 @@ + + + + + + + + + + + clouddrift.adapters.subsurface_floats.download — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.subsurface_floats.download

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.subsurface_floats.download#

+
+
+clouddrift.adapters.subsurface_floats.download(file: str)[source]#
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.subsurface_floats.html b/_autosummary/clouddrift.adapters.subsurface_floats.html new file mode 100644 index 00000000..c93626f0 --- /dev/null +++ b/_autosummary/clouddrift.adapters.subsurface_floats.html @@ -0,0 +1,585 @@ + + + + + + + + + + + clouddrift.adapters.subsurface_floats — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.subsurface_floats

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.subsurface_floats#

+

This module defines functions to adapt as a ragged-array dataset a collection of data +from 2193 trajectories of SOFAR, APEX, and RAFOS subsurface floats from 52 experiments +across the world between 1989 and 2015.

+

The dataset is hosted at https://www.aoml.noaa.gov/phod/float_traj/index.php

+
+

Example#

+
>>> from clouddrift.adapters import subsurface_floats
+>>> ds = subsurface_floats.to_xarray()
+
+
+
+

Functions

+ + + + + + + + + +

download(file)

to_xarray([tmp_path])

+
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.adapters.subsurface_floats.to_xarray.html b/_autosummary/clouddrift.adapters.subsurface_floats.to_xarray.html new file mode 100644 index 00000000..3b381687 --- /dev/null +++ b/_autosummary/clouddrift.adapters.subsurface_floats.to_xarray.html @@ -0,0 +1,568 @@ + + + + + + + + + + + clouddrift.adapters.subsurface_floats.to_xarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.adapters.subsurface_floats.to_xarray

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.adapters.subsurface_floats.to_xarray#

+
+
+clouddrift.adapters.subsurface_floats.to_xarray(tmp_path: str = None)[source]#
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.andro.html b/_autosummary/clouddrift.datasets.andro.html new file mode 100644 index 00000000..ab2eeee3 --- /dev/null +++ b/_autosummary/clouddrift.datasets.andro.html @@ -0,0 +1,633 @@ + + + + + + + + + + + clouddrift.datasets.andro — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.andro

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.andro#

+
+
+clouddrift.datasets.andro(decode_times: bool = True) Dataset[source]#
+

Returns the ANDRO as a ragged array Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access. The upstream +data is available at https://www.seanoe.org/data/00360/47077/.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

ANDRO dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import andro
+>>> ds = andro()
+>>> ds
+<xarray.Dataset>
+Dimensions:     (obs: 1360753, traj: 9996)
+Coordinates:
+    time_d      (obs) datetime64[ns] ...
+    time_s      (obs) datetime64[ns] ...
+    time_lp     (obs) datetime64[ns] ...
+    time_lc     (obs) datetime64[ns] ...
+    id          (traj) int64 ...
+Dimensions without coordinates: obs, traj
+Data variables: (12/33)
+    lon_d       (obs) float64 ...
+    lat_d       (obs) float64 ...
+    pres_d      (obs) float32 ...
+    temp_d      (obs) float32 ...
+    sal_d       (obs) float32 ...
+    ve_d        (obs) float32 ...
+    ...          ...
+    lon_lc      (obs) float64 ...
+    lat_lc      (obs) float64 ...
+    surf_fix    (obs) int64 ...
+    cycle       (obs) int64 ...
+    profile_id  (obs) float32 ...
+    rowsize     (traj) int64 ...
+Attributes:
+    title:           ANDRO: An Argo-based deep displacement dataset
+    history:         2022-03-04
+    date_created:    2023-12-08T00:52:00.937120
+    publisher_name:  SEANOE (SEA scieNtific Open data Edition)
+    publisher_url:   https://www.seanoe.org/data/00360/47077/
+    license:         freely available
+
+
+
+
+

Reference#

+

Ollitrault Michel, Rannou Philippe, Brion Emilie, Cabanes Cecile, Piron Anne, Reverdin Gilles, +Kolodziejczyk Nicolas (2022). ANDRO: An Argo-based deep displacement dataset. +SEANOE. https://doi.org/10.17882/47077

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.gdp1h.html b/_autosummary/clouddrift.datasets.gdp1h.html new file mode 100644 index 00000000..40209117 --- /dev/null +++ b/_autosummary/clouddrift.datasets.gdp1h.html @@ -0,0 +1,635 @@ + + + + + + + + + + + clouddrift.datasets.gdp1h — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.gdp1h

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.gdp1h#

+
+
+clouddrift.datasets.gdp1h(decode_times: bool = True) Dataset[source]#
+

Returns the latest version of the NOAA Global Drifter Program (GDP) hourly +dataset as a ragged array Xarray dataset.

+

The data is accessed from zarr archive hosted on a public AWS S3 bucket accessible at +https://registry.opendata.aws/noaa-oar-hourly-gdp/. Original data source from NOAA NCEI +is https://doi.org/10.25921/x46c-3620).

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

Hourly GDP dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import gdp1h
+>>> ds = gdp1h()
+>>> ds
+<xarray.Dataset>
+Dimensions:                (traj: 19396, obs: 197214787)
+Coordinates:
+    id                     (traj) int64 ...
+    time                   (obs) datetime64[ns] ...
+Dimensions without coordinates: traj, obs
+Data variables: (12/60)
+    BuoyTypeManufacturer   (traj) |S20 ...
+    BuoyTypeSensorArray    (traj) |S20 ...
+    CurrentProgram         (traj) float32 ...
+    DeployingCountry       (traj) |S20 ...
+    DeployingShip          (traj) |S20 ...
+    DeploymentComments     (traj) |S20 ...
+    ...                     ...
+    start_lat              (traj) float32 ...
+    start_lon              (traj) float32 ...
+    typebuoy               (traj) |S10 ...
+    typedeath              (traj) int8 ...
+    ve                     (obs) float32 ...
+    vn                     (obs) float32 ...
+Attributes: (12/16)
+    Conventions:       CF-1.6
+    acknowledgement:   Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurio...
+    contributor_name:  NOAA Global Drifter Program
+    contributor_role:  Data Acquisition Center
+    date_created:      2023-09-08T17:05:12.130123
+    doi:               10.25921/x46c-3620
+    ...                ...
+    processing_level:  Level 2 QC by GDP drifter DAC
+    publisher_email:   aoml.dftr@noaa.gov
+    publisher_name:    GDP Drifter DAC
+    publisher_url:     https://www.aoml.noaa.gov/phod/gdp
+    summary:           Global Drifter Program hourly data
+    title:             Global Drifter Program hourly drifting buoy collection
+
+
+
+
+

See Also#

+

gdp6h()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.gdp6h.html b/_autosummary/clouddrift.datasets.gdp6h.html new file mode 100644 index 00000000..33de4bb0 --- /dev/null +++ b/_autosummary/clouddrift.datasets.gdp6h.html @@ -0,0 +1,639 @@ + + + + + + + + + + + clouddrift.datasets.gdp6h — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.gdp6h

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.gdp6h#

+
+
+clouddrift.datasets.gdp6h(decode_times: bool = True) Dataset[source]#
+

Returns the NOAA Global Drifter Program (GDP) 6-hourly dataset as a ragged array +Xarray dataset.

+

The data is accessed from a public HTTPS server at NOAA’s Atlantic +Oceanographic and Meteorological Laboratory (AOML) accessible at +https://www.aoml.noaa.gov/phod/gdp/index.php. It should be noted that the data loading +method is platform dependent. Linux and Darwin (macOS) machines lazy load the datasets leveraging the +byte-range feature of the netCDF-c library (dataset loading engine used by xarray). +Windows machines download the entire dataset into a memory buffer which is then passed +to xarray.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

6-hourly GDP dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import gdp6h
+>>> ds = gdp6h()
+>>> ds
+<xarray.Dataset>
+Dimensions:                (traj: 27647, obs: 46535470)
+Coordinates:
+    ids                    (obs) int64 7702204 7702204 ... 300234061198840
+    time                   (obs) float64 2.879e+08 2.879e+08 ... 1.697e+09
+Dimensions without coordinates: traj, obs
+Data variables: (12/50)
+    ID                     (traj) int64 7702204 7702201 ... 300234061198840
+    rowsize                (traj) int32 92 1747 1943 1385 1819 ... 54 53 51 28
+    WMO                    (traj) int32 0 0 0 0 ... 6203890 6203888 4101885
+    expno                  (traj) int32 40 40 40 40 ... 31412 21421 21421 31412
+    deploy_date            (traj) float32 2.878e+08 2.878e+08 ... 1.696e+09 nan
+    deploy_lat             (traj) float32 -7.798 -4.9 -3.18 ... 9.9 11.9 nan
+    ...                     ...
+    vn                     (obs) float32 nan 0.1056 0.04974 ... 0.7384 nan
+    temp                   (obs) float32 28.35 28.3 nan ... 29.08 28.97 28.92
+    err_lat                (obs) float32 0.009737 0.007097 ... 0.001659 0.001687
+    err_lon                (obs) float32 0.00614 0.004583 ... 0.002471 0.002545
+    err_temp               (obs) float32 0.08666 0.08757 ... 0.03665 0.03665
+    drogue_status          (obs) bool False False False False ... True True True
+Attributes: (12/18)
+    title:                Global Drifter Program drifting buoy collection
+    history:              version September 2023. Metadata from dirall.dat an...
+    Conventions:          CF-1.6
+    time_coverage_start:  1979-02-15:00:00:00Z
+    time_coverage_end:    2023-10-18:18:00:00Z
+    date_created:         2023-12-22T17:50:22.242943
+    ...                   ...
+    contributor_name:     NOAA Global Drifter Program
+    contributor_role:     Data Acquisition Center
+    institution:          NOAA Atlantic Oceanographic and Meteorological Labo...
+    acknowledgement:      Lumpkin, Rick; Centurioni, Luca (2019). NOAA Global...
+    summary:              Global Drifter Program six-hourly data
+    doi:                  10.25921/7ntx-z961
+
+
+
+
+

See Also#

+

gdp1h()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.glad.html b/_autosummary/clouddrift.datasets.glad.html new file mode 100644 index 00000000..a806b836 --- /dev/null +++ b/_autosummary/clouddrift.datasets.glad.html @@ -0,0 +1,621 @@ + + + + + + + + + + + clouddrift.datasets.glad — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.glad

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.glad#

+
+
+clouddrift.datasets.glad(decode_times: bool = True) Dataset[source]#
+

Returns the Grand LAgrangian Deployment (GLAD) dataset as a ragged array +Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access.

+

The upstream data is available at https://doi.org/10.7266/N7VD6WC8.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

GLAD dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import glad
+>>> ds = glad()
+>>> ds
+<xarray.Dataset>
+Dimensions:         (obs: 1602883, traj: 297)
+Coordinates:
+  time            (obs) datetime64[ns] ...
+  id              (traj) object ...
+Data variables:
+  latitude        (obs) float32 ...
+  longitude       (obs) float32 ...
+  position_error  (obs) float32 ...
+  u               (obs) float32 ...
+  v               (obs) float32 ...
+  velocity_error  (obs) float32 ...
+  rowsize         (traj) int64 ...
+Attributes:
+  title:        GLAD experiment CODE-style drifter trajectories (low-pass f...
+  institution:  Consortium for Advanced Research on Transport of Hydrocarbo...
+  source:       CODE-style drifters
+  history:      Downloaded from https://data.gulfresearchinitiative.org/dat...
+  references:   Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter t...
+
+
+
+
+

Reference#

+

Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012. Distributed by: Gulf of Mexico Research Initiative Information and Data Cooperative (GRIIDC), Harte Research Institute, Texas A&M University–Corpus Christi. doi:10.7266/N7VD6WC8

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.html b/_autosummary/clouddrift.datasets.html new file mode 100644 index 00000000..5e1b263a --- /dev/null +++ b/_autosummary/clouddrift.datasets.html @@ -0,0 +1,1160 @@ + + + + + + + + + + + clouddrift.datasets — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets

+ +
+ +
+
+ + + + +
+ +
+

clouddrift.datasets#

+

This module provides functions to easily access ragged array datasets. If the datasets are +not accessed via cloud storage platforms or are not found on the local filesystem, +they will be downloaded from their upstream repositories and stored for later access +(~/.clouddrift for UNIX-based systems).

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

andro([decode_times])

Returns the ANDRO as a ragged array Xarray dataset.

gdp1h([decode_times])

Returns the latest version of the NOAA Global Drifter Program (GDP) hourly dataset as a ragged array Xarray dataset.

gdp6h([decode_times])

Returns the NOAA Global Drifter Program (GDP) 6-hourly dataset as a ragged array Xarray dataset.

glad([decode_times])

Returns the Grand LAgrangian Deployment (GLAD) dataset as a ragged array Xarray dataset.

mosaic([decode_times])

Returns the MOSAiC sea-ice drift dataset as a ragged array Xarray dataset.

spotters([decode_times])

Returns the Sofar Ocean Spotter drifters ragged array dataset as an Xarray dataset.

subsurface_floats([decode_times])

Returns the subsurface floats dataset as a ragged array Xarray dataset.

yomaha([decode_times])

Returns the YoMaHa dataset as a ragged array Xarray dataset.

+
+
+clouddrift.datasets.andro(decode_times: bool = True) Dataset[source]#
+

Returns the ANDRO as a ragged array Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access. The upstream +data is available at https://www.seanoe.org/data/00360/47077/.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

ANDRO dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import andro
+>>> ds = andro()
+>>> ds
+<xarray.Dataset>
+Dimensions:     (obs: 1360753, traj: 9996)
+Coordinates:
+    time_d      (obs) datetime64[ns] ...
+    time_s      (obs) datetime64[ns] ...
+    time_lp     (obs) datetime64[ns] ...
+    time_lc     (obs) datetime64[ns] ...
+    id          (traj) int64 ...
+Dimensions without coordinates: obs, traj
+Data variables: (12/33)
+    lon_d       (obs) float64 ...
+    lat_d       (obs) float64 ...
+    pres_d      (obs) float32 ...
+    temp_d      (obs) float32 ...
+    sal_d       (obs) float32 ...
+    ve_d        (obs) float32 ...
+    ...          ...
+    lon_lc      (obs) float64 ...
+    lat_lc      (obs) float64 ...
+    surf_fix    (obs) int64 ...
+    cycle       (obs) int64 ...
+    profile_id  (obs) float32 ...
+    rowsize     (traj) int64 ...
+Attributes:
+    title:           ANDRO: An Argo-based deep displacement dataset
+    history:         2022-03-04
+    date_created:    2023-12-08T00:52:00.937120
+    publisher_name:  SEANOE (SEA scieNtific Open data Edition)
+    publisher_url:   https://www.seanoe.org/data/00360/47077/
+    license:         freely available
+
+
+
+
+

Reference#

+

Ollitrault Michel, Rannou Philippe, Brion Emilie, Cabanes Cecile, Piron Anne, Reverdin Gilles, +Kolodziejczyk Nicolas (2022). ANDRO: An Argo-based deep displacement dataset. +SEANOE. https://doi.org/10.17882/47077

+
+
+ +
+
+clouddrift.datasets.gdp1h(decode_times: bool = True) Dataset[source]#
+

Returns the latest version of the NOAA Global Drifter Program (GDP) hourly +dataset as a ragged array Xarray dataset.

+

The data is accessed from zarr archive hosted on a public AWS S3 bucket accessible at +https://registry.opendata.aws/noaa-oar-hourly-gdp/. Original data source from NOAA NCEI +is https://doi.org/10.25921/x46c-3620).

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

Hourly GDP dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import gdp1h
+>>> ds = gdp1h()
+>>> ds
+<xarray.Dataset>
+Dimensions:                (traj: 19396, obs: 197214787)
+Coordinates:
+    id                     (traj) int64 ...
+    time                   (obs) datetime64[ns] ...
+Dimensions without coordinates: traj, obs
+Data variables: (12/60)
+    BuoyTypeManufacturer   (traj) |S20 ...
+    BuoyTypeSensorArray    (traj) |S20 ...
+    CurrentProgram         (traj) float32 ...
+    DeployingCountry       (traj) |S20 ...
+    DeployingShip          (traj) |S20 ...
+    DeploymentComments     (traj) |S20 ...
+    ...                     ...
+    start_lat              (traj) float32 ...
+    start_lon              (traj) float32 ...
+    typebuoy               (traj) |S10 ...
+    typedeath              (traj) int8 ...
+    ve                     (obs) float32 ...
+    vn                     (obs) float32 ...
+Attributes: (12/16)
+    Conventions:       CF-1.6
+    acknowledgement:   Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurio...
+    contributor_name:  NOAA Global Drifter Program
+    contributor_role:  Data Acquisition Center
+    date_created:      2023-09-08T17:05:12.130123
+    doi:               10.25921/x46c-3620
+    ...                ...
+    processing_level:  Level 2 QC by GDP drifter DAC
+    publisher_email:   aoml.dftr@noaa.gov
+    publisher_name:    GDP Drifter DAC
+    publisher_url:     https://www.aoml.noaa.gov/phod/gdp
+    summary:           Global Drifter Program hourly data
+    title:             Global Drifter Program hourly drifting buoy collection
+
+
+
+
+

See Also#

+

gdp6h()

+
+
+ +
+
+clouddrift.datasets.gdp6h(decode_times: bool = True) Dataset[source]#
+

Returns the NOAA Global Drifter Program (GDP) 6-hourly dataset as a ragged array +Xarray dataset.

+

The data is accessed from a public HTTPS server at NOAA’s Atlantic +Oceanographic and Meteorological Laboratory (AOML) accessible at +https://www.aoml.noaa.gov/phod/gdp/index.php. It should be noted that the data loading +method is platform dependent. Linux and Darwin (macOS) machines lazy load the datasets leveraging the +byte-range feature of the netCDF-c library (dataset loading engine used by xarray). +Windows machines download the entire dataset into a memory buffer which is then passed +to xarray.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

6-hourly GDP dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import gdp6h
+>>> ds = gdp6h()
+>>> ds
+<xarray.Dataset>
+Dimensions:                (traj: 27647, obs: 46535470)
+Coordinates:
+    ids                    (obs) int64 7702204 7702204 ... 300234061198840
+    time                   (obs) float64 2.879e+08 2.879e+08 ... 1.697e+09
+Dimensions without coordinates: traj, obs
+Data variables: (12/50)
+    ID                     (traj) int64 7702204 7702201 ... 300234061198840
+    rowsize                (traj) int32 92 1747 1943 1385 1819 ... 54 53 51 28
+    WMO                    (traj) int32 0 0 0 0 ... 6203890 6203888 4101885
+    expno                  (traj) int32 40 40 40 40 ... 31412 21421 21421 31412
+    deploy_date            (traj) float32 2.878e+08 2.878e+08 ... 1.696e+09 nan
+    deploy_lat             (traj) float32 -7.798 -4.9 -3.18 ... 9.9 11.9 nan
+    ...                     ...
+    vn                     (obs) float32 nan 0.1056 0.04974 ... 0.7384 nan
+    temp                   (obs) float32 28.35 28.3 nan ... 29.08 28.97 28.92
+    err_lat                (obs) float32 0.009737 0.007097 ... 0.001659 0.001687
+    err_lon                (obs) float32 0.00614 0.004583 ... 0.002471 0.002545
+    err_temp               (obs) float32 0.08666 0.08757 ... 0.03665 0.03665
+    drogue_status          (obs) bool False False False False ... True True True
+Attributes: (12/18)
+    title:                Global Drifter Program drifting buoy collection
+    history:              version September 2023. Metadata from dirall.dat an...
+    Conventions:          CF-1.6
+    time_coverage_start:  1979-02-15:00:00:00Z
+    time_coverage_end:    2023-10-18:18:00:00Z
+    date_created:         2023-12-22T17:50:22.242943
+    ...                   ...
+    contributor_name:     NOAA Global Drifter Program
+    contributor_role:     Data Acquisition Center
+    institution:          NOAA Atlantic Oceanographic and Meteorological Labo...
+    acknowledgement:      Lumpkin, Rick; Centurioni, Luca (2019). NOAA Global...
+    summary:              Global Drifter Program six-hourly data
+    doi:                  10.25921/7ntx-z961
+
+
+
+
+

See Also#

+

gdp1h()

+
+
+ +
+
+clouddrift.datasets.glad(decode_times: bool = True) Dataset[source]#
+

Returns the Grand LAgrangian Deployment (GLAD) dataset as a ragged array +Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access.

+

The upstream data is available at https://doi.org/10.7266/N7VD6WC8.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

GLAD dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import glad
+>>> ds = glad()
+>>> ds
+<xarray.Dataset>
+Dimensions:         (obs: 1602883, traj: 297)
+Coordinates:
+  time            (obs) datetime64[ns] ...
+  id              (traj) object ...
+Data variables:
+  latitude        (obs) float32 ...
+  longitude       (obs) float32 ...
+  position_error  (obs) float32 ...
+  u               (obs) float32 ...
+  v               (obs) float32 ...
+  velocity_error  (obs) float32 ...
+  rowsize         (traj) int64 ...
+Attributes:
+  title:        GLAD experiment CODE-style drifter trajectories (low-pass f...
+  institution:  Consortium for Advanced Research on Transport of Hydrocarbo...
+  source:       CODE-style drifters
+  history:      Downloaded from https://data.gulfresearchinitiative.org/dat...
+  references:   Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter t...
+
+
+
+
+

Reference#

+

Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012. Distributed by: Gulf of Mexico Research Initiative Information and Data Cooperative (GRIIDC), Harte Research Institute, Texas A&M University–Corpus Christi. doi:10.7266/N7VD6WC8

+
+
+ +
+
+clouddrift.datasets.mosaic(decode_times: bool = True) Dataset[source]#
+

Returns the MOSAiC sea-ice drift dataset as a ragged array Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access.

+

The upstream data is available at https://arcticdata.io/catalog/view/doi:10.18739/A2KP7TS83.

+
+

Reference#

+

Angela Bliss, Jennifer Hutchings, Philip Anderson, Philipp Anhaus, +Hans Jakob Belter, Jørgen Berge, Vladimir Bessonov, Bin Cheng, Sylvia Cole, +Dave Costa, Finlo Cottier, Christopher J Cox, Pedro R De La Torre, Dmitry V Divine, +Gilbert Emzivat, Ying-Chih Fang, Steven Fons, Michael Gallagher, Maxime Geoffrey, +Mats A Granskog, … Guangyu Zuo. (2022). Sea ice drift tracks from the Distributed +Network of autonomous buoys deployed during the Multidisciplinary drifting Observatory +for the Study of Arctic Climate (MOSAiC) expedition 2019 - 2021. Arctic Data Center. +doi:10.18739/A2KP7TS83.

+
+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

MOSAiC sea-ice drift dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import mosaic
+>>> ds = mosaic()
+>>> ds
+<xarray.Dataset>
+Dimensions:                     (obs: 1926226, traj: 216)
+Coordinates:
+    time                        (obs) datetime64[ns] ...
+    id                          (traj) object ...
+Dimensions without coordinates: obs, traj
+Data variables: (12/19)
+    latitude                    (obs) float64 ...
+    longitude                   (obs) float64 ...
+    Deployment Leg              (traj) int64 ...
+    DN Station ID               (traj) object ...
+    IMEI                        (traj) object ...
+    Deployment Date             (traj) datetime64[ns] ...
+    ...                          ...
+    Buoy Type                   (traj) object ...
+    Manufacturer                (traj) object ...
+    Model                       (traj) object ...
+    PI                          (traj) object ...
+    Data Authors                (traj) object ...
+    rowsize                     (traj) int64 ...
+
+
+
+
+ +
+
+clouddrift.datasets.spotters(decode_times: bool = True) Dataset[source]#
+

Returns the Sofar Ocean Spotter drifters ragged array dataset as an Xarray dataset.

+

The data is accessed from a zarr archive hosted on a public AWS S3 bucket accessible +at https://sofar-spotter-archive.s3.amazonaws.com/spotter_data_bulk_zarr.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

Sofar ocean floats dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import spotters
+>>> ds = spotters()
+>>> ds
+<xarray.Dataset>
+Dimensions:                (index: 6390651, trajectory: 871)
+Coordinates:
+    time                   (index) datetime64[ns] ...
+  * trajectory             (trajectory) object 'SPOT-010001' ... 'SPOT-1975'
+Dimensions without coordinates: index
+Data variables:
+    latitude               (index) float64 ...
+    longitude              (index) float64 ...
+    meanDirection          (index) float64 ...
+    meanDirectionalSpread  (index) float64 ...
+    meanPeriod             (index) float64 ...
+    peakDirection          (index) float64 ...
+    peakDirectionalSpread  (index) float64 ...
+    peakPeriod             (index) float64 ...
+    rowsize                (trajectory) int64 ...
+    significantWaveHeight  (index) float64 ...
+Attributes:
+    author:         Isabel A. Houghton
+    creation_date:  2023-10-18 00:43:55.333537
+    email:          isabel.houghton@sofarocean.com
+    institution:    Sofar Ocean
+    references:     https://content.sofarocean.com/hubfs/Spotter%20product%20...
+    source:         Spotter wave buoy
+    title:          Sofar Spotter Data Archive - Bulk Wave Parameters
+
+
+
+
+ +
+
+clouddrift.datasets.subsurface_floats(decode_times: bool = True) Dataset[source]#
+

Returns the subsurface floats dataset as a ragged array Xarray dataset.

+

The data is accessed from a public HTTPS server at NOAA’s Atlantic +Oceanographic and Meteorological Laboratory (AOML) accessible at +https://www.aoml.noaa.gov/phod/gdp/index.php.

+

The upstream data is available at +https://www.aoml.noaa.gov/phod/float_traj/files/allFloats_12122017.mat.

+

This dataset of subsurface float observations was compiled by the WOCE Subsurface +Float Data Assembly Center (WFDAC) in Woods Hole maintained by Andree Ramsey and +Heather Furey and copied to NOAA/AOML in October 2014 (version 1) and in December +2017 (version 2). Subsequent updates will be included as additional appropriate +float data, quality controlled by the appropriate principal investigators, is +submitted for inclusion.

+

Note that these observations are collected by ALACE/RAFOS/Eurofloat-style +acoustically-tracked, neutrally-buoyant subsurface floats which collect data while +drifting beneath the ocean surface. These data are the result of the effort and +resources of many individuals and institutions. You are encouraged to acknowledge +the work of the data originators and Data Centers in publications arising from use +of these data.

+

The float data were originally divided by project at the WFDAC. Here they have been +compiled in a single Matlab data set. See here for more information on the variables +contained in these files.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

Subsurface floats dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import subsurface_floats
+>>> ds = subsurface_floats()
+>>> ds
+<xarray.Dataset>
+Dimensions:   (traj: 2193, obs: 1402840)
+Coordinates:
+    id        (traj) uint16 ...
+    time      (obs) datetime64[ns] ...
+Dimensions without coordinates: traj, obs
+Data variables: (12/13)
+    expList   (traj) object ...
+    expName   (traj) object ...
+    expOrg    (traj) object ...
+    expPI     (traj) object ...
+    indexExp  (traj) uint8 ...
+    fltType   (traj) object ...
+    ...        ...
+    lon       (obs) float64 ...
+    lat       (obs) float64 ...
+    pres      (obs) float64 ...
+    temp      (obs) float64 ...
+    ve        (obs) float64 ...
+    vn        (obs) float64 ...
+Attributes:
+    title:            Subsurface float trajectories dataset
+    history:          December 2017 (version 2)
+    date_created:     2023-11-14T22:30:38.831656
+    publisher_name:   WOCE Subsurface Float Data Assembly Center and NOAA AOML
+    publisher_url:    https://www.aoml.noaa.gov/phod/float_traj/data.php
+    license:          freely available
+    acknowledgement:  Maintained by Andree Ramsey and Heather Furey from the ...
+
+
+
+
+

References#

+

WOCE Subsurface Float Data Assembly Center (WFDAC) https://www.aoml.noaa.gov/phod/float_traj/index.php

+
+
+ +
+
+clouddrift.datasets.yomaha(decode_times: bool = True) Dataset[source]#
+

Returns the YoMaHa dataset as a ragged array Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access. The upstream +data is available at http://apdrc.soest.hawaii.edu/projects/yomaha/.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

YoMaHa’07 dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import yomaha
+>>> ds = yomaha()
+>>> ds
+<xarray.Dataset>
+Dimensions:     (obs: 1926743, traj: 12196)
+Coordinates:
+    time_d      (obs) datetime64[ns] ...
+    time_s      (obs) datetime64[ns] ...
+    time_lp     (obs) datetime64[ns] ...
+    time_lc     (obs) datetime64[ns] ...
+    id          (traj) int64 ...
+Dimensions without coordinates: obs, traj
+Data variables: (12/27)
+    lon_d       (obs) float64 ...
+    lat_d       (obs) float64 ...
+    pres_d      (obs) float32 ...
+    ve_d        (obs) float32 ...
+    vn_d        (obs) float32 ...
+    err_ve_d    (obs) float32 ...
+    ...          ...
+    cycle       (obs) int64 ...
+    time_inv    (obs) int64 ...
+    rowsize     (traj) int64 ...
+    wmo_id      (traj) int64 ...
+    dac_id      (traj) int64 ...
+    float_type  (traj) int64 ...
+Attributes:
+    title:           YoMaHa'07: Velocity data assessed from trajectories of A...
+    history:         Dataset updated on Tue Jun 28 03:14:34 HST 2022
+    date_created:    2023-12-08T00:52:08.478075
+    publisher_name:  Asia-Pacific Data Research Center
+    publisher_url:   http://apdrc.soest.hawaii.edu/index.php
+    license:         Creative Commons Attribution 4.0 International License..
+
+
+
+
+

Reference#

+

Lebedev, K. V., Yoshinari, H., Maximenko, N. A., & Hacker, P. W. (2007). Velocity data +assessed from trajectories of Argo floats at parking level and at the sea +surface. IPRC Technical Note, 4(2), 1-16.

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.mosaic.html b/_autosummary/clouddrift.datasets.mosaic.html new file mode 100644 index 00000000..b6fa52f3 --- /dev/null +++ b/_autosummary/clouddrift.datasets.mosaic.html @@ -0,0 +1,628 @@ + + + + + + + + + + + clouddrift.datasets.mosaic — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.mosaic

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.mosaic#

+
+
+clouddrift.datasets.mosaic(decode_times: bool = True) Dataset[source]#
+

Returns the MOSAiC sea-ice drift dataset as a ragged array Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access.

+

The upstream data is available at https://arcticdata.io/catalog/view/doi:10.18739/A2KP7TS83.

+
+

Reference#

+

Angela Bliss, Jennifer Hutchings, Philip Anderson, Philipp Anhaus, +Hans Jakob Belter, Jørgen Berge, Vladimir Bessonov, Bin Cheng, Sylvia Cole, +Dave Costa, Finlo Cottier, Christopher J Cox, Pedro R De La Torre, Dmitry V Divine, +Gilbert Emzivat, Ying-Chih Fang, Steven Fons, Michael Gallagher, Maxime Geoffrey, +Mats A Granskog, … Guangyu Zuo. (2022). Sea ice drift tracks from the Distributed +Network of autonomous buoys deployed during the Multidisciplinary drifting Observatory +for the Study of Arctic Climate (MOSAiC) expedition 2019 - 2021. Arctic Data Center. +doi:10.18739/A2KP7TS83.

+
+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

MOSAiC sea-ice drift dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import mosaic
+>>> ds = mosaic()
+>>> ds
+<xarray.Dataset>
+Dimensions:                     (obs: 1926226, traj: 216)
+Coordinates:
+    time                        (obs) datetime64[ns] ...
+    id                          (traj) object ...
+Dimensions without coordinates: obs, traj
+Data variables: (12/19)
+    latitude                    (obs) float64 ...
+    longitude                   (obs) float64 ...
+    Deployment Leg              (traj) int64 ...
+    DN Station ID               (traj) object ...
+    IMEI                        (traj) object ...
+    Deployment Date             (traj) datetime64[ns] ...
+    ...                          ...
+    Buoy Type                   (traj) object ...
+    Manufacturer                (traj) object ...
+    Model                       (traj) object ...
+    PI                          (traj) object ...
+    Data Authors                (traj) object ...
+    rowsize                     (traj) int64 ...
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.spotters.html b/_autosummary/clouddrift.datasets.spotters.html new file mode 100644 index 00000000..31c41a70 --- /dev/null +++ b/_autosummary/clouddrift.datasets.spotters.html @@ -0,0 +1,620 @@ + + + + + + + + + + + clouddrift.datasets.spotters — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.spotters

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.spotters#

+
+
+clouddrift.datasets.spotters(decode_times: bool = True) Dataset[source]#
+

Returns the Sofar Ocean Spotter drifters ragged array dataset as an Xarray dataset.

+

The data is accessed from a zarr archive hosted on a public AWS S3 bucket accessible +at https://sofar-spotter-archive.s3.amazonaws.com/spotter_data_bulk_zarr.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

Sofar ocean floats dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import spotters
+>>> ds = spotters()
+>>> ds
+<xarray.Dataset>
+Dimensions:                (index: 6390651, trajectory: 871)
+Coordinates:
+    time                   (index) datetime64[ns] ...
+  * trajectory             (trajectory) object 'SPOT-010001' ... 'SPOT-1975'
+Dimensions without coordinates: index
+Data variables:
+    latitude               (index) float64 ...
+    longitude              (index) float64 ...
+    meanDirection          (index) float64 ...
+    meanDirectionalSpread  (index) float64 ...
+    meanPeriod             (index) float64 ...
+    peakDirection          (index) float64 ...
+    peakDirectionalSpread  (index) float64 ...
+    peakPeriod             (index) float64 ...
+    rowsize                (trajectory) int64 ...
+    significantWaveHeight  (index) float64 ...
+Attributes:
+    author:         Isabel A. Houghton
+    creation_date:  2023-10-18 00:43:55.333537
+    email:          isabel.houghton@sofarocean.com
+    institution:    Sofar Ocean
+    references:     https://content.sofarocean.com/hubfs/Spotter%20product%20...
+    source:         Spotter wave buoy
+    title:          Sofar Spotter Data Archive - Bulk Wave Parameters
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.subsurface_floats.html b/_autosummary/clouddrift.datasets.subsurface_floats.html new file mode 100644 index 00000000..7f7c60bd --- /dev/null +++ b/_autosummary/clouddrift.datasets.subsurface_floats.html @@ -0,0 +1,645 @@ + + + + + + + + + + + clouddrift.datasets.subsurface_floats — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.subsurface_floats

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.subsurface_floats#

+
+
+clouddrift.datasets.subsurface_floats(decode_times: bool = True) Dataset[source]#
+

Returns the subsurface floats dataset as a ragged array Xarray dataset.

+

The data is accessed from a public HTTPS server at NOAA’s Atlantic +Oceanographic and Meteorological Laboratory (AOML) accessible at +https://www.aoml.noaa.gov/phod/gdp/index.php.

+

The upstream data is available at +https://www.aoml.noaa.gov/phod/float_traj/files/allFloats_12122017.mat.

+

This dataset of subsurface float observations was compiled by the WOCE Subsurface +Float Data Assembly Center (WFDAC) in Woods Hole maintained by Andree Ramsey and +Heather Furey and copied to NOAA/AOML in October 2014 (version 1) and in December +2017 (version 2). Subsequent updates will be included as additional appropriate +float data, quality controlled by the appropriate principal investigators, is +submitted for inclusion.

+

Note that these observations are collected by ALACE/RAFOS/Eurofloat-style +acoustically-tracked, neutrally-buoyant subsurface floats which collect data while +drifting beneath the ocean surface. These data are the result of the effort and +resources of many individuals and institutions. You are encouraged to acknowledge +the work of the data originators and Data Centers in publications arising from use +of these data.

+

The float data were originally divided by project at the WFDAC. Here they have been +compiled in a single Matlab data set. See here for more information on the variables +contained in these files.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

Subsurface floats dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import subsurface_floats
+>>> ds = subsurface_floats()
+>>> ds
+<xarray.Dataset>
+Dimensions:   (traj: 2193, obs: 1402840)
+Coordinates:
+    id        (traj) uint16 ...
+    time      (obs) datetime64[ns] ...
+Dimensions without coordinates: traj, obs
+Data variables: (12/13)
+    expList   (traj) object ...
+    expName   (traj) object ...
+    expOrg    (traj) object ...
+    expPI     (traj) object ...
+    indexExp  (traj) uint8 ...
+    fltType   (traj) object ...
+    ...        ...
+    lon       (obs) float64 ...
+    lat       (obs) float64 ...
+    pres      (obs) float64 ...
+    temp      (obs) float64 ...
+    ve        (obs) float64 ...
+    vn        (obs) float64 ...
+Attributes:
+    title:            Subsurface float trajectories dataset
+    history:          December 2017 (version 2)
+    date_created:     2023-11-14T22:30:38.831656
+    publisher_name:   WOCE Subsurface Float Data Assembly Center and NOAA AOML
+    publisher_url:    https://www.aoml.noaa.gov/phod/float_traj/data.php
+    license:          freely available
+    acknowledgement:  Maintained by Andree Ramsey and Heather Furey from the ...
+
+
+
+
+

References#

+

WOCE Subsurface Float Data Assembly Center (WFDAC) https://www.aoml.noaa.gov/phod/float_traj/index.php

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.datasets.yomaha.html b/_autosummary/clouddrift.datasets.yomaha.html new file mode 100644 index 00000000..4bdda2e2 --- /dev/null +++ b/_autosummary/clouddrift.datasets.yomaha.html @@ -0,0 +1,633 @@ + + + + + + + + + + + clouddrift.datasets.yomaha — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.datasets.yomaha

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.datasets.yomaha#

+
+
+clouddrift.datasets.yomaha(decode_times: bool = True) Dataset[source]#
+

Returns the YoMaHa dataset as a ragged array Xarray dataset.

+

The function will first look for the ragged-array dataset on the local +filesystem. If it is not found, the dataset will be downloaded using the +corresponding adapter function and stored for later access. The upstream +data is available at http://apdrc.soest.hawaii.edu/projects/yomaha/.

+
+

Parameters#

+
+
decode_timesbool, optional

If True, decode the time coordinate into a datetime object. If False, the time +coordinate will be an int64 or float64 array of increments since the origin +time indicated in the units attribute. Default is True.

+
+
+
+
+

Returns#

+
+
xarray.Dataset

YoMaHa’07 dataset as a ragged array

+
+
+
+
+

Examples#

+
>>> from clouddrift.datasets import yomaha
+>>> ds = yomaha()
+>>> ds
+<xarray.Dataset>
+Dimensions:     (obs: 1926743, traj: 12196)
+Coordinates:
+    time_d      (obs) datetime64[ns] ...
+    time_s      (obs) datetime64[ns] ...
+    time_lp     (obs) datetime64[ns] ...
+    time_lc     (obs) datetime64[ns] ...
+    id          (traj) int64 ...
+Dimensions without coordinates: obs, traj
+Data variables: (12/27)
+    lon_d       (obs) float64 ...
+    lat_d       (obs) float64 ...
+    pres_d      (obs) float32 ...
+    ve_d        (obs) float32 ...
+    vn_d        (obs) float32 ...
+    err_ve_d    (obs) float32 ...
+    ...          ...
+    cycle       (obs) int64 ...
+    time_inv    (obs) int64 ...
+    rowsize     (traj) int64 ...
+    wmo_id      (traj) int64 ...
+    dac_id      (traj) int64 ...
+    float_type  (traj) int64 ...
+Attributes:
+    title:           YoMaHa'07: Velocity data assessed from trajectories of A...
+    history:         Dataset updated on Tue Jun 28 03:14:34 HST 2022
+    date_created:    2023-12-08T00:52:08.478075
+    publisher_name:  Asia-Pacific Data Research Center
+    publisher_url:   http://apdrc.soest.hawaii.edu/index.php
+    license:         Creative Commons Attribution 4.0 International License..
+
+
+
+
+

Reference#

+

Lebedev, K. V., Yoshinari, H., Maximenko, N. A., & Hacker, P. W. (2007). Velocity data +assessed from trajectories of Argo floats at parking level and at the sea +surface. IPRC Technical Note, 4(2), 1-16.

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.html b/_autosummary/clouddrift.kinematics.html new file mode 100644 index 00000000..3e229fb3 --- /dev/null +++ b/_autosummary/clouddrift.kinematics.html @@ -0,0 +1,1103 @@ + + + + + + + + + + + clouddrift.kinematics — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.kinematics#

+

Functions for kinematic computations.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + +

inertial_oscillation_from_position(...[, ...])

Extract inertial oscillations from consecutive geographical positions.

kinetic_energy(u[, v])

Compute kinetic energy from zonal and meridional velocities.

position_from_velocity(u, v, time, x_origin, ...)

Compute positions from arrays of velocities and time and a pair of origin coordinates.

residual_position_from_displacement(...)

Return residual longitudes and latitudes along a trajectory on the spherical Earth after correcting for zonal and meridional displacements x and y in meters.

spin(u, v, time[, difference_scheme, time_axis])

Compute spin continuously from velocities and times.

velocity_from_position(x, y, time[, ...])

Compute velocity from arrays of positions and time.

+
+
+clouddrift.kinematics.inertial_oscillation_from_position(longitude: ndarray, latitude: ndarray, relative_bandwidth: float | None = None, wavelet_duration: float | None = None, time_step: float | None = 3600.0, relative_vorticity: float | ndarray | None = 0.0) ndarray[source]#
+

Extract inertial oscillations from consecutive geographical positions.

+

This function acts by performing a time-frequency analysis of horizontal displacements +with analytic Morse wavelets. It extracts the portion of the wavelet transform signal +that follows the inertial frequency (opposite of Coriolis frequency) as a function of time, +potentially shifted in frequency by a measure of relative vorticity. The result is a pair +of zonal and meridional relative displacements in meters.

+

This function is equivalent to a bandpass filtering of the horizontal displacements. The characteristics +of the filter are defined by the relative bandwidth of the wavelet transform or by the duration of the wavelet, +see the parameters below.

+
+

Parameters#

+
+
longitudearray-like

Longitude sequence. Unidimensional array input.

+
+
latitudearray-like

Latitude sequence. Unidimensional array input.

+
+
relative_bandwidthfloat, optional

Bandwidth of the frequency-domain equivalent filter for the extraction of the inertial +oscillations; a number less or equal to one which is a fraction of the inertial frequency. +A value of 0.1 leads to a bandpass filter equivalent of +/- 10 percent of the inertial frequency.

+
+
wavelet_durationfloat, optional

Duration of the wavelet, or inverse of the relative bandwidth, which can be passed instead of the +relative bandwidth.

+
+
time_stepfloat, optional

The constant time interval between data points in seconds. Default is 3600.

+
+
relative_vorticity: Optional, float or array-like

Relative vorticity adding to the local Coriolis frequency. If “f” is the Coriolis +frequency then “f” + relative_vorticity will be the effective Coriolis frequency as defined by Kunze (1985). +Positive values correspond to cyclonic vorticity, irrespectively of the latitudes of the data +points.

+
+
+
+
+

Returns#

+
+
xhatarray-like

Zonal relative displacement in meters from inertial oscillations.

+
+
yhatarray-like

Meridional relative displacement in meters from inertial oscillations.

+
+
+
+
+

Examples#

+

To extract displacements from inertial oscillations from sequences of longitude +and latitude values, equivalent to bandpass around 20 percent of the local inertial frequency:

+
>>> xhat, yhat = inertial_oscillation_from_position(longitude, latitude, relative_bandwidth=0.2)
+
+
+

The same result can be obtained by specifying the wavelet duration instead of the relative bandwidth:

+
>>> xhat, yhat = inertial_oscillation_from_position(longitude, latitude, wavelet_duration=5)
+
+
+

Next, the residual positions from the inertial displacements can be obtained with another function:

+
>>> residual_longitudes, residual_latitudes = residual_position_from_displacement(longitude, latitude, xhat, yhat)
+
+
+
+
+

Raises#

+
+
ValueError

If longitude and latitude arrays do not have the same shape. +If relative_vorticity is an array and does not have the same shape as longitude and latitude. +If time_step is not a float. +If both relative_bandwidth and wavelet_duration are specified. +If neither relative_bandwidth nor wavelet_duration are specified. +If the absolute value of relative_bandwidth is not in the range (0,1]. +If the wavelet duration is not greater than or equal to 1.

+
+
+
+
+

See Also#

+

residual_position_from_displacement(), wavelet_transform, morse_wavelet

+
+
+ +
+
+clouddrift.kinematics.kinetic_energy(u: float | list | ndarray | DataArray | Series, v: float | list | ndarray | DataArray | Series | None = None) float | ndarray | DataArray[source]#
+

Compute kinetic energy from zonal and meridional velocities.

+
+

Parameters#

+
+
ufloat or array-like

Zonal velocity.

+
+
vfloat or array-like, optional.

Meridional velocity. If not provided, the flow is assumed one-dimensional +in time and defined by u.

+
+
+
+
+

Returns#

+
+
kefloat or array-like

Kinetic energy.

+
+
+
+
+

Examples#

+
>>> import numpy as np
+>>> from clouddrift.kinematics import kinetic_energy
+>>> u = np.array([1., 2., 3., 4.])
+>>> v = np.array([1., 1., 1., 1.])
+>>> kinetic_energy(u, v)
+array([1. , 2.5, 5. , 8.5])
+
+
+
>>> u = np.reshape(np.tile([1., 2., 3., 4.], 2), (2, 4))
+>>> v = np.reshape(np.tile([1., 1., 1., 1.], 2), (2, 4))
+>>> kinetic_energy(u, v)
+array([[1. , 2.5, 5. , 8.5],
+       [1. , 2.5, 5. , 8.5]])
+
+
+
+
+ +
+
+clouddrift.kinematics.position_from_velocity(u: ndarray, v: ndarray, time: ndarray, x_origin: float, y_origin: float, coord_system: str | None = 'spherical', integration_scheme: str | None = 'forward', time_axis: int | None = -1) Tuple[ndarray, ndarray][source]#
+

Compute positions from arrays of velocities and time and a pair of origin +coordinates.

+

The units of the result are degrees if coord_system == "spherical" (default). +If coord_system == "cartesian", the units of the result are equal to the +units of the input velocities multiplied by the units of the input time. +For example, if the input velocities are in meters per second and the input +time is in seconds, the units of the result will be meters.

+

Integration scheme can take one of three values:

+
+
    +
  1. +
    “forward” (default): integration from x[i] to x[i+1] is performed

    using the velocity at x[i].

    +
    +
    +
  2. +
  3. +
    “backward”: integration from x[i] to x[i+1] is performed using the

    velocity at x[i+1].

    +
    +
    +
  4. +
  5. +
    “centered”: integration from x[i] to x[i+1] is performed using the

    arithmetic average of the velocities at x[i] and x[i+1]. Note that +this method introduces some error due to the averaging.

    +
    +
    +
  6. +
+
+

u, v, and time can be multi-dimensional arrays. If the time axis, along +which the finite differencing is performed, is not the last one (i.e. +x.shape[-1]), use the time_axis optional argument to specify along which +axis should the differencing be done. x, y, and time must have +the same shape.

+

This function will not do any special handling of longitude ranges. If the +integrated trajectory crosses the antimeridian (dateline) in either direction, the +longitude values will not be adjusted to stay in any specific range such +as [-180, 180] or [0, 360]. If you need your longitudes to be in a specific +range, recast the resulting longitude from this function using the function +clouddrift.sphere.recast_lon().

+
+

Parameters#

+
+
unp.ndarray

An array of eastward velocities.

+
+
vnp.ndarray

An array of northward velocities.

+
+
timenp.ndarray

An array of time values.

+
+
x_originfloat

Origin x-coordinate or origin longitude.

+
+
y_originfloat

Origin y-coordinate or origin latitude.

+
+
coord_systemstr, optional

The coordinate system of the input. Can be “spherical” or “cartesian”. +Default is “spherical”.

+
+
integration_schemestr, optional

The difference scheme to use for computing the position. Can be +“forward” or “backward”. Default is “forward”.

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
xnp.ndarray

An array of zonal displacements or longitudes.

+
+
ynp.ndarray

An array of meridional displacements or latitudes.

+
+
+
+
+

Examples#

+

Simple integration on a plane, using the forward scheme by default:

+
>>> import numpy as np
+>>> from clouddrift.analysis import position_from_velocity
+>>> u = np.array([1., 2., 3., 4.])
+>>> v = np.array([1., 1., 1., 1.])
+>>> time = np.array([0., 1., 2., 3.])
+>>> x, y = position_from_velocity(u, v, time, 0, 0, coord_system="cartesian")
+>>> x
+array([0., 1., 3., 6.])
+>>> y
+array([0., 1., 2., 3.])
+
+
+

As above, but using centered scheme:

+
>>> x, y = position_from_velocity(u, v, time, 0, 0, coord_system="cartesian", integration_scheme="centered")
+>>> x
+array([0., 1.5, 4., 7.5])
+>>> y
+array([0., 1., 2., 3.])
+
+
+

Simple integration on a sphere (default):

+
>>> u = np.array([1., 2., 3., 4.])
+>>> v = np.array([1., 1., 1., 1.])
+>>> time = np.array([0., 1., 2., 3.]) * 1e5
+>>> x, y = position_from_velocity(u, v, time, 0, 0)
+>>> x
+array([0.        , 0.89839411, 2.69584476, 5.39367518])
+>>> y
+array([0.        , 0.89828369, 1.79601515, 2.69201609])
+
+
+

Integrating across the antimeridian (dateline) by default does not +recast the resulting longitude:

+
>>> u = np.array([1., 1.])
+>>> v = np.array([0., 0.])
+>>> time = np.array([0, 1e5])
+>>> x, y = position_from_velocity(u, v, time, 179.5, 0)
+>>> x
+array([179.5      , 180.3983205])
+>>> y
+array([0., 0.])
+
+
+

Use the clouddrift.sphere.recast_lon function to recast the longitudes +to the desired range:

+
>>> from clouddrift.sphere import recast_lon
+>>> recast_lon(x, -180)
+array([ 179.5      , -179.6016795])
+
+
+
+
+

Raises#

+
+
ValueError

If u and v do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]). +If lengths of x, y, and time along time_axis are not equal. +If the input coordinate system is not “spherical” or “cartesian”. +If the input integration scheme is not “forward”, “backward”, or “centered”

+
+
+
+
+

See Also#

+

velocity_from_position()

+
+
+ +
+
+clouddrift.kinematics.residual_position_from_displacement(longitude: float | ndarray | DataArray, latitude: float | ndarray | DataArray, x: float | ndarray, y: float | ndarray) Tuple[float] | Tuple[ndarray][source]#
+

Return residual longitudes and latitudes along a trajectory on the spherical Earth +after correcting for zonal and meridional displacements x and y in meters.

+

This is applicable as an example when one seeks to correct a trajectory for +horizontal oscillations due to inertial motions, tides, etc.

+
+

Parameters#

+
+
longitudefloat or array-like

Longitude in degrees.

+
+
latitudefloat or array-like

Latitude in degrees.

+
+
xfloat or np.ndarray

Zonal displacement in meters.

+
+
yfloat or np.ndarray

Meridional displacement in meters.

+
+
+
+
+

Returns#

+
+
residual_longitudefloat or np.ndarray

Residual longitude after correcting for zonal displacement, in degrees.

+
+
residual_latitudefloat or np.ndarray

Residual latitude after correcting for meridional displacement, in degrees.

+
+
+
+
+

Examples#

+

Obtain the new geographical position for a displacement of 1/360-th of the +circumference of the Earth from original position (longitude,latitude) = (1,0):

+
>>> from clouddrift.sphere import EARTH_RADIUS_METERS
+>>> residual_position_from_displacement(1,0,2 * np.pi * EARTH_RADIUS_METERS / 360,0)
+(0.0, 0.0)
+
+
+
+
+ +
+
+clouddrift.kinematics.spin(u: ndarray, v: ndarray, time: ndarray, difference_scheme: str | None = 'forward', time_axis: int | None = -1) float | ndarray[source]#
+

Compute spin continuously from velocities and times.

+

Spin is traditionally (Sawford, 1999; Veneziani et al., 2005) defined as +(<u’dv’ - v’du’>) / (2 dt EKE) where u’ and v’ are eddy-perturbations of the +velocity field, EKE is eddy kinetic energy, dt is the time step, and du’ and +dv’ are velocity component increments during dt, and < > denotes ensemble +average.

+

To allow computing spin based on full velocity fields, this function does +not do any demeaning of the velocity fields. If you need the spin based on +velocity anomalies, ensure to demean the velocity fields before passing +them to this function. This function also returns instantaneous spin values, +so the rank of the result is not reduced relative to the input.

+

u, v, and time can be multi-dimensional arrays. If the time +axis, along which the finite differencing is performed, is not the last one +(i.e. u.shape[-1]), use the time_axis optional argument to specify along +which the spin should be calculated. u, v, and time must either have the +same shape, or time must be a 1-d array with the same length as +u.shape[time_axis].

+

Difference scheme can be one of three values:

+
+
    +
  1. “forward” (default): finite difference is evaluated as dx[i] = dx[i+1] - dx[i];

  2. +
  3. “backward”: finite difference is evaluated as dx[i] = dx[i] - dx[i-1];

  4. +
  5. “centered”: finite difference is evaluated as dx[i] = (dx[i+1] - dx[i-1]) / 2.

  6. +
+
+

Forward and backward schemes are effectively the same except that the +position at which the velocity is evaluated is shifted one element down in +the backward scheme relative to the forward scheme. In the case of a +forward or backward difference scheme, the last or first element of the +velocity, respectively, is extrapolated from its neighboring point. In the +case of a centered difference scheme, the start and end boundary points are +evaluated using the forward and backward difference scheme, respectively.

+
+

Parameters#

+
+
unp.ndarray

Zonal velocity

+
+
vnp.ndarray

Meridional velocity

+
+
timearray-like

Time

+
+
difference_schemestr, optional

Difference scheme to use; possible values are “forward”, “backward”, and “centered”.

+
+
time_axisint, optional

Axis along which the time varies (default is -1)

+
+
+
+
+

Returns#

+
+
sfloat or np.ndarray

Spin

+
+
+
+
+

Raises#

+
+
ValueError

If u and v do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]). +If lengths of u, v, and time along time_axis are not equal. +If difference_scheme is not “forward”, “backward”, or “centered”.

+
+
+
+
+

Examples#

+
>>> from clouddrift.kinematics import spin
+>>> import numpy as np
+>>> u = np.array([1., 2., -1., 4.])
+>>> v = np.array([1., 3., -2., 1.])
+>>> time = np.array([0., 1., 2., 3.])
+>>> spin(u, v, time)
+array([ 0.5       , -0.07692308,  1.4       ,  0.41176471])
+
+
+

Use difference_scheme to specify an alternative finite difference +scheme for the velocity differences:

+
>>> spin(u, v, time, difference_scheme="centered")
+array([0.5       , 0.        , 0.6       , 0.41176471])
+>>> spin(u, v, time, difference_scheme="backward")
+array([ 0.5       ,  0.07692308, -0.2       ,  0.41176471])
+
+
+
+
+

References#

+ +
+
+ +
+
+clouddrift.kinematics.velocity_from_position(x: ndarray, y: ndarray, time: ndarray, coord_system: str | None = 'spherical', difference_scheme: str | None = 'forward', time_axis: int | None = -1) Tuple[DataArray, DataArray][source]#
+

Compute velocity from arrays of positions and time.

+

x and y can be provided as longitude and latitude in degrees if +coord_system == “spherical” (default), or as easting and northing if +coord_system == “cartesian”.

+

The units of the result are meters per unit of time if +coord_system == “spherical”. For example, if the time is provided in the +units of seconds, the resulting velocity is in the units of meters per +second. Otherwise, if coord_system == “cartesian”, the units of the +resulting velocity correspond to the units of the input. For example, +if zonal and meridional displacements are in the units of kilometers and +time is in the units of hours, the resulting velocity is in the units of +kilometers per hour.

+

x, y, and time can be multi-dimensional arrays. If the time axis, along +which the finite differencing is performed, is not the last one (i.e. +x.shape[-1]), use the time_axis optional argument to specify along which +axis should the differencing be done. x, y, and time must have the same +shape.

+

Difference scheme can take one of three values:

+
    +
  1. “forward” (default): finite difference is evaluated as dx[i] = dx[i+1] - dx[i];

  2. +
  3. “backward”: finite difference is evaluated as dx[i] = dx[i] - dx[i-1];

  4. +
  5. “centered”: finite difference is evaluated as dx[i] = (dx[i+1] - dx[i-1]) / 2.

  6. +
+

Forward and backward schemes are effectively the same except that the +position at which the velocity is evaluated is shifted one element down in +the backward scheme relative to the forward scheme. In the case of a +forward or backward difference scheme, the last or first element of the +velocity, respectively, is extrapolated from its neighboring point. In the +case of a centered difference scheme, the start and end boundary points are +evaluated using the forward and backward difference scheme, respectively.

+
+

Parameters#

+
+
xarray_like

An N-d array of x-positions (longitude in degrees or zonal displacement in any unit)

+
+
yarray_like

An N-d array of y-positions (latitude in degrees or meridional displacement in any unit)

+
+
timearray_like

An N-d array of times as floating point values (in any unit)

+
+
coord_systemstr, optional

Coordinate system that x and y arrays are in; possible values are “spherical” (default) or “cartesian”.

+
+
difference_schemestr, optional

Difference scheme to use; possible values are “forward”, “backward”, and “centered”.

+
+
time_axisint, optional

Axis along which to differentiate (default is -1)

+
+
+
+
+

Returns#

+
+
unp.ndarray

Zonal velocity

+
+
vnp.ndarray

Meridional velocity

+
+
+
+
+

Raises#

+
+
ValueError

If x and y do not have the same shape. +If time_axis is outside of the valid range. +If lengths of x, y, and time along time_axis are not equal. +If coord_system is not “spherical” or “cartesian”. +If difference_scheme is not “forward”, “backward”, or “centered”.

+
+
+
+
+

Examples#

+

Simple integration on a sphere, using the forward scheme by default:

+
>>> import numpy as np
+>>> from clouddrift.kinematics import velocity_from_position
+>>> lon = np.array([0., 1., 3., 6.])
+>>> lat = np.array([0., 1., 2., 3.])
+>>> time = np.array([0., 1., 2., 3.]) * 1e5
+>>> u, v = velocity_from_position(lon, lat, time)
+>>> u
+array([1.11307541, 2.22513331, 3.33515501, 3.33515501])
+>>> v
+array([1.11324496, 1.11409224, 1.1167442 , 1.1167442 ])
+
+
+

Integration on a Cartesian plane, using the forward scheme by default:

+
>>> x = np.array([0., 1., 3., 6.])
+>>> y = np.array([0., 1., 2., 3.])
+>>> time = np.array([0., 1., 2., 3.])
+>>> u, v = velocity_from_position(x, y, time, coord_system="cartesian")
+>>> u
+array([1., 2., 3., 3.])
+>>> v
+array([1., 1., 1., 1.])
+
+
+
+
+

See Also#

+

position_from_velocity()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.inertial_oscillation_from_position.html b/_autosummary/clouddrift.kinematics.inertial_oscillation_from_position.html new file mode 100644 index 00000000..bc2919cc --- /dev/null +++ b/_autosummary/clouddrift.kinematics.inertial_oscillation_from_position.html @@ -0,0 +1,642 @@ + + + + + + + + + + + clouddrift.kinematics.inertial_oscillation_from_position — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.kinematics.inertial_oscillation_from_position

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.kinematics.inertial_oscillation_from_position#

+
+
+clouddrift.kinematics.inertial_oscillation_from_position(longitude: ndarray, latitude: ndarray, relative_bandwidth: float | None = None, wavelet_duration: float | None = None, time_step: float | None = 3600.0, relative_vorticity: float | ndarray | None = 0.0) ndarray[source]#
+

Extract inertial oscillations from consecutive geographical positions.

+

This function acts by performing a time-frequency analysis of horizontal displacements +with analytic Morse wavelets. It extracts the portion of the wavelet transform signal +that follows the inertial frequency (opposite of Coriolis frequency) as a function of time, +potentially shifted in frequency by a measure of relative vorticity. The result is a pair +of zonal and meridional relative displacements in meters.

+

This function is equivalent to a bandpass filtering of the horizontal displacements. The characteristics +of the filter are defined by the relative bandwidth of the wavelet transform or by the duration of the wavelet, +see the parameters below.

+
+

Parameters#

+
+
longitudearray-like

Longitude sequence. Unidimensional array input.

+
+
latitudearray-like

Latitude sequence. Unidimensional array input.

+
+
relative_bandwidthfloat, optional

Bandwidth of the frequency-domain equivalent filter for the extraction of the inertial +oscillations; a number less or equal to one which is a fraction of the inertial frequency. +A value of 0.1 leads to a bandpass filter equivalent of +/- 10 percent of the inertial frequency.

+
+
wavelet_durationfloat, optional

Duration of the wavelet, or inverse of the relative bandwidth, which can be passed instead of the +relative bandwidth.

+
+
time_stepfloat, optional

The constant time interval between data points in seconds. Default is 3600.

+
+
relative_vorticity: Optional, float or array-like

Relative vorticity adding to the local Coriolis frequency. If “f” is the Coriolis +frequency then “f” + relative_vorticity will be the effective Coriolis frequency as defined by Kunze (1985). +Positive values correspond to cyclonic vorticity, irrespectively of the latitudes of the data +points.

+
+
+
+
+

Returns#

+
+
xhatarray-like

Zonal relative displacement in meters from inertial oscillations.

+
+
yhatarray-like

Meridional relative displacement in meters from inertial oscillations.

+
+
+
+
+

Examples#

+

To extract displacements from inertial oscillations from sequences of longitude +and latitude values, equivalent to bandpass around 20 percent of the local inertial frequency:

+
>>> xhat, yhat = inertial_oscillation_from_position(longitude, latitude, relative_bandwidth=0.2)
+
+
+

The same result can be obtained by specifying the wavelet duration instead of the relative bandwidth:

+
>>> xhat, yhat = inertial_oscillation_from_position(longitude, latitude, wavelet_duration=5)
+
+
+

Next, the residual positions from the inertial displacements can be obtained with another function:

+
>>> residual_longitudes, residual_latitudes = residual_position_from_displacement(longitude, latitude, xhat, yhat)
+
+
+
+
+

Raises#

+
+
ValueError

If longitude and latitude arrays do not have the same shape. +If relative_vorticity is an array and does not have the same shape as longitude and latitude. +If time_step is not a float. +If both relative_bandwidth and wavelet_duration are specified. +If neither relative_bandwidth nor wavelet_duration are specified. +If the absolute value of relative_bandwidth is not in the range (0,1]. +If the wavelet duration is not greater than or equal to 1.

+
+
+
+
+

See Also#

+

residual_position_from_displacement(), wavelet_transform, morse_wavelet

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.kinetic_energy.html b/_autosummary/clouddrift.kinematics.kinetic_energy.html new file mode 100644 index 00000000..9e49e0f5 --- /dev/null +++ b/_autosummary/clouddrift.kinematics.kinetic_energy.html @@ -0,0 +1,604 @@ + + + + + + + + + + + clouddrift.kinematics.kinetic_energy — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.kinematics.kinetic_energy

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.kinematics.kinetic_energy#

+
+
+clouddrift.kinematics.kinetic_energy(u: float | list | ndarray | DataArray | Series, v: float | list | ndarray | DataArray | Series | None = None) float | ndarray | DataArray[source]#
+

Compute kinetic energy from zonal and meridional velocities.

+
+

Parameters#

+
+
ufloat or array-like

Zonal velocity.

+
+
vfloat or array-like, optional.

Meridional velocity. If not provided, the flow is assumed one-dimensional +in time and defined by u.

+
+
+
+
+

Returns#

+
+
kefloat or array-like

Kinetic energy.

+
+
+
+
+

Examples#

+
>>> import numpy as np
+>>> from clouddrift.kinematics import kinetic_energy
+>>> u = np.array([1., 2., 3., 4.])
+>>> v = np.array([1., 1., 1., 1.])
+>>> kinetic_energy(u, v)
+array([1. , 2.5, 5. , 8.5])
+
+
+
>>> u = np.reshape(np.tile([1., 2., 3., 4.], 2), (2, 4))
+>>> v = np.reshape(np.tile([1., 1., 1., 1.], 2), (2, 4))
+>>> kinetic_energy(u, v)
+array([[1. , 2.5, 5. , 8.5],
+       [1. , 2.5, 5. , 8.5]])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.position_from_velocity.html b/_autosummary/clouddrift.kinematics.position_from_velocity.html new file mode 100644 index 00000000..0827f015 --- /dev/null +++ b/_autosummary/clouddrift.kinematics.position_from_velocity.html @@ -0,0 +1,709 @@ + + + + + + + + + + + clouddrift.kinematics.position_from_velocity — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.kinematics.position_from_velocity

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.kinematics.position_from_velocity#

+
+
+clouddrift.kinematics.position_from_velocity(u: ndarray, v: ndarray, time: ndarray, x_origin: float, y_origin: float, coord_system: str | None = 'spherical', integration_scheme: str | None = 'forward', time_axis: int | None = -1) Tuple[ndarray, ndarray][source]#
+

Compute positions from arrays of velocities and time and a pair of origin +coordinates.

+

The units of the result are degrees if coord_system == "spherical" (default). +If coord_system == "cartesian", the units of the result are equal to the +units of the input velocities multiplied by the units of the input time. +For example, if the input velocities are in meters per second and the input +time is in seconds, the units of the result will be meters.

+

Integration scheme can take one of three values:

+
+
    +
  1. +
    “forward” (default): integration from x[i] to x[i+1] is performed

    using the velocity at x[i].

    +
    +
    +
  2. +
  3. +
    “backward”: integration from x[i] to x[i+1] is performed using the

    velocity at x[i+1].

    +
    +
    +
  4. +
  5. +
    “centered”: integration from x[i] to x[i+1] is performed using the

    arithmetic average of the velocities at x[i] and x[i+1]. Note that +this method introduces some error due to the averaging.

    +
    +
    +
  6. +
+
+

u, v, and time can be multi-dimensional arrays. If the time axis, along +which the finite differencing is performed, is not the last one (i.e. +x.shape[-1]), use the time_axis optional argument to specify along which +axis should the differencing be done. x, y, and time must have +the same shape.

+

This function will not do any special handling of longitude ranges. If the +integrated trajectory crosses the antimeridian (dateline) in either direction, the +longitude values will not be adjusted to stay in any specific range such +as [-180, 180] or [0, 360]. If you need your longitudes to be in a specific +range, recast the resulting longitude from this function using the function +clouddrift.sphere.recast_lon().

+
+

Parameters#

+
+
unp.ndarray

An array of eastward velocities.

+
+
vnp.ndarray

An array of northward velocities.

+
+
timenp.ndarray

An array of time values.

+
+
x_originfloat

Origin x-coordinate or origin longitude.

+
+
y_originfloat

Origin y-coordinate or origin latitude.

+
+
coord_systemstr, optional

The coordinate system of the input. Can be “spherical” or “cartesian”. +Default is “spherical”.

+
+
integration_schemestr, optional

The difference scheme to use for computing the position. Can be +“forward” or “backward”. Default is “forward”.

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
xnp.ndarray

An array of zonal displacements or longitudes.

+
+
ynp.ndarray

An array of meridional displacements or latitudes.

+
+
+
+
+

Examples#

+

Simple integration on a plane, using the forward scheme by default:

+
>>> import numpy as np
+>>> from clouddrift.analysis import position_from_velocity
+>>> u = np.array([1., 2., 3., 4.])
+>>> v = np.array([1., 1., 1., 1.])
+>>> time = np.array([0., 1., 2., 3.])
+>>> x, y = position_from_velocity(u, v, time, 0, 0, coord_system="cartesian")
+>>> x
+array([0., 1., 3., 6.])
+>>> y
+array([0., 1., 2., 3.])
+
+
+

As above, but using centered scheme:

+
>>> x, y = position_from_velocity(u, v, time, 0, 0, coord_system="cartesian", integration_scheme="centered")
+>>> x
+array([0., 1.5, 4., 7.5])
+>>> y
+array([0., 1., 2., 3.])
+
+
+

Simple integration on a sphere (default):

+
>>> u = np.array([1., 2., 3., 4.])
+>>> v = np.array([1., 1., 1., 1.])
+>>> time = np.array([0., 1., 2., 3.]) * 1e5
+>>> x, y = position_from_velocity(u, v, time, 0, 0)
+>>> x
+array([0.        , 0.89839411, 2.69584476, 5.39367518])
+>>> y
+array([0.        , 0.89828369, 1.79601515, 2.69201609])
+
+
+

Integrating across the antimeridian (dateline) by default does not +recast the resulting longitude:

+
>>> u = np.array([1., 1.])
+>>> v = np.array([0., 0.])
+>>> time = np.array([0, 1e5])
+>>> x, y = position_from_velocity(u, v, time, 179.5, 0)
+>>> x
+array([179.5      , 180.3983205])
+>>> y
+array([0., 0.])
+
+
+

Use the clouddrift.sphere.recast_lon function to recast the longitudes +to the desired range:

+
>>> from clouddrift.sphere import recast_lon
+>>> recast_lon(x, -180)
+array([ 179.5      , -179.6016795])
+
+
+
+
+

Raises#

+
+
ValueError

If u and v do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]). +If lengths of x, y, and time along time_axis are not equal. +If the input coordinate system is not “spherical” or “cartesian”. +If the input integration scheme is not “forward”, “backward”, or “centered”

+
+
+
+
+

See Also#

+

velocity_from_position()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.residual_position_from_displacement.html b/_autosummary/clouddrift.kinematics.residual_position_from_displacement.html new file mode 100644 index 00000000..4a2f95bc --- /dev/null +++ b/_autosummary/clouddrift.kinematics.residual_position_from_displacement.html @@ -0,0 +1,604 @@ + + + + + + + + + + + clouddrift.kinematics.residual_position_from_displacement — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.kinematics.residual_position_from_displacement

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.kinematics.residual_position_from_displacement#

+
+
+clouddrift.kinematics.residual_position_from_displacement(longitude: float | ndarray | DataArray, latitude: float | ndarray | DataArray, x: float | ndarray, y: float | ndarray) Tuple[float] | Tuple[ndarray][source]#
+

Return residual longitudes and latitudes along a trajectory on the spherical Earth +after correcting for zonal and meridional displacements x and y in meters.

+

This is applicable as an example when one seeks to correct a trajectory for +horizontal oscillations due to inertial motions, tides, etc.

+
+

Parameters#

+
+
longitudefloat or array-like

Longitude in degrees.

+
+
latitudefloat or array-like

Latitude in degrees.

+
+
xfloat or np.ndarray

Zonal displacement in meters.

+
+
yfloat or np.ndarray

Meridional displacement in meters.

+
+
+
+
+

Returns#

+
+
residual_longitudefloat or np.ndarray

Residual longitude after correcting for zonal displacement, in degrees.

+
+
residual_latitudefloat or np.ndarray

Residual latitude after correcting for meridional displacement, in degrees.

+
+
+
+
+

Examples#

+

Obtain the new geographical position for a displacement of 1/360-th of the +circumference of the Earth from original position (longitude,latitude) = (1,0):

+
>>> from clouddrift.sphere import EARTH_RADIUS_METERS
+>>> residual_position_from_displacement(1,0,2 * np.pi * EARTH_RADIUS_METERS / 360,0)
+(0.0, 0.0)
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.spin.html b/_autosummary/clouddrift.kinematics.spin.html new file mode 100644 index 00000000..9ba7bfdf --- /dev/null +++ b/_autosummary/clouddrift.kinematics.spin.html @@ -0,0 +1,659 @@ + + + + + + + + + + + clouddrift.kinematics.spin — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.kinematics.spin

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.kinematics.spin#

+
+
+clouddrift.kinematics.spin(u: ndarray, v: ndarray, time: ndarray, difference_scheme: str | None = 'forward', time_axis: int | None = -1) float | ndarray[source]#
+

Compute spin continuously from velocities and times.

+

Spin is traditionally (Sawford, 1999; Veneziani et al., 2005) defined as +(<u’dv’ - v’du’>) / (2 dt EKE) where u’ and v’ are eddy-perturbations of the +velocity field, EKE is eddy kinetic energy, dt is the time step, and du’ and +dv’ are velocity component increments during dt, and < > denotes ensemble +average.

+

To allow computing spin based on full velocity fields, this function does +not do any demeaning of the velocity fields. If you need the spin based on +velocity anomalies, ensure to demean the velocity fields before passing +them to this function. This function also returns instantaneous spin values, +so the rank of the result is not reduced relative to the input.

+

u, v, and time can be multi-dimensional arrays. If the time +axis, along which the finite differencing is performed, is not the last one +(i.e. u.shape[-1]), use the time_axis optional argument to specify along +which the spin should be calculated. u, v, and time must either have the +same shape, or time must be a 1-d array with the same length as +u.shape[time_axis].

+

Difference scheme can be one of three values:

+
+
    +
  1. “forward” (default): finite difference is evaluated as dx[i] = dx[i+1] - dx[i];

  2. +
  3. “backward”: finite difference is evaluated as dx[i] = dx[i] - dx[i-1];

  4. +
  5. “centered”: finite difference is evaluated as dx[i] = (dx[i+1] - dx[i-1]) / 2.

  6. +
+
+

Forward and backward schemes are effectively the same except that the +position at which the velocity is evaluated is shifted one element down in +the backward scheme relative to the forward scheme. In the case of a +forward or backward difference scheme, the last or first element of the +velocity, respectively, is extrapolated from its neighboring point. In the +case of a centered difference scheme, the start and end boundary points are +evaluated using the forward and backward difference scheme, respectively.

+
+

Parameters#

+
+
unp.ndarray

Zonal velocity

+
+
vnp.ndarray

Meridional velocity

+
+
timearray-like

Time

+
+
difference_schemestr, optional

Difference scheme to use; possible values are “forward”, “backward”, and “centered”.

+
+
time_axisint, optional

Axis along which the time varies (default is -1)

+
+
+
+
+

Returns#

+
+
sfloat or np.ndarray

Spin

+
+
+
+
+

Raises#

+
+
ValueError

If u and v do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]). +If lengths of u, v, and time along time_axis are not equal. +If difference_scheme is not “forward”, “backward”, or “centered”.

+
+
+
+
+

Examples#

+
>>> from clouddrift.kinematics import spin
+>>> import numpy as np
+>>> u = np.array([1., 2., -1., 4.])
+>>> v = np.array([1., 3., -2., 1.])
+>>> time = np.array([0., 1., 2., 3.])
+>>> spin(u, v, time)
+array([ 0.5       , -0.07692308,  1.4       ,  0.41176471])
+
+
+

Use difference_scheme to specify an alternative finite difference +scheme for the velocity differences:

+
>>> spin(u, v, time, difference_scheme="centered")
+array([0.5       , 0.        , 0.6       , 0.41176471])
+>>> spin(u, v, time, difference_scheme="backward")
+array([ 0.5       ,  0.07692308, -0.2       ,  0.41176471])
+
+
+
+
+

References#

+ +
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.kinematics.velocity_from_position.html b/_autosummary/clouddrift.kinematics.velocity_from_position.html new file mode 100644 index 00000000..c51e4192 --- /dev/null +++ b/_autosummary/clouddrift.kinematics.velocity_from_position.html @@ -0,0 +1,666 @@ + + + + + + + + + + + clouddrift.kinematics.velocity_from_position — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.kinematics.velocity_from_position

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.kinematics.velocity_from_position#

+
+
+clouddrift.kinematics.velocity_from_position(x: ndarray, y: ndarray, time: ndarray, coord_system: str | None = 'spherical', difference_scheme: str | None = 'forward', time_axis: int | None = -1) Tuple[DataArray, DataArray][source]#
+

Compute velocity from arrays of positions and time.

+

x and y can be provided as longitude and latitude in degrees if +coord_system == “spherical” (default), or as easting and northing if +coord_system == “cartesian”.

+

The units of the result are meters per unit of time if +coord_system == “spherical”. For example, if the time is provided in the +units of seconds, the resulting velocity is in the units of meters per +second. Otherwise, if coord_system == “cartesian”, the units of the +resulting velocity correspond to the units of the input. For example, +if zonal and meridional displacements are in the units of kilometers and +time is in the units of hours, the resulting velocity is in the units of +kilometers per hour.

+

x, y, and time can be multi-dimensional arrays. If the time axis, along +which the finite differencing is performed, is not the last one (i.e. +x.shape[-1]), use the time_axis optional argument to specify along which +axis should the differencing be done. x, y, and time must have the same +shape.

+

Difference scheme can take one of three values:

+
    +
  1. “forward” (default): finite difference is evaluated as dx[i] = dx[i+1] - dx[i];

  2. +
  3. “backward”: finite difference is evaluated as dx[i] = dx[i] - dx[i-1];

  4. +
  5. “centered”: finite difference is evaluated as dx[i] = (dx[i+1] - dx[i-1]) / 2.

  6. +
+

Forward and backward schemes are effectively the same except that the +position at which the velocity is evaluated is shifted one element down in +the backward scheme relative to the forward scheme. In the case of a +forward or backward difference scheme, the last or first element of the +velocity, respectively, is extrapolated from its neighboring point. In the +case of a centered difference scheme, the start and end boundary points are +evaluated using the forward and backward difference scheme, respectively.

+
+

Parameters#

+
+
xarray_like

An N-d array of x-positions (longitude in degrees or zonal displacement in any unit)

+
+
yarray_like

An N-d array of y-positions (latitude in degrees or meridional displacement in any unit)

+
+
timearray_like

An N-d array of times as floating point values (in any unit)

+
+
coord_systemstr, optional

Coordinate system that x and y arrays are in; possible values are “spherical” (default) or “cartesian”.

+
+
difference_schemestr, optional

Difference scheme to use; possible values are “forward”, “backward”, and “centered”.

+
+
time_axisint, optional

Axis along which to differentiate (default is -1)

+
+
+
+
+

Returns#

+
+
unp.ndarray

Zonal velocity

+
+
vnp.ndarray

Meridional velocity

+
+
+
+
+

Raises#

+
+
ValueError

If x and y do not have the same shape. +If time_axis is outside of the valid range. +If lengths of x, y, and time along time_axis are not equal. +If coord_system is not “spherical” or “cartesian”. +If difference_scheme is not “forward”, “backward”, or “centered”.

+
+
+
+
+

Examples#

+

Simple integration on a sphere, using the forward scheme by default:

+
>>> import numpy as np
+>>> from clouddrift.kinematics import velocity_from_position
+>>> lon = np.array([0., 1., 3., 6.])
+>>> lat = np.array([0., 1., 2., 3.])
+>>> time = np.array([0., 1., 2., 3.]) * 1e5
+>>> u, v = velocity_from_position(lon, lat, time)
+>>> u
+array([1.11307541, 2.22513331, 3.33515501, 3.33515501])
+>>> v
+array([1.11324496, 1.11409224, 1.1167442 , 1.1167442 ])
+
+
+

Integration on a Cartesian plane, using the forward scheme by default:

+
>>> x = np.array([0., 1., 3., 6.])
+>>> y = np.array([0., 1., 2., 3.])
+>>> time = np.array([0., 1., 2., 3.])
+>>> u, v = velocity_from_position(x, y, time, coord_system="cartesian")
+>>> u
+array([1., 2., 3., 3.])
+>>> v
+array([1., 1., 1., 1.])
+
+
+
+
+

See Also#

+

position_from_velocity()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.chance_pair.html b/_autosummary/clouddrift.pairs.chance_pair.html new file mode 100644 index 00000000..c54828ec --- /dev/null +++ b/_autosummary/clouddrift.pairs.chance_pair.html @@ -0,0 +1,651 @@ + + + + + + + + + + + clouddrift.pairs.chance_pair — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.pairs.chance_pair

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.pairs.chance_pair#

+
+
+clouddrift.pairs.chance_pair(lon1: list[float] | ndarray[float] | Series | DataArray, lat1: list[float] | ndarray[float] | Series | DataArray, lon2: list[float] | ndarray[float] | Series | DataArray, lat2: list[float] | ndarray[float] | Series | DataArray, time1: list[float] | ndarray[float] | Series | DataArray | None = None, time2: list[float] | ndarray[float] | Series | DataArray | None = None, space_distance: float | None = 0, time_distance: float | None = 0)[source]#
+

Given two sets of longitudes, latitudes, and times arrays, return in pairs +the indices of collocated data points that are within prescribed distances +in space and time. Also known as chance pairs.

+
+

Parameters#

+
+
lon1array_like

First array of longitudes in degrees.

+
+
lat1array_like

First array of latitudes in degrees.

+
+
lon2array_like

Second array of longitudes in degrees.

+
+
lat2array_like

Second array of latitudes in degrees.

+
+
time1array_like, optional

First array of times.

+
+
time2array_like, optional

Second array of times.

+
+
space_distancefloat, optional

Maximum allowable space distance in meters for a pair to qualify as chance pair. +If the separation is within this distance, the pair is considered to be +a chance pair. Default is 0, or no distance, i.e. the positions must be +exactly the same.

+
+
time_distancefloat, optional

Maximum allowable time distance for a pair to qualify as chance pair. +If a separation is within this distance, and a space distance +condition is satisfied, the pair is considered a chance pair. Default is +0, or no distance, i.e. the times must be exactly the same.

+
+
+
+
+

Returns#

+
+
indices1np.ndarray[int]

Indices within the first set of arrays that lead to chance pair.

+
+
indices2np.ndarray[int]

Indices within the second set of arrays that lead to chance pair.

+
+
+
+
+

Examples#

+

In the following example, we load the GLAD dataset, extract the first +two trajectories, and find between these the array indices that satisfy +the chance pair criteria of 6 km separation distance and no time separation:

+
>>> from clouddrift.datasets import glad
+>>> from clouddrift.pairs import chance_pair
+>>> from clouddrift.ragged import unpack
+>>> ds = glad()
+>>> lon1 = unpack(ds["longitude"], ds["rowsize"], rows=0).pop()
+>>> lat1 = unpack(ds["latitude"], ds["rowsize"], rows=0).pop()
+>>> time1 = unpack(ds["time"], ds["rowsize"], rows=0).pop()
+>>> lon2 = unpack(ds["longitude"], ds["rowsize"], rows=1).pop()
+>>> lat2 = unpack(ds["latitude"], ds["rowsize"], rows=1).pop()
+>>> time2 = unpack(ds["time"], ds["rowsize"], rows=1).pop()
+>>> i1, i2 = chance_pair(lon1, lat1, lon2, lat2, time1, time2, 6000, np.timedelta64(0))
+>>> i1, i2
+(array([177, 180, 183, 186, 189, 192]), array([166, 169, 172, 175, 178, 181]))
+
+
+

Check to ensure our collocation in space worked by calculating the distance +between the identified pairs:

+
>>> sphere.distance(lon1[i1], lat1[i1], lon2[i2], lat2[i2])
+array([5967.4844, 5403.253 , 5116.9136, 5185.715 , 5467.8555, 5958.4917],
+      dtype=float32)
+
+
+

Check the collocation in time:

+
>>> time1[i1] - time2[i2]
+<xarray.DataArray 'time' (obs: 6)>
+array([0, 0, 0, 0, 0, 0], dtype='timedelta64[ns]')
+Coordinates:
+    time     (obs) datetime64[ns] 2012-07-21T21:30:00.524160 ... 2012-07-22T0...
+Dimensions without coordinates: obs
+
+
+
+
+

Raises#

+
+
ValueError

If time1 and time2 are not both provided or both omitted.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.chance_pairs_from_ragged.html b/_autosummary/clouddrift.pairs.chance_pairs_from_ragged.html new file mode 100644 index 00000000..64f57cb0 --- /dev/null +++ b/_autosummary/clouddrift.pairs.chance_pairs_from_ragged.html @@ -0,0 +1,652 @@ + + + + + + + + + + + clouddrift.pairs.chance_pairs_from_ragged — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.pairs.chance_pairs_from_ragged

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.pairs.chance_pairs_from_ragged#

+
+
+clouddrift.pairs.chance_pairs_from_ragged(lon: list[float] | ndarray[float] | Series | DataArray, lat: list[float] | ndarray[float] | Series | DataArray, rowsize: list[float] | ndarray[float] | Series | DataArray, space_distance: float | None = 0, time: list[float] | ndarray[float] | Series | DataArray | None = None, time_distance: float | None = 0) List[Tuple[Tuple[int, int], Tuple[ndarray, ndarray]]][source]#
+

Return all chance pairs of contiguous trajectories in a ragged array, +and their collocated points in space and (optionally) time, given input +ragged arrays of longitude, latitude, and (optionally) time, and chance +pair criteria as maximum allowable distances in space and time.

+

If time and time_distance are omitted, the search will be done +only on the spatial criteria, and the result will not include the time +arrays.

+

If time and time_distance are provided, the search will be done +on both the spatial and temporal criteria, and the result will include the +time arrays.

+
+

Parameters#

+
+
lonarray_like

Array of longitudes in degrees.

+
+
latarray_like

Array of latitudes in degrees.

+
+
rowsizearray_like

Array of rowsizes.

+
+
space_distancefloat, optional

Maximum space distance in meters for the pair to qualify as chance pair. +If the separation is within this distance, the pair is considered to be +a chance pair. Default is 0, or no distance, i.e. the positions must be +exactly the same.

+
+
timearray_like, optional

Array of times.

+
+
time_distancefloat, optional

Maximum time distance allowed for the pair to qualify as chance pair. +If the separation is within this distance, and the space distance +condition is satisfied, the pair is considered a chance pair. Default is +0, or no distance, i.e. the times must be exactly the same.

+
+
+
+
+

Returns#

+
+
pairsList[Tuple[Tuple[int, int], Tuple[np.ndarray, np.ndarray]]]

List of tuples, each tuple containing a Tuple of integer indices that +corresponds to the trajectory rows in the ragged array, indicating the +pair of trajectories that satisfy the chance pair criteria, and a Tuple +of arrays containing the indices of the collocated points for each +trajectory in the chance pair.

+
+
+
+
+

Examples#

+

In the following example, we load GLAD dataset as a ragged array dataset, +subset the result to retain the first five trajectories, and finally find +all trajectories that satisfy the chance pair criteria of 12 km separation +distance and no time separation, as well as the indices of the collocated +points for each pair.

+
>>> from clouddrift.datasets import glad
+>>> from clouddrift.pairs import chance_pairs_from_ragged
+>>> from clouddrift.ragged import subset
+>>> ds = subset(glad(), {"id": ["CARTHE_001", "CARTHE_002", "CARTHE_003", "CARTHE_004", "CARTHE_005"]}, id_var_name="id")
+>>> pairs = chance_pairs_from_ragged(
+    ds["longitude"].values,
+    ds["latitude"].values,
+    ds["rowsize"].values,
+    space_distance=12000,
+    time=ds["time"].values,
+    time_distance=np.timedelta64(0)
+)
+[((0, 1),
+  (array([153, 156, 159, 162, 165, 168, 171, 174, 177, 180, 183, 186, 189,
+          192, 195, 198, 201, 204, 207, 210, 213, 216]),
+   array([142, 145, 148, 151, 154, 157, 160, 163, 166, 169, 172, 175, 178,
+          181, 184, 187, 190, 193, 196, 199, 202, 205]))),
+ ((3, 4),
+  (array([141, 144, 147, 150, 153, 156, 159, 162, 165, 168, 171, 174, 177,
+          180, 183]),
+   array([136, 139, 142, 145, 148, 151, 154, 157, 160, 163, 166, 169, 172,
+          175, 178])))]
+
+
+

The result above shows that 2 chance pairs were found.

+
+
+

Raises#

+
+
ValueError

If rowsize has fewer than two elements.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.html b/_autosummary/clouddrift.pairs.html new file mode 100644 index 00000000..280dfb72 --- /dev/null +++ b/_autosummary/clouddrift.pairs.html @@ -0,0 +1,944 @@ + + + + + + + + + + + clouddrift.pairs — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.pairs#

+

Functions to analyze pairs of contiguous data segments.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + +

chance_pair(lon1, lat1, lon2, lat2[, time1, ...])

Given two sets of longitudes, latitudes, and times arrays, return in pairs the indices of collocated data points that are within prescribed distances in space and time.

chance_pairs_from_ragged(lon, lat, rowsize)

Return all chance pairs of contiguous trajectories in a ragged array, and their collocated points in space and (optionally) time, given input ragged arrays of longitude, latitude, and (optionally) time, and chance pair criteria as maximum allowable distances in space and time.

pair_bounding_box_overlap(lon1, lat1, lon2, lat2)

Given two arrays of longitudes and latitudes, return boolean masks for their overlapping bounding boxes.

pair_space_distance(lon1, lat1, lon2, lat2)

Given two arrays of longitudes and latitudes, return the distance on a sphere between all pairs of points.

pair_time_distance(time1, time2)

Given two arrays of times (or any other monotonically increasing quantity), return the temporal distance between all pairs of times.

pair_time_overlap(time1, time2[, distance])

Given two arrays of times (or any other monotonically increasing quantity), return indices where the times are within a prescribed distance.

+
+
+clouddrift.pairs.chance_pair(lon1: list[float] | ndarray[float] | Series | DataArray, lat1: list[float] | ndarray[float] | Series | DataArray, lon2: list[float] | ndarray[float] | Series | DataArray, lat2: list[float] | ndarray[float] | Series | DataArray, time1: list[float] | ndarray[float] | Series | DataArray | None = None, time2: list[float] | ndarray[float] | Series | DataArray | None = None, space_distance: float | None = 0, time_distance: float | None = 0)[source]#
+

Given two sets of longitudes, latitudes, and times arrays, return in pairs +the indices of collocated data points that are within prescribed distances +in space and time. Also known as chance pairs.

+
+

Parameters#

+
+
lon1array_like

First array of longitudes in degrees.

+
+
lat1array_like

First array of latitudes in degrees.

+
+
lon2array_like

Second array of longitudes in degrees.

+
+
lat2array_like

Second array of latitudes in degrees.

+
+
time1array_like, optional

First array of times.

+
+
time2array_like, optional

Second array of times.

+
+
space_distancefloat, optional

Maximum allowable space distance in meters for a pair to qualify as chance pair. +If the separation is within this distance, the pair is considered to be +a chance pair. Default is 0, or no distance, i.e. the positions must be +exactly the same.

+
+
time_distancefloat, optional

Maximum allowable time distance for a pair to qualify as chance pair. +If a separation is within this distance, and a space distance +condition is satisfied, the pair is considered a chance pair. Default is +0, or no distance, i.e. the times must be exactly the same.

+
+
+
+
+

Returns#

+
+
indices1np.ndarray[int]

Indices within the first set of arrays that lead to chance pair.

+
+
indices2np.ndarray[int]

Indices within the second set of arrays that lead to chance pair.

+
+
+
+
+

Examples#

+

In the following example, we load the GLAD dataset, extract the first +two trajectories, and find between these the array indices that satisfy +the chance pair criteria of 6 km separation distance and no time separation:

+
>>> from clouddrift.datasets import glad
+>>> from clouddrift.pairs import chance_pair
+>>> from clouddrift.ragged import unpack
+>>> ds = glad()
+>>> lon1 = unpack(ds["longitude"], ds["rowsize"], rows=0).pop()
+>>> lat1 = unpack(ds["latitude"], ds["rowsize"], rows=0).pop()
+>>> time1 = unpack(ds["time"], ds["rowsize"], rows=0).pop()
+>>> lon2 = unpack(ds["longitude"], ds["rowsize"], rows=1).pop()
+>>> lat2 = unpack(ds["latitude"], ds["rowsize"], rows=1).pop()
+>>> time2 = unpack(ds["time"], ds["rowsize"], rows=1).pop()
+>>> i1, i2 = chance_pair(lon1, lat1, lon2, lat2, time1, time2, 6000, np.timedelta64(0))
+>>> i1, i2
+(array([177, 180, 183, 186, 189, 192]), array([166, 169, 172, 175, 178, 181]))
+
+
+

Check to ensure our collocation in space worked by calculating the distance +between the identified pairs:

+
>>> sphere.distance(lon1[i1], lat1[i1], lon2[i2], lat2[i2])
+array([5967.4844, 5403.253 , 5116.9136, 5185.715 , 5467.8555, 5958.4917],
+      dtype=float32)
+
+
+

Check the collocation in time:

+
>>> time1[i1] - time2[i2]
+<xarray.DataArray 'time' (obs: 6)>
+array([0, 0, 0, 0, 0, 0], dtype='timedelta64[ns]')
+Coordinates:
+    time     (obs) datetime64[ns] 2012-07-21T21:30:00.524160 ... 2012-07-22T0...
+Dimensions without coordinates: obs
+
+
+
+
+

Raises#

+
+
ValueError

If time1 and time2 are not both provided or both omitted.

+
+
+
+
+ +
+
+clouddrift.pairs.chance_pairs_from_ragged(lon: list[float] | ndarray[float] | Series | DataArray, lat: list[float] | ndarray[float] | Series | DataArray, rowsize: list[float] | ndarray[float] | Series | DataArray, space_distance: float | None = 0, time: list[float] | ndarray[float] | Series | DataArray | None = None, time_distance: float | None = 0) List[Tuple[Tuple[int, int], Tuple[ndarray, ndarray]]][source]#
+

Return all chance pairs of contiguous trajectories in a ragged array, +and their collocated points in space and (optionally) time, given input +ragged arrays of longitude, latitude, and (optionally) time, and chance +pair criteria as maximum allowable distances in space and time.

+

If time and time_distance are omitted, the search will be done +only on the spatial criteria, and the result will not include the time +arrays.

+

If time and time_distance are provided, the search will be done +on both the spatial and temporal criteria, and the result will include the +time arrays.

+
+

Parameters#

+
+
lonarray_like

Array of longitudes in degrees.

+
+
latarray_like

Array of latitudes in degrees.

+
+
rowsizearray_like

Array of rowsizes.

+
+
space_distancefloat, optional

Maximum space distance in meters for the pair to qualify as chance pair. +If the separation is within this distance, the pair is considered to be +a chance pair. Default is 0, or no distance, i.e. the positions must be +exactly the same.

+
+
timearray_like, optional

Array of times.

+
+
time_distancefloat, optional

Maximum time distance allowed for the pair to qualify as chance pair. +If the separation is within this distance, and the space distance +condition is satisfied, the pair is considered a chance pair. Default is +0, or no distance, i.e. the times must be exactly the same.

+
+
+
+
+

Returns#

+
+
pairsList[Tuple[Tuple[int, int], Tuple[np.ndarray, np.ndarray]]]

List of tuples, each tuple containing a Tuple of integer indices that +corresponds to the trajectory rows in the ragged array, indicating the +pair of trajectories that satisfy the chance pair criteria, and a Tuple +of arrays containing the indices of the collocated points for each +trajectory in the chance pair.

+
+
+
+
+

Examples#

+

In the following example, we load GLAD dataset as a ragged array dataset, +subset the result to retain the first five trajectories, and finally find +all trajectories that satisfy the chance pair criteria of 12 km separation +distance and no time separation, as well as the indices of the collocated +points for each pair.

+
>>> from clouddrift.datasets import glad
+>>> from clouddrift.pairs import chance_pairs_from_ragged
+>>> from clouddrift.ragged import subset
+>>> ds = subset(glad(), {"id": ["CARTHE_001", "CARTHE_002", "CARTHE_003", "CARTHE_004", "CARTHE_005"]}, id_var_name="id")
+>>> pairs = chance_pairs_from_ragged(
+    ds["longitude"].values,
+    ds["latitude"].values,
+    ds["rowsize"].values,
+    space_distance=12000,
+    time=ds["time"].values,
+    time_distance=np.timedelta64(0)
+)
+[((0, 1),
+  (array([153, 156, 159, 162, 165, 168, 171, 174, 177, 180, 183, 186, 189,
+          192, 195, 198, 201, 204, 207, 210, 213, 216]),
+   array([142, 145, 148, 151, 154, 157, 160, 163, 166, 169, 172, 175, 178,
+          181, 184, 187, 190, 193, 196, 199, 202, 205]))),
+ ((3, 4),
+  (array([141, 144, 147, 150, 153, 156, 159, 162, 165, 168, 171, 174, 177,
+          180, 183]),
+   array([136, 139, 142, 145, 148, 151, 154, 157, 160, 163, 166, 169, 172,
+          175, 178])))]
+
+
+

The result above shows that 2 chance pairs were found.

+
+
+

Raises#

+
+
ValueError

If rowsize has fewer than two elements.

+
+
+
+
+ +
+
+clouddrift.pairs.pair_bounding_box_overlap(lon1: list[float] | ndarray[float] | Series | DataArray, lat1: list[float] | ndarray[float] | Series | DataArray, lon2: list[float] | ndarray[float] | Series | DataArray, lat2: list[float] | ndarray[float] | Series | DataArray, distance: float | None = 0) Tuple[ndarray[bool], ndarray[bool]][source]#
+

Given two arrays of longitudes and latitudes, return boolean masks for +their overlapping bounding boxes.

+
+

Parameters#

+
+
lon1array_like

First array of longitudes in degrees.

+
+
lat1array_like

First array of latitudes in degrees.

+
+
lon2array_like

Second array of longitudes in degrees.

+
+
lat2array_like

Second array of latitudes in degrees.

+
+
distancefloat, optional

Distance in degrees for the overlap. If the overlap is within this +distance, the bounding boxes are considered to overlap. Default is 0.

+
+
+
+
+

Returns#

+
+
overlap1np.ndarray[int]

Indices lon1 and lat1 where their bounding box overlaps with +that of lon2 and lat2.

+
+
overlap2np.ndarray[int]

Indices lon2 and lat2 where their bounding box overlaps with +that of lon1 and lat1.

+
+
+
+
+

Examples#

+
>>> lon1 = [0, 0, 1, 1]
+>>> lat1 = [0, 0, 1, 1]
+>>> lon2 = [1, 1, 2, 2]
+>>> lat2 = [1, 1, 2, 2]
+>>> pair_bounding_box_overlap(lon1, lat1, lon2, lat2, 0.5)
+(array([2, 3]), array([0, 1]))
+
+
+
+
+ +
+
+clouddrift.pairs.pair_space_distance(lon1: list[float] | ndarray[float] | Series | DataArray, lat1: list[float] | ndarray[float] | Series | DataArray, lon2: list[float] | ndarray[float] | Series | DataArray, lat2: list[float] | ndarray[float] | Series | DataArray) ndarray[float][source]#
+

Given two arrays of longitudes and latitudes, return the distance +on a sphere between all pairs of points.

+
+

Parameters#

+
+
lon1array_like

First array of longitudes in degrees.

+
+
lat1array_like

First array of latitudes in degrees.

+
+
lon2array_like

Second array of longitudes in degrees.

+
+
lat2array_like

Second array of latitudes in degrees.

+
+
+
+
+

Returns#

+
+
distancenp.ndarray[float]

Array of distances between all pairs of points.

+
+
+
+
+

Examples#

+
>>> lon1 = [0, 0, 1, 1]
+>>> lat1 = [0, 0, 1, 1]
+>>> lon2 = [1, 1, 2, 2]
+>>> lat2 = [1, 1, 2, 2]
+>>> pair_space_distance(lon1, lat1, lon2, lat2)
+array([[157424.62387233, 157424.62387233,      0.        ,
+         0.        ],
+   [157424.62387233, 157424.62387233,      0.        ,
+         0.        ],
+   [314825.26360286, 314825.26360286, 157400.64794884,
+    157400.64794884],
+   [314825.26360286, 314825.26360286, 157400.64794884,
+    157400.64794884]])
+
+
+
+
+ +
+
+clouddrift.pairs.pair_time_distance(time1: list[float] | ndarray[float] | Series | DataArray, time2: list[float] | ndarray[float] | Series | DataArray) ndarray[float][source]#
+

Given two arrays of times (or any other monotonically increasing +quantity), return the temporal distance between all pairs of times.

+
+

Parameters#

+
+
time1array_like

First array of times.

+
+
time2array_like

Second array of times.

+
+
+
+
+

Returns#

+
+
distancenp.ndarray[float]

Array of distances between all pairs of times.

+
+
+
+
+

Examples#

+
>>> time1 = np.arange(4)
+>>> time2 = np.arange(2, 6)
+>>> pair_time_distance(time1, time2)
+array([[2, 1, 0, 1],
+       [3, 2, 1, 0],
+       [4, 3, 2, 1],
+       [5, 4, 3, 2]])
+
+
+
+
+ +
+
+clouddrift.pairs.pair_time_overlap(time1: list[float] | ndarray[float] | Series | DataArray, time2: list[float] | ndarray[float] | Series | DataArray, distance: float | None = 0) Tuple[ndarray[int], ndarray[int]][source]#
+

Given two arrays of times (or any other monotonically increasing +quantity), return indices where the times are within a prescribed distance.

+

Although higher-level array containers like xarray and pandas are supported +for input arrays, this function is an order of magnitude faster when passing +in numpy arrays.

+
+

Parameters#

+
+
time1array_like

First array of times.

+
+
time2array_like

Second array of times.

+
+
distancefloat

Maximum distance within which the values of time1 and time2 are +considered to overlap. Default is 0, or, the values must be exactly the +same.

+
+
+
+
+

Returns#

+
+
overlap1np.ndarray[int]

Indices of time1 where its time overlaps with time2.

+
+
overlap2np.ndarray[int]

Indices of time2 where its time overlaps with time1.

+
+
+
+
+

Examples#

+
>>> time1 = np.arange(4)
+>>> time2 = np.arange(2, 6)
+>>> pair_time_overlap(time1, time2)
+(array([2, 3]), array([0, 1]))
+
+
+
>>> pair_time_overlap(time1, time2, 1)
+(array([1, 2, 3]), array([0, 1, 2]))
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.pair_bounding_box_overlap.html b/_autosummary/clouddrift.pairs.pair_bounding_box_overlap.html new file mode 100644 index 00000000..192da657 --- /dev/null +++ b/_autosummary/clouddrift.pairs.pair_bounding_box_overlap.html @@ -0,0 +1,608 @@ + + + + + + + + + + + clouddrift.pairs.pair_bounding_box_overlap — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.pairs.pair_bounding_box_overlap

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.pairs.pair_bounding_box_overlap#

+
+
+clouddrift.pairs.pair_bounding_box_overlap(lon1: list[float] | ndarray[float] | Series | DataArray, lat1: list[float] | ndarray[float] | Series | DataArray, lon2: list[float] | ndarray[float] | Series | DataArray, lat2: list[float] | ndarray[float] | Series | DataArray, distance: float | None = 0) Tuple[ndarray[bool], ndarray[bool]][source]#
+

Given two arrays of longitudes and latitudes, return boolean masks for +their overlapping bounding boxes.

+
+

Parameters#

+
+
lon1array_like

First array of longitudes in degrees.

+
+
lat1array_like

First array of latitudes in degrees.

+
+
lon2array_like

Second array of longitudes in degrees.

+
+
lat2array_like

Second array of latitudes in degrees.

+
+
distancefloat, optional

Distance in degrees for the overlap. If the overlap is within this +distance, the bounding boxes are considered to overlap. Default is 0.

+
+
+
+
+

Returns#

+
+
overlap1np.ndarray[int]

Indices lon1 and lat1 where their bounding box overlaps with +that of lon2 and lat2.

+
+
overlap2np.ndarray[int]

Indices lon2 and lat2 where their bounding box overlaps with +that of lon1 and lat1.

+
+
+
+
+

Examples#

+
>>> lon1 = [0, 0, 1, 1]
+>>> lat1 = [0, 0, 1, 1]
+>>> lon2 = [1, 1, 2, 2]
+>>> lat2 = [1, 1, 2, 2]
+>>> pair_bounding_box_overlap(lon1, lat1, lon2, lat2, 0.5)
+(array([2, 3]), array([0, 1]))
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.pair_space_distance.html b/_autosummary/clouddrift.pairs.pair_space_distance.html new file mode 100644 index 00000000..bda9acff --- /dev/null +++ b/_autosummary/clouddrift.pairs.pair_space_distance.html @@ -0,0 +1,608 @@ + + + + + + + + + + + clouddrift.pairs.pair_space_distance — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.pairs.pair_space_distance

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.pairs.pair_space_distance#

+
+
+clouddrift.pairs.pair_space_distance(lon1: list[float] | ndarray[float] | Series | DataArray, lat1: list[float] | ndarray[float] | Series | DataArray, lon2: list[float] | ndarray[float] | Series | DataArray, lat2: list[float] | ndarray[float] | Series | DataArray) ndarray[float][source]#
+

Given two arrays of longitudes and latitudes, return the distance +on a sphere between all pairs of points.

+
+

Parameters#

+
+
lon1array_like

First array of longitudes in degrees.

+
+
lat1array_like

First array of latitudes in degrees.

+
+
lon2array_like

Second array of longitudes in degrees.

+
+
lat2array_like

Second array of latitudes in degrees.

+
+
+
+
+

Returns#

+
+
distancenp.ndarray[float]

Array of distances between all pairs of points.

+
+
+
+
+

Examples#

+
>>> lon1 = [0, 0, 1, 1]
+>>> lat1 = [0, 0, 1, 1]
+>>> lon2 = [1, 1, 2, 2]
+>>> lat2 = [1, 1, 2, 2]
+>>> pair_space_distance(lon1, lat1, lon2, lat2)
+array([[157424.62387233, 157424.62387233,      0.        ,
+         0.        ],
+   [157424.62387233, 157424.62387233,      0.        ,
+         0.        ],
+   [314825.26360286, 314825.26360286, 157400.64794884,
+    157400.64794884],
+   [314825.26360286, 314825.26360286, 157400.64794884,
+    157400.64794884]])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.pair_time_distance.html b/_autosummary/clouddrift.pairs.pair_time_distance.html new file mode 100644 index 00000000..fd1e487c --- /dev/null +++ b/_autosummary/clouddrift.pairs.pair_time_distance.html @@ -0,0 +1,598 @@ + + + + + + + + + + + clouddrift.pairs.pair_time_distance — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.pairs.pair_time_distance

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.pairs.pair_time_distance#

+
+
+clouddrift.pairs.pair_time_distance(time1: list[float] | ndarray[float] | Series | DataArray, time2: list[float] | ndarray[float] | Series | DataArray) ndarray[float][source]#
+

Given two arrays of times (or any other monotonically increasing +quantity), return the temporal distance between all pairs of times.

+
+

Parameters#

+
+
time1array_like

First array of times.

+
+
time2array_like

Second array of times.

+
+
+
+
+

Returns#

+
+
distancenp.ndarray[float]

Array of distances between all pairs of times.

+
+
+
+
+

Examples#

+
>>> time1 = np.arange(4)
+>>> time2 = np.arange(2, 6)
+>>> pair_time_distance(time1, time2)
+array([[2, 1, 0, 1],
+       [3, 2, 1, 0],
+       [4, 3, 2, 1],
+       [5, 4, 3, 2]])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.pairs.pair_time_overlap.html b/_autosummary/clouddrift.pairs.pair_time_overlap.html new file mode 100644 index 00000000..feefe680 --- /dev/null +++ b/_autosummary/clouddrift.pairs.pair_time_overlap.html @@ -0,0 +1,608 @@ + + + + + + + + + + + clouddrift.pairs.pair_time_overlap — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.pairs.pair_time_overlap

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.pairs.pair_time_overlap#

+
+
+clouddrift.pairs.pair_time_overlap(time1: list[float] | ndarray[float] | Series | DataArray, time2: list[float] | ndarray[float] | Series | DataArray, distance: float | None = 0) Tuple[ndarray[int], ndarray[int]][source]#
+

Given two arrays of times (or any other monotonically increasing +quantity), return indices where the times are within a prescribed distance.

+

Although higher-level array containers like xarray and pandas are supported +for input arrays, this function is an order of magnitude faster when passing +in numpy arrays.

+
+

Parameters#

+
+
time1array_like

First array of times.

+
+
time2array_like

Second array of times.

+
+
distancefloat

Maximum distance within which the values of time1 and time2 are +considered to overlap. Default is 0, or, the values must be exactly the +same.

+
+
+
+
+

Returns#

+
+
overlap1np.ndarray[int]

Indices of time1 where its time overlaps with time2.

+
+
overlap2np.ndarray[int]

Indices of time2 where its time overlaps with time1.

+
+
+
+
+

Examples#

+
>>> time1 = np.arange(4)
+>>> time2 = np.arange(2, 6)
+>>> pair_time_overlap(time1, time2)
+(array([2, 3]), array([0, 1]))
+
+
+
>>> pair_time_overlap(time1, time2, 1)
+(array([1, 2, 3]), array([0, 1, 2]))
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.plotting.html b/_autosummary/clouddrift.plotting.html new file mode 100644 index 00000000..183b3ebb --- /dev/null +++ b/_autosummary/clouddrift.plotting.html @@ -0,0 +1,686 @@ + + + + + + + + + + + clouddrift.plotting — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.plotting

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.plotting#

+

This module provides a function to easily and efficiently plot trajectories stored in a ragged array.

+

Functions

+ + + + + + +

plot_ragged(ax, longitude, latitude, ...[, ...])

Plot trajectories from a ragged array dataset on a Matplotlib Axes or a Cartopy GeoAxes object ax.

+
+
+clouddrift.plotting.plot_ragged(ax, longitude: list | ndarray | Series | DataArray, latitude: list | ndarray | Series | DataArray, rowsize: list | ndarray | Series | DataArray, *args, colors: list | ndarray | Series | DataArray | None = None, tolerance: float | int | None = 180, **kwargs)[source]#
+

Plot trajectories from a ragged array dataset on a Matplotlib Axes +or a Cartopy GeoAxes object ax.

+

This function wraps Matplotlib’s plot function (plt.plot) and +LineCollection (matplotlib.collections) to efficiently plot +trajectories from a ragged array dataset.

+
+

Parameters#

+
+
ax: matplotlib.axes.Axes or cartopy.mpl.geoaxes.GeoAxes

Axis to plot on.

+
+
longitudearray-like

Longitude sequence. Unidimensional array input.

+
+
latitudearray-like

Latitude sequence. Unidimensional array input.

+
+
rowsizelist

List of integers specifying the number of data points in each row.

+
+
*argstuple

Additional arguments to pass to ax.plot.

+
+
colorsarray-like

Colors to use for plotting. If colors is the same shape as longitude and latitude, +the trajectories are splitted into segments and each segment is colored according +to the corresponding color value. If colors is the same shape as rowsize, the +trajectories are uniformly colored according to the corresponding color value.

+
+
tolerancefloat

Longitude tolerance gap between data points (in degrees) for segmenting trajectories. +For periodic domains, the tolerance parameter should be set to the maximum allowed gap +between data points. Defaults to 180.

+
+
**kwargsdict

Additional keyword arguments to pass to ax.plot.

+
+
+
+
+

Returns#

+
+
list of matplotlib.lines.Line2D or matplotlib.collections.LineCollection

The plotted lines or line collection. Can be used to set a colorbar +after plotting or extract information from the lines.

+
+
+
+
+

Examples#

+

Plot the first 100 trajectories from the gdp1h dataset, assigning +a different color to each trajectory:

+
>>> from clouddrift import datasets
+>>> import matplotlib.pyplot as plt
+>>> ds = datasets.gdp1h()
+>>> ds = subset(ds, {"ID": ds.ID[:100].values}).load()
+>>> fig = plt.figure()
+>>> ax = fig.add_subplot(1, 1, 1)
+
+
+
>>> plot_ragged(
+>>>     ax,
+>>>     ds.lon,
+>>>     ds.lat,
+>>>     ds.rowsize,
+>>>     colors=np.arange(len(ds.rowsize))
+>>> )
+
+
+

To plot the same trajectories, but assigning a different color to each +observation and specifying a colormap:

+
>>> fig = plt.figure()
+>>> ax = fig.add_subplot(1, 1, 1)
+>>> time = [v.astype(np.int64) / 86400 / 1e9 for v in ds.time.values]
+>>> lc = plot_ragged(
+>>>     ax,
+>>>     ds.lon,
+>>>     ds.lat,
+>>>     ds.rowsize,
+>>>     colors=np.floor(time),
+>>>     cmap="inferno"
+>>> )
+>>> fig.colorbar(lc[0])
+>>> ax.set_xlim([-180, 180])
+>>> ax.set_ylim([-90, 90])
+
+
+

Finally, to plot the same trajectories, but using a cartopy +projection:

+
>>> import cartopy.crs as ccrs
+>>> fig = plt.figure()
+>>> ax = fig.add_subplot(1, 1, 1, projection=ccrs.Mollweide())
+>>> time = [v.astype(np.int64) / 86400 / 1e9 for v in ds.time.values]
+>>> lc = plot_ragged(
+>>>     ax,
+>>>     ds.lon,
+>>>     ds.lat,
+>>>     ds.rowsize,
+>>>     colors=np.arange(len(ds.rowsize)),
+>>>     transform=ccrs.PlateCarree(),
+>>>     cmap=cmocean.cm.ice,
+>>> )
+
+
+
+
+

Raises#

+
+
ValueError

If longitude and latitude arrays do not have the same shape. +If colors do not have the same shape as longitude and latitude arrays or rowsize. +If ax is not a matplotlib Axes or GeoAxes object. +If ax is a GeoAxes object and the transform keyword argument is not provided.

+
+
ImportError

If matplotlib is not installed. +If the axis is a GeoAxes object and cartopy is not installed.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.plotting.plot_ragged.html b/_autosummary/clouddrift.plotting.plot_ragged.html new file mode 100644 index 00000000..06c69db2 --- /dev/null +++ b/_autosummary/clouddrift.plotting.plot_ragged.html @@ -0,0 +1,677 @@ + + + + + + + + + + + clouddrift.plotting.plot_ragged — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.plotting.plot_ragged

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.plotting.plot_ragged#

+
+
+clouddrift.plotting.plot_ragged(ax, longitude: list | ndarray | Series | DataArray, latitude: list | ndarray | Series | DataArray, rowsize: list | ndarray | Series | DataArray, *args, colors: list | ndarray | Series | DataArray | None = None, tolerance: float | int | None = 180, **kwargs)[source]#
+

Plot trajectories from a ragged array dataset on a Matplotlib Axes +or a Cartopy GeoAxes object ax.

+

This function wraps Matplotlib’s plot function (plt.plot) and +LineCollection (matplotlib.collections) to efficiently plot +trajectories from a ragged array dataset.

+
+

Parameters#

+
+
ax: matplotlib.axes.Axes or cartopy.mpl.geoaxes.GeoAxes

Axis to plot on.

+
+
longitudearray-like

Longitude sequence. Unidimensional array input.

+
+
latitudearray-like

Latitude sequence. Unidimensional array input.

+
+
rowsizelist

List of integers specifying the number of data points in each row.

+
+
*argstuple

Additional arguments to pass to ax.plot.

+
+
colorsarray-like

Colors to use for plotting. If colors is the same shape as longitude and latitude, +the trajectories are splitted into segments and each segment is colored according +to the corresponding color value. If colors is the same shape as rowsize, the +trajectories are uniformly colored according to the corresponding color value.

+
+
tolerancefloat

Longitude tolerance gap between data points (in degrees) for segmenting trajectories. +For periodic domains, the tolerance parameter should be set to the maximum allowed gap +between data points. Defaults to 180.

+
+
**kwargsdict

Additional keyword arguments to pass to ax.plot.

+
+
+
+
+

Returns#

+
+
list of matplotlib.lines.Line2D or matplotlib.collections.LineCollection

The plotted lines or line collection. Can be used to set a colorbar +after plotting or extract information from the lines.

+
+
+
+
+

Examples#

+

Plot the first 100 trajectories from the gdp1h dataset, assigning +a different color to each trajectory:

+
>>> from clouddrift import datasets
+>>> import matplotlib.pyplot as plt
+>>> ds = datasets.gdp1h()
+>>> ds = subset(ds, {"ID": ds.ID[:100].values}).load()
+>>> fig = plt.figure()
+>>> ax = fig.add_subplot(1, 1, 1)
+
+
+
>>> plot_ragged(
+>>>     ax,
+>>>     ds.lon,
+>>>     ds.lat,
+>>>     ds.rowsize,
+>>>     colors=np.arange(len(ds.rowsize))
+>>> )
+
+
+

To plot the same trajectories, but assigning a different color to each +observation and specifying a colormap:

+
>>> fig = plt.figure()
+>>> ax = fig.add_subplot(1, 1, 1)
+>>> time = [v.astype(np.int64) / 86400 / 1e9 for v in ds.time.values]
+>>> lc = plot_ragged(
+>>>     ax,
+>>>     ds.lon,
+>>>     ds.lat,
+>>>     ds.rowsize,
+>>>     colors=np.floor(time),
+>>>     cmap="inferno"
+>>> )
+>>> fig.colorbar(lc[0])
+>>> ax.set_xlim([-180, 180])
+>>> ax.set_ylim([-90, 90])
+
+
+

Finally, to plot the same trajectories, but using a cartopy +projection:

+
>>> import cartopy.crs as ccrs
+>>> fig = plt.figure()
+>>> ax = fig.add_subplot(1, 1, 1, projection=ccrs.Mollweide())
+>>> time = [v.astype(np.int64) / 86400 / 1e9 for v in ds.time.values]
+>>> lc = plot_ragged(
+>>>     ax,
+>>>     ds.lon,
+>>>     ds.lat,
+>>>     ds.rowsize,
+>>>     colors=np.arange(len(ds.rowsize)),
+>>>     transform=ccrs.PlateCarree(),
+>>>     cmap=cmocean.cm.ice,
+>>> )
+
+
+
+
+

Raises#

+
+
ValueError

If longitude and latitude arrays do not have the same shape. +If colors do not have the same shape as longitude and latitude arrays or rowsize. +If ax is not a matplotlib Axes or GeoAxes object. +If ax is a GeoAxes object and the transform keyword argument is not provided.

+
+
ImportError

If matplotlib is not installed. +If the axis is a GeoAxes object and cartopy is not installed.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.apply_ragged.html b/_autosummary/clouddrift.ragged.apply_ragged.html new file mode 100644 index 00000000..dcf33b78 --- /dev/null +++ b/_autosummary/clouddrift.ragged.apply_ragged.html @@ -0,0 +1,649 @@ + + + + + + + + + + + clouddrift.ragged.apply_ragged — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.apply_ragged

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.apply_ragged#

+
+
+clouddrift.ragged.apply_ragged(func: callable, arrays: list[~numpy.ndarray | ~xarray.core.dataarray.DataArray] | ~numpy.ndarray | ~xarray.core.dataarray.DataArray, rowsize: list[int] | ~numpy.ndarray[int] | ~xarray.core.dataarray.DataArray, *args: tuple, rows: int | ~typing.Iterable[int] = None, axis: int = 0, executor: ~concurrent.futures._base.Executor = <concurrent.futures.thread.ThreadPoolExecutor object>, **kwargs: dict) tuple[ndarray] | ndarray[source]#
+

Apply a function to a ragged array.

+

The function func will be applied to each contiguous row of arrays as +indicated by row sizes rowsize. The output of func will be +concatenated into a single ragged array.

+

You can pass arrays as NumPy arrays or xarray DataArrays, however, +the result will always be a NumPy array. Passing rows as an integer or +a sequence of integers will make apply_ragged process and return only +those specific rows, and otherwise, all rows in the input ragged array will +be processed. Further, you can use the axis parameter to specify the +ragged axis of the input array(s) (default is 0).

+

By default this function uses concurrent.futures.ThreadPoolExecutor to +run func in multiple threads. The number of threads can be controlled by +passing the max_workers argument to the executor instance passed to +apply_ragged. Alternatively, you can pass the concurrent.futures.ProcessPoolExecutor +instance to use processes instead. Passing alternative (3rd party library) +concurrent executors may work if they follow the same executor interface as +that of concurrent.futures, however this has not been tested yet.

+
+

Parameters#

+
+
funccallable

Function to apply to each row of each ragged array in arrays.

+
+
arrayslist[np.ndarray] or np.ndarray or xr.DataArray

An array or a list of arrays to apply func to.

+
+
rowsizelist[int] or np.ndarray[int] or xr.DataArray[int]

List of integers specifying the number of data points in each row.

+
+
*argstuple

Additional arguments to pass to func.

+
+
rowsint or Iterable[int], optional

The row(s) of the ragged array to apply func to. If rows is +None (default), then func will be applied to all rows.

+
+
axisint, optional

The ragged axis of the input arrays. Default is 0.

+
+
executorconcurrent.futures.Executor, optional

Executor to use for concurrent execution. Default is ThreadPoolExecutor +with the default number of max_workers. +Another supported option is ProcessPoolExecutor.

+
+
**kwargsdict

Additional keyword arguments to pass to func.

+
+
+
+
+

Returns#

+
+
outtuple[np.ndarray] or np.ndarray

Output array(s) from func.

+
+
+
+
+

Examples#

+

Using velocity_from_position with apply_ragged, calculate the velocities of +multiple particles, the coordinates of which are found in the ragged arrays x, y, and t +that share row sizes 2, 3, and 4:

+
>>> rowsize = [2, 3, 4]
+>>> x = np.array([1, 2, 10, 12, 14, 30, 33, 36, 39])
+>>> y = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8])
+>>> t = np.array([1, 2, 1, 2, 3, 1, 2, 3, 4])
+>>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, coord_system="cartesian")
+array([1., 1., 2., 2., 2., 3., 3., 3., 3.]),
+array([1., 1., 1., 1., 1., 1., 1., 1., 1.]))
+
+
+

To apply func to only a subset of rows, use the rows argument:

+
>>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, rows=0, coord_system="cartesian")
+array([1., 1.]),
+array([1., 1.]))
+>>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, rows=[0, 1], coord_system="cartesian")
+array([1., 1., 2., 2., 2.]),
+array([1., 1., 1., 1., 1.]))
+
+
+
+
+

Raises#

+
+
ValueError

If the sum of rowsize does not equal the length of arrays.

+
+
IndexError

If empty arrays.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.chunk.html b/_autosummary/clouddrift.ragged.chunk.html new file mode 100644 index 00000000..5f60f84e --- /dev/null +++ b/_autosummary/clouddrift.ragged.chunk.html @@ -0,0 +1,654 @@ + + + + + + + + + + + clouddrift.ragged.chunk — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.chunk

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.chunk#

+
+
+clouddrift.ragged.chunk(x: list | ndarray | DataArray | Series, length: int, overlap: int = 0, align: str = 'start') ndarray[source]#
+

Divide an array x into equal chunks of length length. The result +is a 2-dimensional NumPy array of shape (num_chunks, length). The resulting +number of chunks is determined based on the length of x, length, +and overlap.

+

chunk can be combined with apply_ragged() to chunk a ragged array.

+
+

Parameters#

+
+
xlist or array-like

Array to divide into chunks.

+
+
lengthint

The length of each chunk.

+
+
overlapint, optional

The number of overlapping array elements across chunks. The default is 0. +Must be smaller than length. For example, if length is 4 and +overlap is 2, the chunks of [0, 1, 2, 3, 4, 5] will be +np.array([[0, 1, 2, 3], [2, 3, 4, 5]]). Negative overlap can be used +to offset chunks by some number of elements. For example, if length +is 2 and overlap is -1, the chunks of [0, 1, 2, 3, 4, 5] will +be np.array([[0, 1], [3, 4]]).

+
+
alignstr, optional [“start”, “middle”, “end”]

If the remainder of the length of x divided by the chunk length is a number +N different from zero, this parameter controls which part of the array will be kept +into the chunks. If align="start", the elements at the beginning of the array +will be part of the chunks and N points are discarded at the end. If align=”middle”, +floor(N/2) and ceil(N/2) elements will be discarded from the beginning and the end +of the array, respectively. If align="end", the elements at the end of the array +will be kept, and the N first elements are discarded. The default is “start”.

+
+
+
+
+

Returns#

+
+
np.ndarray

2-dimensional array of shape (num_chunks, length).

+
+
+
+
+

Examples#

+

Chunk a simple list; this discards the end elements that exceed the last chunk:

+
>>> chunk([1, 2, 3, 4, 5], 2)
+array([[1, 2],
+       [3, 4]])
+
+
+

To discard the starting elements of the array instead, use align="end":

+
>>> chunk([1, 2, 3, 4, 5], 2, align="end")
+array([[2, 3],
+       [4, 5]])
+
+
+

To center the chunks by discarding both ends of the array, use align="middle":

+
>>> chunk([1, 2, 3, 4, 5, 6, 7, 8], 3, align="middle")
+array([[2, 3, 4],
+       [5, 6, 7]])
+
+
+

Specify overlap to get overlapping chunks:

+
>>> chunk([1, 2, 3, 4, 5], 2, overlap=1)
+array([[1, 2],
+       [2, 3],
+       [3, 4],
+       [4, 5]])
+
+
+

Use apply_ragged to chunk a ragged array by providing the row sizes; +notice that you must pass the array to chunk as an array-like, not a list:

+
>>> x = np.array([1, 2, 3, 4, 5])
+>>> rowsize = [2, 1, 2]
+>>> apply_ragged(chunk, x, rowsize, 2)
+array([[1, 2],
+       [4, 5]])
+
+
+
+
+

Raises#

+
+
ValueError

If length < 0.

+
+
ValueError

If align not in ["start", "middle", "end"].

+
+
ZeroDivisionError

if length == 0.

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.html b/_autosummary/clouddrift.ragged.html new file mode 100644 index 00000000..1becab27 --- /dev/null +++ b/_autosummary/clouddrift.ragged.html @@ -0,0 +1,1183 @@ + + + + + + + + + + + clouddrift.ragged — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged

+ +
+ +
+
+ + + + +
+ +
+

clouddrift.ragged#

+

Transformational and inquiry functions for ragged arrays.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

apply_ragged(func, arrays, rowsize, *args[, ...])

Apply a function to a ragged array.

chunk(x, length[, overlap, align])

Divide an array x into equal chunks of length length.

prune(ragged, rowsize, min_rowsize)

Within a ragged array, removes arrays less than a specified row size.

ragged_to_regular(ragged, rowsize[, fill_value])

Convert a ragged array to a two-dimensional array such that each contiguous segment of a ragged array is a row in the two-dimensional array.

regular_to_ragged(array[, fill_value])

Convert a two-dimensional array to a ragged array.

rowsize_to_index(rowsize)

Convert a list of row sizes to a list of indices.

segment(x, tolerance[, rowsize])

Divide an array into segments based on a tolerance value.

subset(ds, criteria[, id_var_name, ...])

Subset a ragged array dataset as a function of one or more criteria.

unpack(ragged_array, rowsize[, rows, axis])

Unpack a ragged array into a list of regular arrays.

+
+
+clouddrift.ragged.apply_ragged(func: callable, arrays: list[~numpy.ndarray | ~xarray.core.dataarray.DataArray] | ~numpy.ndarray | ~xarray.core.dataarray.DataArray, rowsize: list[int] | ~numpy.ndarray[int] | ~xarray.core.dataarray.DataArray, *args: tuple, rows: int | ~typing.Iterable[int] = None, axis: int = 0, executor: ~concurrent.futures._base.Executor = <concurrent.futures.thread.ThreadPoolExecutor object>, **kwargs: dict) tuple[ndarray] | ndarray[source]#
+

Apply a function to a ragged array.

+

The function func will be applied to each contiguous row of arrays as +indicated by row sizes rowsize. The output of func will be +concatenated into a single ragged array.

+

You can pass arrays as NumPy arrays or xarray DataArrays, however, +the result will always be a NumPy array. Passing rows as an integer or +a sequence of integers will make apply_ragged process and return only +those specific rows, and otherwise, all rows in the input ragged array will +be processed. Further, you can use the axis parameter to specify the +ragged axis of the input array(s) (default is 0).

+

By default this function uses concurrent.futures.ThreadPoolExecutor to +run func in multiple threads. The number of threads can be controlled by +passing the max_workers argument to the executor instance passed to +apply_ragged. Alternatively, you can pass the concurrent.futures.ProcessPoolExecutor +instance to use processes instead. Passing alternative (3rd party library) +concurrent executors may work if they follow the same executor interface as +that of concurrent.futures, however this has not been tested yet.

+
+

Parameters#

+
+
funccallable

Function to apply to each row of each ragged array in arrays.

+
+
arrayslist[np.ndarray] or np.ndarray or xr.DataArray

An array or a list of arrays to apply func to.

+
+
rowsizelist[int] or np.ndarray[int] or xr.DataArray[int]

List of integers specifying the number of data points in each row.

+
+
*argstuple

Additional arguments to pass to func.

+
+
rowsint or Iterable[int], optional

The row(s) of the ragged array to apply func to. If rows is +None (default), then func will be applied to all rows.

+
+
axisint, optional

The ragged axis of the input arrays. Default is 0.

+
+
executorconcurrent.futures.Executor, optional

Executor to use for concurrent execution. Default is ThreadPoolExecutor +with the default number of max_workers. +Another supported option is ProcessPoolExecutor.

+
+
**kwargsdict

Additional keyword arguments to pass to func.

+
+
+
+
+

Returns#

+
+
outtuple[np.ndarray] or np.ndarray

Output array(s) from func.

+
+
+
+
+

Examples#

+

Using velocity_from_position with apply_ragged, calculate the velocities of +multiple particles, the coordinates of which are found in the ragged arrays x, y, and t +that share row sizes 2, 3, and 4:

+
>>> rowsize = [2, 3, 4]
+>>> x = np.array([1, 2, 10, 12, 14, 30, 33, 36, 39])
+>>> y = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8])
+>>> t = np.array([1, 2, 1, 2, 3, 1, 2, 3, 4])
+>>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, coord_system="cartesian")
+array([1., 1., 2., 2., 2., 3., 3., 3., 3.]),
+array([1., 1., 1., 1., 1., 1., 1., 1., 1.]))
+
+
+

To apply func to only a subset of rows, use the rows argument:

+
>>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, rows=0, coord_system="cartesian")
+array([1., 1.]),
+array([1., 1.]))
+>>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, rows=[0, 1], coord_system="cartesian")
+array([1., 1., 2., 2., 2.]),
+array([1., 1., 1., 1., 1.]))
+
+
+
+
+

Raises#

+
+
ValueError

If the sum of rowsize does not equal the length of arrays.

+
+
IndexError

If empty arrays.

+
+
+
+
+ +
+
+clouddrift.ragged.chunk(x: list | ndarray | DataArray | Series, length: int, overlap: int = 0, align: str = 'start') ndarray[source]#
+

Divide an array x into equal chunks of length length. The result +is a 2-dimensional NumPy array of shape (num_chunks, length). The resulting +number of chunks is determined based on the length of x, length, +and overlap.

+

chunk can be combined with apply_ragged() to chunk a ragged array.

+
+

Parameters#

+
+
xlist or array-like

Array to divide into chunks.

+
+
lengthint

The length of each chunk.

+
+
overlapint, optional

The number of overlapping array elements across chunks. The default is 0. +Must be smaller than length. For example, if length is 4 and +overlap is 2, the chunks of [0, 1, 2, 3, 4, 5] will be +np.array([[0, 1, 2, 3], [2, 3, 4, 5]]). Negative overlap can be used +to offset chunks by some number of elements. For example, if length +is 2 and overlap is -1, the chunks of [0, 1, 2, 3, 4, 5] will +be np.array([[0, 1], [3, 4]]).

+
+
alignstr, optional [“start”, “middle”, “end”]

If the remainder of the length of x divided by the chunk length is a number +N different from zero, this parameter controls which part of the array will be kept +into the chunks. If align="start", the elements at the beginning of the array +will be part of the chunks and N points are discarded at the end. If align=”middle”, +floor(N/2) and ceil(N/2) elements will be discarded from the beginning and the end +of the array, respectively. If align="end", the elements at the end of the array +will be kept, and the N first elements are discarded. The default is “start”.

+
+
+
+
+

Returns#

+
+
np.ndarray

2-dimensional array of shape (num_chunks, length).

+
+
+
+
+

Examples#

+

Chunk a simple list; this discards the end elements that exceed the last chunk:

+
>>> chunk([1, 2, 3, 4, 5], 2)
+array([[1, 2],
+       [3, 4]])
+
+
+

To discard the starting elements of the array instead, use align="end":

+
>>> chunk([1, 2, 3, 4, 5], 2, align="end")
+array([[2, 3],
+       [4, 5]])
+
+
+

To center the chunks by discarding both ends of the array, use align="middle":

+
>>> chunk([1, 2, 3, 4, 5, 6, 7, 8], 3, align="middle")
+array([[2, 3, 4],
+       [5, 6, 7]])
+
+
+

Specify overlap to get overlapping chunks:

+
>>> chunk([1, 2, 3, 4, 5], 2, overlap=1)
+array([[1, 2],
+       [2, 3],
+       [3, 4],
+       [4, 5]])
+
+
+

Use apply_ragged to chunk a ragged array by providing the row sizes; +notice that you must pass the array to chunk as an array-like, not a list:

+
>>> x = np.array([1, 2, 3, 4, 5])
+>>> rowsize = [2, 1, 2]
+>>> apply_ragged(chunk, x, rowsize, 2)
+array([[1, 2],
+       [4, 5]])
+
+
+
+
+

Raises#

+
+
ValueError

If length < 0.

+
+
ValueError

If align not in ["start", "middle", "end"].

+
+
ZeroDivisionError

if length == 0.

+
+
+
+
+ +
+
+clouddrift.ragged.prune(ragged: list | ndarray | Series | DataArray, rowsize: list | ndarray | Series | DataArray, min_rowsize: float) Tuple[ndarray, ndarray][source]#
+

Within a ragged array, removes arrays less than a specified row size.

+
+

Parameters#

+
+
raggednp.ndarray or pd.Series or xr.DataArray

A ragged array.

+
+
rowsizelist or np.ndarray[int] or pd.Series or xr.DataArray[int]

The size of each row in the input ragged array.

+
+
min_rowsize :

The minimum row size that will be kept.

+
+
+
+
+

Returns#

+
+
tuple[np.ndarray, np.ndarray]

A tuple of ragged array and size of each row.

+
+
+
+
+

Examples#

+
>>> prune(np.array([1, 2, 3, 0, -1, -2]), np.array([3, 1, 2]),2)
+(array([1, 2, 3, -1, -2]), array([3, 2]))
+
+
+
+
+

Raises#

+
+
ValueError

If the sum of rowsize does not equal the length of arrays.

+
+
IndexError

If empty ragged.

+
+
+
+
+

See Also#

+

segment(), chunk

+
+
+ +
+
+clouddrift.ragged.ragged_to_regular(ragged: ndarray | Series | DataArray, rowsize: list | ndarray | Series | DataArray, fill_value: float = nan) ndarray[source]#
+

Convert a ragged array to a two-dimensional array such that each contiguous segment +of a ragged array is a row in the two-dimensional array. Each row of the two-dimensional +array is padded with NaNs as needed. The length of the first dimension of the output +array is the length of rowsize. The length of the second dimension is the maximum +element of rowsize.

+

Note: Although this function accepts parameters of type xarray.DataArray, +passing NumPy arrays is recommended for performance reasons.

+
+

Parameters#

+
+
raggednp.ndarray or pd.Series or xr.DataArray

A ragged array.

+
+
rowsizelist or np.ndarray[int] or pd.Series or xr.DataArray[int]

The size of each row in the ragged array.

+
+
fill_valuefloat, optional

Fill value to use for the trailing elements of each row of the resulting +regular array.

+
+
+
+
+

Returns#

+
+
np.ndarray

A two-dimensional array.

+
+
+
+
+

Examples#

+

By default, the fill value used is NaN:

+
>>> ragged_to_regular(np.array([1, 2, 3, 4, 5]), np.array([2, 1, 2]))
+array([[ 1.,  2.],
+       [ 3., nan],
+       [ 4.,  5.]])
+
+
+

You can specify an alternative fill value:

+
>>> ragged_to_regular(np.array([1, 2, 3, 4, 5]), np.array([2, 1, 2]), fill_value=999)
+array([[ 1.,    2.],
+       [ 3., -999.],
+       [ 4.,    5.]])
+
+
+
+
+

See Also#

+

regular_to_ragged()

+
+
+ +
+
+clouddrift.ragged.regular_to_ragged(array: ndarray, fill_value: float = nan) tuple[ndarray, ndarray][source]#
+

Convert a two-dimensional array to a ragged array. Fill values in the input array are +excluded from the output ragged array.

+
+

Parameters#

+
+
arraynp.ndarray

A two-dimensional array.

+
+
fill_valuefloat, optional

Fill value used to determine the bounds of contiguous segments.

+
+
+
+
+

Returns#

+
+
tuple[np.ndarray, np.ndarray]

A tuple of the ragged array and the size of each row.

+
+
+
+
+

Examples#

+

By default, NaN values found in the input regular array are excluded from +the output ragged array:

+
>>> regular_to_ragged(np.array([[1, 2], [3, np.nan], [4, 5]]))
+(array([1., 2., 3., 4., 5.]), array([2, 1, 2]))
+
+
+

Alternatively, a different fill value can be specified:

+
>>> regular_to_ragged(np.array([[1, 2], [3, -999], [4, 5]]), fill_value=-999)
+(array([1., 2., 3., 4., 5.]), array([2, 1, 2]))
+
+
+
+
+

See Also#

+

ragged_to_regular()

+
+
+ +
+
+clouddrift.ragged.rowsize_to_index(rowsize: list | ndarray | DataArray) ndarray[source]#
+

Convert a list of row sizes to a list of indices.

+

This function is typically used to obtain the indices of data rows organized +in a ragged array.

+
+

Parameters#

+
+
rowsizelist or np.ndarray or xr.DataArray

A list of row sizes.

+
+
+
+
+

Returns#

+
+
np.ndarray

A list of indices.

+
+
+
+
+

Examples#

+

To obtain the indices within a ragged array of three consecutive rows of sizes 100, 202, and 53:

+
>>> rowsize_to_index([100, 202, 53])
+array([0, 100, 302, 355])
+
+
+
+
+ +
+
+clouddrift.ragged.segment(x: ndarray, tolerance: float | timedelta64 | timedelta | Timedelta, rowsize: ndarray[int] = None) ndarray[int][source]#
+

Divide an array into segments based on a tolerance value.

+
+

Parameters#

+
+
xlist, np.ndarray, or xr.DataArray

An array to divide into segment.

+
+
tolerancefloat, np.timedelta64, timedelta, pd.Timedelta

The maximum signed difference between consecutive points in a segment. +The array x will be segmented wherever differences exceed the tolerance.

+
+
rowsizenp.ndarray[int], optional

The size of rows if x is originally a ragged array. If present, x will be +divided both by gaps that exceed the tolerance, and by the original rows +of the ragged array.

+
+
+
+
+

Returns#

+
+
np.ndarray[int]

An array of row sizes that divides the input array into segments.

+
+
+
+
+

Examples#

+

The simplest use of segment is to provide a tolerance value that is +used to divide an array into segments:

+
>>> x = [0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 4]
+>>> segment(x, 0.5)
+array([1, 3, 2, 4, 1])
+
+
+

If the array is already previously segmented (e.g. multiple rows in +a ragged array), then the rowsize argument can be used to preserve +the original segments:

+
>>> x = [0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 4]
+>>> rowsize = [3, 2, 6]
+>>> segment(x, 0.5, rowsize)
+array([1, 2, 1, 1, 1, 4, 1])
+
+
+

The tolerance can also be negative. In this case, the input array is +segmented where the negative difference exceeds the negative +value of the tolerance, i.e. where x[n+1] - x[n] < -tolerance:

+
>>> x = [0, 1, 2, 0, 1, 2]
+>>> segment(x, -0.5)
+array([3, 3])
+
+
+

To segment an array for both positive and negative gaps, invoke the function +twice, once for a positive tolerance and once for a negative tolerance. +The result of the first invocation can be passed as the rowsize argument +to the first segment invocation:

+
>>> x = [1, 1, 2, 2, 1, 1, 2, 2]
+>>> segment(x, 0.5, rowsize=segment(x, -0.5))
+array([2, 2, 2, 2])
+
+
+

If the input array contains time objects, the tolerance must be a time interval:

+
>>> x = np.array([np.datetime64("2023-01-01"), np.datetime64("2023-01-02"),
+                  np.datetime64("2023-01-03"), np.datetime64("2023-02-01"),
+                  np.datetime64("2023-02-02")])
+>>> segment(x, np.timedelta64(1, "D"))
+np.array([3, 2])
+
+
+
+
+ +
+
+clouddrift.ragged.subset(ds: Dataset, criteria: dict, id_var_name: str = 'id', rowsize_var_name: str = 'rowsize', traj_dim_name: str = 'traj', obs_dim_name: str = 'obs', full_trajectories=False) Dataset[source]#
+

Subset a ragged array dataset as a function of one or more criteria. +The criteria are passed with a dictionary, where a dictionary key +is a variable to subset and the associated dictionary value is either a range +(valuemin, valuemax), a list [value1, value2, valueN], a single value, or a +masking function applied to every row of the ragged array using apply_ragged.

+

This function needs to know the names of the dimensions of the ragged array dataset +(traj_dim_name and obs_dim_name), and the name of the rowsize variable (rowsize_var_name). +Default values are provided for these arguments (see below), but they can be changed if needed.

+
+

Parameters#

+
+
dsxr.Dataset

Dataset stored as ragged arrays

+
+
criteriadict

dictionary containing the variables (as keys) and the ranges/values/functions (as values) to subset

+
+
id_var_namestr, optional

Name of the variable containing the ID of the trajectories (default is “id”)

+
+
rowsize_var_namestr, optional

Name of the variable containing the number of observations per trajectory (default is “rowsize”)

+
+
traj_dim_namestr, optional

Name of the trajectory dimension (default is “traj”)

+
+
obs_dim_namestr, optional

Name of the observation dimension (default is “obs”)

+
+
full_trajectoriesbool, optional

If True, it returns the complete trajectories (rows) where at least one observation +matches the criteria, rather than just the segments where the criteria are satisfied. +Default is False.

+
+
+
+
+

Returns#

+
+
xr.Dataset

subset Dataset matching the criterion(a)

+
+
+
+
+

Examples#

+

Criteria are combined on any data or metadata variables part of the Dataset. +The following examples are based on NOAA GDP datasets which can be accessed with the +clouddrift.datasets module.

+

Retrieve a region, like the Gulf of Mexico, using ranges of latitude and longitude:

+
>>> subset(ds, {"lat": (21, 31), "lon": (-98, -78)})
+
+
+

The parameter full_trajectories can be used to retrieve trajectories passing through a region, for example all trajectories passing through the Gulf of Mexico:

+
>>> subset(ds, {"lat": (21, 31), "lon": (-98, -78)}, full_trajectories=True)
+
+
+

Retrieve drogued trajectory segments:

+
>>> subset(ds, {"drogue_status": True})
+
+
+

Retrieve trajectory segments with temperature higher than 25°C (303.15K):

+
>>> subset(ds, {"sst": (303.15, np.inf)})
+
+
+

You can use the same approach to return only the trajectories that are +shorter than some number of observations (similar to prune() but for +the entire dataset):

+
>>> subset(ds, {"rowsize": (0, 1000)})
+
+
+

Retrieve specific drifters from their IDs:

+
>>> subset(ds, {"id": [2578, 2582, 2583]})
+
+
+

Sometimes, you may want to retrieve specific rows of a ragged array. +You can do that by filtering along the trajectory dimension directly, since +this one corresponds to row numbers:

+
>>> rows = [5, 6, 7]
+>>> subset(ds, {"traj": rows})
+
+
+

Retrieve a specific time period:

+
>>> subset(ds, {"time": (np.datetime64("2000-01-01"), np.datetime64("2020-01-31"))})
+
+
+

Note that to subset time variable, the range has to be defined as a function +type of the variable. By default, xarray uses np.datetime64 to +represent datetime data. If the datetime data is a datetime.datetime, or +pd.Timestamp, the range would have to be defined accordingly.

+

Those criteria can also be combined:

+
>>> subset(ds, {"lat": (21, 31), "lon": (-98, -78), "drogue_status": True, "sst": (303.15, np.inf), "time": (np.datetime64("2000-01-01"), np.datetime64("2020-01-31"))})
+
+
+

You can also use a function to filter the data. For example, retrieve every other observation +of each trajectory (row):

+
>>> func = (lambda arr: ((arr - arr[0]) % 2) == 0)
+>>> subset(ds, {"time": func})
+
+
+
+
+

Raises#

+
+
ValueError

If one of the variable in a criterion is not found in the Dataset

+
+
+
+
+ +
+
+clouddrift.ragged.unpack(ragged_array: ndarray, rowsize: ndarray[int], rows: int | Iterable[int] = None, axis: int = 0) list[ndarray][source]#
+

Unpack a ragged array into a list of regular arrays.

+

Unpacking a np.ndarray ragged array is about 2 orders of magnitude +faster than unpacking an xr.DataArray ragged array, so unless you need a +DataArray as the result, we recommend passing np.ndarray as input.

+
+

Parameters#

+
+
ragged_arrayarray-like

A ragged_array to unpack

+
+
rowsizearray-like

An array of integers whose values is the size of each row in the ragged +array

+
+
rowsint or Iterable[int], optional

A row or list of rows to unpack. Default is None, which unpacks all rows.

+
+
axisint, optional

The axis along which to unpack the ragged array. Default is 0.

+
+
+
+
+

Returns#

+
+
list

A list of array-likes with sizes that correspond to the values in +rowsize, and types that correspond to the type of ragged_array

+
+
+
+
+

Examples#

+

Unpacking longitude arrays from a ragged Xarray Dataset:

+
lon = unpack(ds.lon, ds["rowsize"]) # return a list[xr.DataArray] (slower)
+lon = unpack(ds.lon.values, ds["rowsize"]) # return a list[np.ndarray] (faster)
+first_lon = unpack(ds.lon.values, ds["rowsize"], rows=0) # return only the first row
+first_two_lons = unpack(ds.lon.values, ds["rowsize"], rows=[0, 1]) # return first two rows
+
+
+

Looping over trajectories in a ragged Xarray Dataset to compute velocities +for each:

+
for lon, lat, time in list(zip(
+    unpack(ds.lon.values, ds["rowsize"]),
+    unpack(ds.lat.values, ds["rowsize"]),
+    unpack(ds.time.values, ds["rowsize"])
+)):
+    u, v = velocity_from_position(lon, lat, time)
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.prune.html b/_autosummary/clouddrift.ragged.prune.html new file mode 100644 index 00000000..f590d5a0 --- /dev/null +++ b/_autosummary/clouddrift.ragged.prune.html @@ -0,0 +1,607 @@ + + + + + + + + + + + clouddrift.ragged.prune — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.prune

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.prune#

+
+
+clouddrift.ragged.prune(ragged: list | ndarray | Series | DataArray, rowsize: list | ndarray | Series | DataArray, min_rowsize: float) Tuple[ndarray, ndarray][source]#
+

Within a ragged array, removes arrays less than a specified row size.

+
+

Parameters#

+
+
raggednp.ndarray or pd.Series or xr.DataArray

A ragged array.

+
+
rowsizelist or np.ndarray[int] or pd.Series or xr.DataArray[int]

The size of each row in the input ragged array.

+
+
min_rowsize :

The minimum row size that will be kept.

+
+
+
+
+

Returns#

+
+
tuple[np.ndarray, np.ndarray]

A tuple of ragged array and size of each row.

+
+
+
+
+

Examples#

+
>>> prune(np.array([1, 2, 3, 0, -1, -2]), np.array([3, 1, 2]),2)
+(array([1, 2, 3, -1, -2]), array([3, 2]))
+
+
+
+
+

Raises#

+
+
ValueError

If the sum of rowsize does not equal the length of arrays.

+
+
IndexError

If empty ragged.

+
+
+
+
+

See Also#

+

segment(), chunk

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.ragged_to_regular.html b/_autosummary/clouddrift.ragged.ragged_to_regular.html new file mode 100644 index 00000000..ec60b6a2 --- /dev/null +++ b/_autosummary/clouddrift.ragged.ragged_to_regular.html @@ -0,0 +1,615 @@ + + + + + + + + + + + clouddrift.ragged.ragged_to_regular — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.ragged_to_regular

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.ragged_to_regular#

+
+
+clouddrift.ragged.ragged_to_regular(ragged: ndarray | Series | DataArray, rowsize: list | ndarray | Series | DataArray, fill_value: float = nan) ndarray[source]#
+

Convert a ragged array to a two-dimensional array such that each contiguous segment +of a ragged array is a row in the two-dimensional array. Each row of the two-dimensional +array is padded with NaNs as needed. The length of the first dimension of the output +array is the length of rowsize. The length of the second dimension is the maximum +element of rowsize.

+

Note: Although this function accepts parameters of type xarray.DataArray, +passing NumPy arrays is recommended for performance reasons.

+
+

Parameters#

+
+
raggednp.ndarray or pd.Series or xr.DataArray

A ragged array.

+
+
rowsizelist or np.ndarray[int] or pd.Series or xr.DataArray[int]

The size of each row in the ragged array.

+
+
fill_valuefloat, optional

Fill value to use for the trailing elements of each row of the resulting +regular array.

+
+
+
+
+

Returns#

+
+
np.ndarray

A two-dimensional array.

+
+
+
+
+

Examples#

+

By default, the fill value used is NaN:

+
>>> ragged_to_regular(np.array([1, 2, 3, 4, 5]), np.array([2, 1, 2]))
+array([[ 1.,  2.],
+       [ 3., nan],
+       [ 4.,  5.]])
+
+
+

You can specify an alternative fill value:

+
>>> ragged_to_regular(np.array([1, 2, 3, 4, 5]), np.array([2, 1, 2]), fill_value=999)
+array([[ 1.,    2.],
+       [ 3., -999.],
+       [ 4.,    5.]])
+
+
+
+
+

See Also#

+

regular_to_ragged()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.regular_to_ragged.html b/_autosummary/clouddrift.ragged.regular_to_ragged.html new file mode 100644 index 00000000..6908c27b --- /dev/null +++ b/_autosummary/clouddrift.ragged.regular_to_ragged.html @@ -0,0 +1,604 @@ + + + + + + + + + + + clouddrift.ragged.regular_to_ragged — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.regular_to_ragged

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.regular_to_ragged#

+
+
+clouddrift.ragged.regular_to_ragged(array: ndarray, fill_value: float = nan) tuple[ndarray, ndarray][source]#
+

Convert a two-dimensional array to a ragged array. Fill values in the input array are +excluded from the output ragged array.

+
+

Parameters#

+
+
arraynp.ndarray

A two-dimensional array.

+
+
fill_valuefloat, optional

Fill value used to determine the bounds of contiguous segments.

+
+
+
+
+

Returns#

+
+
tuple[np.ndarray, np.ndarray]

A tuple of the ragged array and the size of each row.

+
+
+
+
+

Examples#

+

By default, NaN values found in the input regular array are excluded from +the output ragged array:

+
>>> regular_to_ragged(np.array([[1, 2], [3, np.nan], [4, 5]]))
+(array([1., 2., 3., 4., 5.]), array([2, 1, 2]))
+
+
+

Alternatively, a different fill value can be specified:

+
>>> regular_to_ragged(np.array([[1, 2], [3, -999], [4, 5]]), fill_value=-999)
+(array([1., 2., 3., 4., 5.]), array([2, 1, 2]))
+
+
+
+
+

See Also#

+

ragged_to_regular()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.rowsize_to_index.html b/_autosummary/clouddrift.ragged.rowsize_to_index.html new file mode 100644 index 00000000..bf59d439 --- /dev/null +++ b/_autosummary/clouddrift.ragged.rowsize_to_index.html @@ -0,0 +1,593 @@ + + + + + + + + + + + clouddrift.ragged.rowsize_to_index — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.rowsize_to_index

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.rowsize_to_index#

+
+
+clouddrift.ragged.rowsize_to_index(rowsize: list | ndarray | DataArray) ndarray[source]#
+

Convert a list of row sizes to a list of indices.

+

This function is typically used to obtain the indices of data rows organized +in a ragged array.

+
+

Parameters#

+
+
rowsizelist or np.ndarray or xr.DataArray

A list of row sizes.

+
+
+
+
+

Returns#

+
+
np.ndarray

A list of indices.

+
+
+
+
+

Examples#

+

To obtain the indices within a ragged array of three consecutive rows of sizes 100, 202, and 53:

+
>>> rowsize_to_index([100, 202, 53])
+array([0, 100, 302, 355])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.segment.html b/_autosummary/clouddrift.ragged.segment.html new file mode 100644 index 00000000..96b89842 --- /dev/null +++ b/_autosummary/clouddrift.ragged.segment.html @@ -0,0 +1,634 @@ + + + + + + + + + + + clouddrift.ragged.segment — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.segment

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.segment#

+
+
+clouddrift.ragged.segment(x: ndarray, tolerance: float | timedelta64 | timedelta | Timedelta, rowsize: ndarray[int] = None) ndarray[int][source]#
+

Divide an array into segments based on a tolerance value.

+
+

Parameters#

+
+
xlist, np.ndarray, or xr.DataArray

An array to divide into segment.

+
+
tolerancefloat, np.timedelta64, timedelta, pd.Timedelta

The maximum signed difference between consecutive points in a segment. +The array x will be segmented wherever differences exceed the tolerance.

+
+
rowsizenp.ndarray[int], optional

The size of rows if x is originally a ragged array. If present, x will be +divided both by gaps that exceed the tolerance, and by the original rows +of the ragged array.

+
+
+
+
+

Returns#

+
+
np.ndarray[int]

An array of row sizes that divides the input array into segments.

+
+
+
+
+

Examples#

+

The simplest use of segment is to provide a tolerance value that is +used to divide an array into segments:

+
>>> x = [0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 4]
+>>> segment(x, 0.5)
+array([1, 3, 2, 4, 1])
+
+
+

If the array is already previously segmented (e.g. multiple rows in +a ragged array), then the rowsize argument can be used to preserve +the original segments:

+
>>> x = [0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 4]
+>>> rowsize = [3, 2, 6]
+>>> segment(x, 0.5, rowsize)
+array([1, 2, 1, 1, 1, 4, 1])
+
+
+

The tolerance can also be negative. In this case, the input array is +segmented where the negative difference exceeds the negative +value of the tolerance, i.e. where x[n+1] - x[n] < -tolerance:

+
>>> x = [0, 1, 2, 0, 1, 2]
+>>> segment(x, -0.5)
+array([3, 3])
+
+
+

To segment an array for both positive and negative gaps, invoke the function +twice, once for a positive tolerance and once for a negative tolerance. +The result of the first invocation can be passed as the rowsize argument +to the first segment invocation:

+
>>> x = [1, 1, 2, 2, 1, 1, 2, 2]
+>>> segment(x, 0.5, rowsize=segment(x, -0.5))
+array([2, 2, 2, 2])
+
+
+

If the input array contains time objects, the tolerance must be a time interval:

+
>>> x = np.array([np.datetime64("2023-01-01"), np.datetime64("2023-01-02"),
+                  np.datetime64("2023-01-03"), np.datetime64("2023-02-01"),
+                  np.datetime64("2023-02-02")])
+>>> segment(x, np.timedelta64(1, "D"))
+np.array([3, 2])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.subset.html b/_autosummary/clouddrift.ragged.subset.html new file mode 100644 index 00000000..963b9d79 --- /dev/null +++ b/_autosummary/clouddrift.ragged.subset.html @@ -0,0 +1,668 @@ + + + + + + + + + + + clouddrift.ragged.subset — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.subset

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.subset#

+
+
+clouddrift.ragged.subset(ds: Dataset, criteria: dict, id_var_name: str = 'id', rowsize_var_name: str = 'rowsize', traj_dim_name: str = 'traj', obs_dim_name: str = 'obs', full_trajectories=False) Dataset[source]#
+

Subset a ragged array dataset as a function of one or more criteria. +The criteria are passed with a dictionary, where a dictionary key +is a variable to subset and the associated dictionary value is either a range +(valuemin, valuemax), a list [value1, value2, valueN], a single value, or a +masking function applied to every row of the ragged array using apply_ragged.

+

This function needs to know the names of the dimensions of the ragged array dataset +(traj_dim_name and obs_dim_name), and the name of the rowsize variable (rowsize_var_name). +Default values are provided for these arguments (see below), but they can be changed if needed.

+
+

Parameters#

+
+
dsxr.Dataset

Dataset stored as ragged arrays

+
+
criteriadict

dictionary containing the variables (as keys) and the ranges/values/functions (as values) to subset

+
+
id_var_namestr, optional

Name of the variable containing the ID of the trajectories (default is “id”)

+
+
rowsize_var_namestr, optional

Name of the variable containing the number of observations per trajectory (default is “rowsize”)

+
+
traj_dim_namestr, optional

Name of the trajectory dimension (default is “traj”)

+
+
obs_dim_namestr, optional

Name of the observation dimension (default is “obs”)

+
+
full_trajectoriesbool, optional

If True, it returns the complete trajectories (rows) where at least one observation +matches the criteria, rather than just the segments where the criteria are satisfied. +Default is False.

+
+
+
+
+

Returns#

+
+
xr.Dataset

subset Dataset matching the criterion(a)

+
+
+
+
+

Examples#

+

Criteria are combined on any data or metadata variables part of the Dataset. +The following examples are based on NOAA GDP datasets which can be accessed with the +clouddrift.datasets module.

+

Retrieve a region, like the Gulf of Mexico, using ranges of latitude and longitude:

+
>>> subset(ds, {"lat": (21, 31), "lon": (-98, -78)})
+
+
+

The parameter full_trajectories can be used to retrieve trajectories passing through a region, for example all trajectories passing through the Gulf of Mexico:

+
>>> subset(ds, {"lat": (21, 31), "lon": (-98, -78)}, full_trajectories=True)
+
+
+

Retrieve drogued trajectory segments:

+
>>> subset(ds, {"drogue_status": True})
+
+
+

Retrieve trajectory segments with temperature higher than 25°C (303.15K):

+
>>> subset(ds, {"sst": (303.15, np.inf)})
+
+
+

You can use the same approach to return only the trajectories that are +shorter than some number of observations (similar to prune() but for +the entire dataset):

+
>>> subset(ds, {"rowsize": (0, 1000)})
+
+
+

Retrieve specific drifters from their IDs:

+
>>> subset(ds, {"id": [2578, 2582, 2583]})
+
+
+

Sometimes, you may want to retrieve specific rows of a ragged array. +You can do that by filtering along the trajectory dimension directly, since +this one corresponds to row numbers:

+
>>> rows = [5, 6, 7]
+>>> subset(ds, {"traj": rows})
+
+
+

Retrieve a specific time period:

+
>>> subset(ds, {"time": (np.datetime64("2000-01-01"), np.datetime64("2020-01-31"))})
+
+
+

Note that to subset time variable, the range has to be defined as a function +type of the variable. By default, xarray uses np.datetime64 to +represent datetime data. If the datetime data is a datetime.datetime, or +pd.Timestamp, the range would have to be defined accordingly.

+

Those criteria can also be combined:

+
>>> subset(ds, {"lat": (21, 31), "lon": (-98, -78), "drogue_status": True, "sst": (303.15, np.inf), "time": (np.datetime64("2000-01-01"), np.datetime64("2020-01-31"))})
+
+
+

You can also use a function to filter the data. For example, retrieve every other observation +of each trajectory (row):

+
>>> func = (lambda arr: ((arr - arr[0]) % 2) == 0)
+>>> subset(ds, {"time": func})
+
+
+
+
+

Raises#

+
+
ValueError

If one of the variable in a criterion is not found in the Dataset

+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.ragged.unpack.html b/_autosummary/clouddrift.ragged.unpack.html new file mode 100644 index 00000000..043fc6c9 --- /dev/null +++ b/_autosummary/clouddrift.ragged.unpack.html @@ -0,0 +1,614 @@ + + + + + + + + + + + clouddrift.ragged.unpack — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.ragged.unpack

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.ragged.unpack#

+
+
+clouddrift.ragged.unpack(ragged_array: ndarray, rowsize: ndarray[int], rows: int | Iterable[int] = None, axis: int = 0) list[ndarray][source]#
+

Unpack a ragged array into a list of regular arrays.

+

Unpacking a np.ndarray ragged array is about 2 orders of magnitude +faster than unpacking an xr.DataArray ragged array, so unless you need a +DataArray as the result, we recommend passing np.ndarray as input.

+
+

Parameters#

+
+
ragged_arrayarray-like

A ragged_array to unpack

+
+
rowsizearray-like

An array of integers whose values is the size of each row in the ragged +array

+
+
rowsint or Iterable[int], optional

A row or list of rows to unpack. Default is None, which unpacks all rows.

+
+
axisint, optional

The axis along which to unpack the ragged array. Default is 0.

+
+
+
+
+

Returns#

+
+
list

A list of array-likes with sizes that correspond to the values in +rowsize, and types that correspond to the type of ragged_array

+
+
+
+
+

Examples#

+

Unpacking longitude arrays from a ragged Xarray Dataset:

+
lon = unpack(ds.lon, ds["rowsize"]) # return a list[xr.DataArray] (slower)
+lon = unpack(ds.lon.values, ds["rowsize"]) # return a list[np.ndarray] (faster)
+first_lon = unpack(ds.lon.values, ds["rowsize"], rows=0) # return only the first row
+first_two_lons = unpack(ds.lon.values, ds["rowsize"], rows=[0, 1]) # return first two rows
+
+
+

Looping over trajectories in a ragged Xarray Dataset to compute velocities +for each:

+
for lon, lat, time in list(zip(
+    unpack(ds.lon.values, ds["rowsize"]),
+    unpack(ds.lat.values, ds["rowsize"]),
+    unpack(ds.time.values, ds["rowsize"])
+)):
+    u, v = velocity_from_position(lon, lat, time)
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.raggedarray.RaggedArray.html b/_autosummary/clouddrift.raggedarray.RaggedArray.html new file mode 100644 index 00000000..a439e103 --- /dev/null +++ b/_autosummary/clouddrift.raggedarray.RaggedArray.html @@ -0,0 +1,919 @@ + + + + + + + + + + + clouddrift.raggedarray.RaggedArray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.raggedarray.RaggedArray#

+
+
+class clouddrift.raggedarray.RaggedArray(coords: dict, metadata: dict, data: dict, attrs_global: dict | None = {}, attrs_variables: dict | None = {})[source]#
+

Bases: object

+
+
+__init__(coords: dict, metadata: dict, data: dict, attrs_global: dict | None = {}, attrs_variables: dict | None = {})[source]#
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(coords, metadata, data[, ...])

allocate(preprocess_func, indices, rowsize, ...)

Iterate through the files and fill for the ragged array associated with coordinates, and selected metadata and data variables.

attributes(ds, name_coords, name_meta, name_data)

Return global attributes and the attributes of all variables (name_coords, name_meta, and name_data) from an Xarray Dataset.

from_awkward(array[, name_coords])

Load a RaggedArray instance from an Awkward Array.

from_files(indices, preprocess_func, name_coords)

Generate a ragged array archive from a list of trajectory files

from_netcdf(filename)

Read a ragged arrays archive from a NetCDF file.

from_parquet(filename[, name_coords])

Read a ragged array from a parquet file.

from_xarray(ds[, dim_traj, dim_obs])

Populate a RaggedArray instance from an xarray Dataset instance.

number_of_observations(rowsize_func, ...)

Iterate through the files and evaluate the number of observations.

to_awkward()

Convert ragged array object to an Awkward Array.

to_netcdf(filename)

Export ragged array object to a NetCDF file.

to_parquet(filename)

Export ragged array object to a parquet file.

to_xarray([cast_to_float32])

Convert ragged array object to a xarray Dataset.

validate_attributes()

Validate that each variable has an assigned attribute tag.

+
+
+static allocate(preprocess_func: Callable[[int], Dataset], indices: list, rowsize: list, name_coords: list, name_meta: list, name_data: list, **kwargs) Tuple[dict, dict, dict][source]#
+

Iterate through the files and fill for the ragged array associated +with coordinates, and selected metadata and data variables.

+
+

Parameters#

+
+
preprocess_funcCallable[[int], xr.Dataset]

Returns a processed xarray Dataset from an identification number.

+
+
indiceslist

List of indices separating trajectory in the ragged arrays.

+
+
rowsizelist

List of the number of observations per trajectory.

+
+
name_coordslist

Name of the coordinate variables to include in the archive.

+
+
name_metalist, optional

Name of metadata variables to include in the archive (Defaults to []).

+
+
name_datalist, optional

Name of the data variables to include in the archive (Defaults to []).

+
+
+
+
+

Returns#

+
+
Tuple[dict, dict, dict]

Dictionaries containing numerical data and attributes of coordinates, metadata and data variables.

+
+
+
+
+ +
+
+static attributes(ds: Dataset, name_coords: list, name_meta: list, name_data: list) Tuple[dict, dict][source]#
+

Return global attributes and the attributes of all variables +(name_coords, name_meta, and name_data) from an Xarray Dataset.

+
+

Parameters#

+
+
dsxr.Dataset

_description_

+
+
name_coordslist

Name of the coordinate variables to include in the archive

+
+
name_metalist, optional

Name of metadata variables to include in the archive (default is [])

+
+
name_datalist, optional

Name of the data variables to include in the archive (default is [])

+
+
+
+
+

Returns#

+
+
Tuple[dict, dict]

The global and variables attributes

+
+
+
+
+ +
+
+classmethod from_awkward(array: Array, name_coords: list | None = ['time', 'lon', 'lat', 'ids'])[source]#
+

Load a RaggedArray instance from an Awkward Array.

+
+

Parameters#

+
+
arrayak.Array

Awkward Array instance to load the data from

+
+
name_coordslist, optional

Names of the coordinate variables in the ragged arrays

+
+
+
+
+

Returns#

+
+
RaggedArray

A RaggedArray instance

+
+
+
+
+ +
+
+classmethod from_files(indices: list, preprocess_func: Callable[[int], Dataset], name_coords: list, name_meta: list | None = [], name_data: list | None = [], rowsize_func: Callable[[int], int] | None = None, **kwargs)[source]#
+

Generate a ragged array archive from a list of trajectory files

+
+

Parameters#

+
+
indiceslist

Identification numbers list to iterate

+
+
preprocess_funcCallable[[int], xr.Dataset]

Returns a processed xarray Dataset from an identification number

+
+
name_coordslist

Name of the coordinate variables to include in the archive

+
+
name_metalist, optional

Name of metadata variables to include in the archive (Defaults to [])

+
+
name_datalist, optional

Name of the data variables to include in the archive (Defaults to [])

+
+
rowsize_funcOptional[Callable[[int], int]], optional

Returns the number of observations from an identification number (to speed up processing) (Defaults to None)

+
+
+
+
+

Returns#

+
+
RaggedArray

A RaggedArray instance

+
+
+
+
+ +
+
+classmethod from_netcdf(filename: str)[source]#
+

Read a ragged arrays archive from a NetCDF file.

+

This is a thin wrapper around from_xarray().

+
+

Parameters#

+
+
filenamestr

File name of the NetCDF archive to read.

+
+
+
+
+

Returns#

+
+
RaggedArray

A ragged array instance

+
+
+
+
+ +
+
+classmethod from_parquet(filename: str, name_coords: list | None = ['time', 'lon', 'lat', 'ids'])[source]#
+

Read a ragged array from a parquet file.

+
+

Parameters#

+
+
filenamestr

File name of the parquet archive to read.

+
+
name_coordslist, optional

Names of the coordinate variables in the ragged arrays

+
+
+
+
+

Returns#

+
+
RaggedArray

A ragged array instance

+
+
+
+
+ +
+
+classmethod from_xarray(ds: Dataset, dim_traj: str = 'traj', dim_obs: str = 'obs')[source]#
+

Populate a RaggedArray instance from an xarray Dataset instance.

+
+

Parameters#

+
+
dsxr.Dataset

Xarray Dataset from which to load the RaggedArray

+
+
dim_trajstr, optional

Name of the trajectories dimension in the xarray Dataset

+
+
dim_obsstr, optional

Name of the observations dimension in the xarray Dataset

+
+
+
+
+

Returns#

+
+
RaggedArray

A RaggedArray instance

+
+
+
+
+ +
+
+static number_of_observations(rowsize_func: Callable[[int], int], indices: list, **kwargs) array[source]#
+

Iterate through the files and evaluate the number of observations.

+
+

Parameters#

+
+
rowsize_funcCallable[[int], int]]

Function that returns the number observations of a trajectory from +its identification number

+
+
indiceslist

Identification numbers list to iterate

+
+
+
+
+

Returns#

+
+
np.ndarray

Number of observations of each trajectory

+
+
+
+
+ +
+
+to_awkward()[source]#
+

Convert ragged array object to an Awkward Array.

+
+

Returns#

+
+
ak.Array

Awkward Array containing the ragged array and its attributes

+
+
+
+
+ +
+
+to_netcdf(filename: str)[source]#
+

Export ragged array object to a NetCDF file.

+
+

Parameters#

+
+
filenamestr

Name of the NetCDF file to create.

+
+
+
+
+ +
+
+to_parquet(filename: str)[source]#
+

Export ragged array object to a parquet file.

+
+

Parameters#

+
+
filenamestr

Name of the parquet file to create.

+
+
+
+
+ +
+
+to_xarray(cast_to_float32: bool = True)[source]#
+

Convert ragged array object to a xarray Dataset.

+
+

Parameters#

+
+
cast_to_float32bool, optional

Cast all float64 variables to float32 (default is True). This option aims at +minimizing the size of the xarray dataset.

+
+
+
+
+

Returns#

+
+
xr.Dataset

Xarray Dataset containing the ragged arrays and their attributes

+
+
+
+
+ +
+
+validate_attributes()[source]#
+

Validate that each variable has an assigned attribute tag.

+
+ +
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.raggedarray.html b/_autosummary/clouddrift.raggedarray.html new file mode 100644 index 00000000..6cdb1cfb --- /dev/null +++ b/_autosummary/clouddrift.raggedarray.html @@ -0,0 +1,551 @@ + + + + + + + + + + + clouddrift.raggedarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.raggedarray

+ +
+
+ +
+
+
+ + + + +
+ +
+

clouddrift.raggedarray#

+

This module defines the RaggedArray class, which is the intermediate data +structure used by CloudDrift to process custom Lagrangian datasets to Xarray +Datasets and Awkward Arrays.

+

Classes

+ + + + + + +

RaggedArray(coords, metadata, data[, ...])

+
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.signal.analytic_signal.html b/_autosummary/clouddrift.signal.analytic_signal.html new file mode 100644 index 00000000..1e18b149 --- /dev/null +++ b/_autosummary/clouddrift.signal.analytic_signal.html @@ -0,0 +1,644 @@ + + + + + + + + + + + clouddrift.signal.analytic_signal — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.signal.analytic_signal

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.signal.analytic_signal#

+
+
+clouddrift.signal.analytic_signal(x: ndarray | DataArray, boundary: str | None = 'mirror', time_axis: int | None = -1) ndarray | Tuple[ndarray, ndarray][source]#
+

Return the analytic signal from a real-valued signal or the analytic and +conjugate analytic signals from a complex-valued signal.

+

If the input is a real-valued signal, the analytic signal is calculated as +the inverse Fourier transform of the positive-frequency part of the Fourier +transform. If the input is a complex-valued signal, the conjugate analytic signal +is additionally calculated as the inverse Fourier transform of the positive-frequency +part of the Fourier transform of the complex conjugate of the input signal.

+

For a complex-valued signal, the mean is evenly divided between the analytic and +conjugate analytic signal.

+

The calculation is performed along the last axis of the input array by default. +Alternatively, the user can specify the time axis of the input. The user can also +specify the boundary conditions to be applied to the input array (default is “mirror”).

+
+

Parameters#

+
+
xarray_like

Real- or complex-valued signal.

+
+
boundarystr, optional

The boundary condition to be imposed at the edges of the time series. +Allowed values are “mirror”, “zeros”, and “periodic”. +Default is “mirror”.

+
+
time_axisint, optional

Axis on which the time is defined (default is -1).

+
+
+
+
+

Returns#

+
+
xanp.ndarray

Analytic signal. It is a tuple if the input is a complex-valed signal +with the first element being the analytic signal and the second element +being the conjugate analytic signal.

+
+
+
+
+

Examples#

+

To obtain the analytic signal of a real-valued signal:

+
>>> x = np.random.rand(99)
+>>> xa = analytic_signal(x)
+
+
+

To obtain the analytic and conjugate analytic signals of a complex-valued signal:

+
>>> w = np.random.rand(99)+1j*np.random.rand(99)
+>>> wp, wn = analytic_signal(w)
+
+
+

To specify that a periodic boundary condition should be used:

+
>>> x = np.random.rand(99)
+>>> xa = analytic_signal(x, boundary="periodic")
+
+
+

To specify that the time axis is along the first axis and apply +zero boundary conditions:

+
>>> x = np.random.rand(100, 99)
+>>> xa = analytic_signal(x, time_axis=0, boundary="zeros")
+
+
+
+
+

Raises#

+
+
ValueError

If the time axis is outside of the valid range ([-1, N-1]). +If boundary not in ["mirror", "zeros", "periodic"].

+
+
+
+
+

References#

+

[1] Gabor D. 1946 Theory of communication. Proc. IEE 93, 429–457. (10.1049/ji-1.1947.0015).

+

[2] Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729).

+
+
+

See Also#

+

rotary_to_cartesian(), cartesian_to_rotary()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.signal.cartesian_to_rotary.html b/_autosummary/clouddrift.signal.cartesian_to_rotary.html new file mode 100644 index 00000000..231940f1 --- /dev/null +++ b/_autosummary/clouddrift.signal.cartesian_to_rotary.html @@ -0,0 +1,626 @@ + + + + + + + + + + + clouddrift.signal.cartesian_to_rotary — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.signal.cartesian_to_rotary

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.signal.cartesian_to_rotary#

+
+
+clouddrift.signal.cartesian_to_rotary(ua: ndarray | DataArray, va: ndarray | DataArray, time_axis: int | None = -1) Tuple[ndarray, ndarray][source]#
+

Return rotary signals (wp,wn) from analytic Cartesian signals (ua,va).

+

If ua is the analytic signal from real-valued signal u, and va the analytic signal +from real-valued signal v, then the positive (counterclockwise) and negative (clockwise) +signals are defined by wp = 0.5*(up+1j*vp), wp = 0.5*(up-1j*vp).

+

This function is the inverse of rotary_to_cartesian().

+
+

Parameters#

+
+
uaarray_like

Complex-valued analytic signal for first Cartesian component (zonal, east-west)

+
+
vaarray_like

Complex-valued analytic signal for second Cartesian component (meridional, north-south)

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
wpnp.ndarray

Complex-valued positive (counterclockwise) rotary signal.

+
+
wnnp.ndarray

Complex-valued negative (clockwise) rotary signal.

+
+
+
+
+

Examples#

+

To obtain the rotary signals from a pair of real-valued signal:

+
>>> u = np.random.rand(99)
+>>> v = np.random.rand(99)
+>>> wp, wn = cartesian_to_rotary(analytic_signal(u), analytic_signal(v))
+
+
+

To specify that the time axis is along the first axis:

+
>>> u = np.random.rand(100, 99)
+>>> v = np.random.rand(100, 99)
+>>> wp, wn = cartesian_to_rotary(analytic_signal(u), analytic_signal(v), time_axis=0)
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]).

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729)

+
+
+

See Also#

+

analytic_signal(), rotary_to_cartesian()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.signal.ellipse_parameters.html b/_autosummary/clouddrift.signal.ellipse_parameters.html new file mode 100644 index 00000000..648191ce --- /dev/null +++ b/_autosummary/clouddrift.signal.ellipse_parameters.html @@ -0,0 +1,614 @@ + + + + + + + + + + + clouddrift.signal.ellipse_parameters — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.signal.ellipse_parameters

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.signal.ellipse_parameters#

+
+
+clouddrift.signal.ellipse_parameters(xa: ndarray | DataArray, ya: ndarray | DataArray) Tuple[ndarray, ndarray, ndarray, ndarray][source]#
+

Return the instantaneous parameters of a modulated elliptical signal from its analytic Cartesian signals.

+
+

Parameters#

+
+
xaarray_like

Complex-valued analytic signal for first Cartesian component (zonal, east-west).

+
+
yaarray_like

Complex-valued analytic signal for second Cartesian component (meridional, north-south).

+
+
+
+
+

Returns#

+
+
kappanp.ndarray

Ellipse root-mean-square amplitude.

+
+
lambdanp.ndarray

Ellipse linearity between -1 and 1, or departure from circular motion (lambda=0).

+
+
thetanp.ndarray

Ellipse orientation in radian.

+
+
phinp.ndarray

Ellipse phase in radian.

+
+
+
+
+

Examples#

+

To obtain the ellipse parameters from a pair of real-valued signals (x, y):

+
>>> kappa, lambda, theta, phi = ellipse_parameters(analytic_signal(x), analytic_signal(y))
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape.

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729).

+
+
+

See Also#

+

modulated_ellipse_signal(), analytic_signal(), rotary_to_cartesian(), cartesian_to_rotary()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.signal.html b/_autosummary/clouddrift.signal.html new file mode 100644 index 00000000..2effca1a --- /dev/null +++ b/_autosummary/clouddrift.signal.html @@ -0,0 +1,899 @@ + + + + + + + + + + + clouddrift.signal — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.signal#

+

This module provides signal processing functions.

+

Functions

+ + + + + + + + + + + + + + + + + + +

analytic_signal(x[, boundary, time_axis])

Return the analytic signal from a real-valued signal or the analytic and conjugate analytic signals from a complex-valued signal.

cartesian_to_rotary(ua, va[, time_axis])

Return rotary signals (wp,wn) from analytic Cartesian signals (ua,va).

ellipse_parameters(xa, ya)

Return the instantaneous parameters of a modulated elliptical signal from its analytic Cartesian signals.

modulated_ellipse_signal(kappa, lambda_, ...)

Return the analytic Cartesian signals (xa, ya) from the instantaneous parameters of a modulated elliptical signal.

rotary_to_cartesian(wp, wn[, time_axis])

Return Cartesian analytic signals (ua, va) from rotary signals (wp, wn) as ua = wp + wn and va = -1j * (wp - wn).

+
+
+clouddrift.signal.analytic_signal(x: ndarray | DataArray, boundary: str | None = 'mirror', time_axis: int | None = -1) ndarray | Tuple[ndarray, ndarray][source]#
+

Return the analytic signal from a real-valued signal or the analytic and +conjugate analytic signals from a complex-valued signal.

+

If the input is a real-valued signal, the analytic signal is calculated as +the inverse Fourier transform of the positive-frequency part of the Fourier +transform. If the input is a complex-valued signal, the conjugate analytic signal +is additionally calculated as the inverse Fourier transform of the positive-frequency +part of the Fourier transform of the complex conjugate of the input signal.

+

For a complex-valued signal, the mean is evenly divided between the analytic and +conjugate analytic signal.

+

The calculation is performed along the last axis of the input array by default. +Alternatively, the user can specify the time axis of the input. The user can also +specify the boundary conditions to be applied to the input array (default is “mirror”).

+
+

Parameters#

+
+
xarray_like

Real- or complex-valued signal.

+
+
boundarystr, optional

The boundary condition to be imposed at the edges of the time series. +Allowed values are “mirror”, “zeros”, and “periodic”. +Default is “mirror”.

+
+
time_axisint, optional

Axis on which the time is defined (default is -1).

+
+
+
+
+

Returns#

+
+
xanp.ndarray

Analytic signal. It is a tuple if the input is a complex-valed signal +with the first element being the analytic signal and the second element +being the conjugate analytic signal.

+
+
+
+
+

Examples#

+

To obtain the analytic signal of a real-valued signal:

+
>>> x = np.random.rand(99)
+>>> xa = analytic_signal(x)
+
+
+

To obtain the analytic and conjugate analytic signals of a complex-valued signal:

+
>>> w = np.random.rand(99)+1j*np.random.rand(99)
+>>> wp, wn = analytic_signal(w)
+
+
+

To specify that a periodic boundary condition should be used:

+
>>> x = np.random.rand(99)
+>>> xa = analytic_signal(x, boundary="periodic")
+
+
+

To specify that the time axis is along the first axis and apply +zero boundary conditions:

+
>>> x = np.random.rand(100, 99)
+>>> xa = analytic_signal(x, time_axis=0, boundary="zeros")
+
+
+
+
+

Raises#

+
+
ValueError

If the time axis is outside of the valid range ([-1, N-1]). +If boundary not in ["mirror", "zeros", "periodic"].

+
+
+
+
+

References#

+

[1] Gabor D. 1946 Theory of communication. Proc. IEE 93, 429–457. (10.1049/ji-1.1947.0015).

+

[2] Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729).

+
+
+

See Also#

+

rotary_to_cartesian(), cartesian_to_rotary()

+
+
+ +
+
+clouddrift.signal.cartesian_to_rotary(ua: ndarray | DataArray, va: ndarray | DataArray, time_axis: int | None = -1) Tuple[ndarray, ndarray][source]#
+

Return rotary signals (wp,wn) from analytic Cartesian signals (ua,va).

+

If ua is the analytic signal from real-valued signal u, and va the analytic signal +from real-valued signal v, then the positive (counterclockwise) and negative (clockwise) +signals are defined by wp = 0.5*(up+1j*vp), wp = 0.5*(up-1j*vp).

+

This function is the inverse of rotary_to_cartesian().

+
+

Parameters#

+
+
uaarray_like

Complex-valued analytic signal for first Cartesian component (zonal, east-west)

+
+
vaarray_like

Complex-valued analytic signal for second Cartesian component (meridional, north-south)

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
wpnp.ndarray

Complex-valued positive (counterclockwise) rotary signal.

+
+
wnnp.ndarray

Complex-valued negative (clockwise) rotary signal.

+
+
+
+
+

Examples#

+

To obtain the rotary signals from a pair of real-valued signal:

+
>>> u = np.random.rand(99)
+>>> v = np.random.rand(99)
+>>> wp, wn = cartesian_to_rotary(analytic_signal(u), analytic_signal(v))
+
+
+

To specify that the time axis is along the first axis:

+
>>> u = np.random.rand(100, 99)
+>>> v = np.random.rand(100, 99)
+>>> wp, wn = cartesian_to_rotary(analytic_signal(u), analytic_signal(v), time_axis=0)
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]).

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729)

+
+
+

See Also#

+

analytic_signal(), rotary_to_cartesian()

+
+
+ +
+
+clouddrift.signal.ellipse_parameters(xa: ndarray | DataArray, ya: ndarray | DataArray) Tuple[ndarray, ndarray, ndarray, ndarray][source]#
+

Return the instantaneous parameters of a modulated elliptical signal from its analytic Cartesian signals.

+
+

Parameters#

+
+
xaarray_like

Complex-valued analytic signal for first Cartesian component (zonal, east-west).

+
+
yaarray_like

Complex-valued analytic signal for second Cartesian component (meridional, north-south).

+
+
+
+
+

Returns#

+
+
kappanp.ndarray

Ellipse root-mean-square amplitude.

+
+
lambdanp.ndarray

Ellipse linearity between -1 and 1, or departure from circular motion (lambda=0).

+
+
thetanp.ndarray

Ellipse orientation in radian.

+
+
phinp.ndarray

Ellipse phase in radian.

+
+
+
+
+

Examples#

+

To obtain the ellipse parameters from a pair of real-valued signals (x, y):

+
>>> kappa, lambda_, theta, phi = ellipse_parameters(analytic_signal(x), analytic_signal(y))
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape.

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729).

+
+
+

See Also#

+

modulated_ellipse_signal(), analytic_signal(), rotary_to_cartesian(), cartesian_to_rotary()

+
+
+ +
+
+clouddrift.signal.modulated_ellipse_signal(kappa: ndarray | DataArray, lambda_: ndarray | DataArray, theta: ndarray | DataArray, phi: ndarray | DataArray) Tuple[ndarray, ndarray][source]#
+

Return the analytic Cartesian signals (xa, ya) from the instantaneous parameters of a modulated elliptical signal.

+

This function is the inverse of ellipse_parameters().

+
+

Parameters#

+
+
kappaarray_like

Ellipse root-mean-square amplitude.

+
+
lambdaarray_like

Ellipse linearity between -1 and 1, or departure from circular motion (lambda=0).

+
+
thetaarray_like

Ellipse orientation in radian.

+
+
phiarray_like

Ellipse phase in radian.

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
xanp.ndarray

Complex-valued analytic signal for first Cartesian component (zonal, east-west).

+
+
yanp.ndarray

Complex-valued analytic signal for second Cartesian component (meridional, north-south).

+
+
+
+
+

Examples#

+

To obtain the analytic signals from the instantaneous parameters of a modulated elliptical signal:

+
>>> xa, ya = modulated_ellipse_signal(kappa, lambda_, theta, phi)
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape.

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729).

+
+
+

See Also#

+

ellipse_parameters(), analytic_signal(), rotary_to_cartesian(), cartesian_to_rotary()

+
+
+ +
+
+clouddrift.signal.rotary_to_cartesian(wp: ndarray | DataArray, wn: ndarray | DataArray, time_axis: int | None = -1) Tuple[ndarray, ndarray][source]#
+

Return Cartesian analytic signals (ua, va) from rotary signals (wp, wn) +as ua = wp + wn and va = -1j * (wp - wn).

+

This function is the inverse of cartesian_to_rotary().

+
+

Parameters#

+
+
wparray_like

Complex-valued positive (counterclockwise) rotary signal.

+
+
wnarray_like

Complex-valued negative (clockwise) rotary signal.

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
uaarray_like

Complex-valued analytic signal, first Cartesian component (zonal, east-west)

+
+
vaarray_like

Complex-valued analytic signal, second Cartesian component (meridional, north-south)

+
+
+
+
+

Examples#

+

To obtain the Cartesian analytic signals from a pair of rotary signals (wp,wn):

+
>>> ua, va = rotary_to_cartesian(wp, wn)
+
+
+

To specify that the time axis is along the first axis:

+
>>> ua, va = rotary_to_cartesian(wp, wn, time_axis=0)
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]).

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729)

+
+
+

See Also#

+

analytic_signal(), cartesian_to_rotary()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.signal.modulated_ellipse_signal.html b/_autosummary/clouddrift.signal.modulated_ellipse_signal.html new file mode 100644 index 00000000..340acd18 --- /dev/null +++ b/_autosummary/clouddrift.signal.modulated_ellipse_signal.html @@ -0,0 +1,618 @@ + + + + + + + + + + + clouddrift.signal.modulated_ellipse_signal — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.signal.modulated_ellipse_signal

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.signal.modulated_ellipse_signal#

+
+
+clouddrift.signal.modulated_ellipse_signal(kappa: ndarray | DataArray, lambda_: ndarray | DataArray, theta: ndarray | DataArray, phi: ndarray | DataArray) Tuple[ndarray, ndarray][source]#
+

Return the analytic Cartesian signals (xa, ya) from the instantaneous parameters of a modulated elliptical signal.

+

This function is the inverse of ellipse_parameters().

+
+

Parameters#

+
+
kappaarray_like

Ellipse root-mean-square amplitude.

+
+
lambdaarray_like

Ellipse linearity between -1 and 1, or departure from circular motion (lambda=0).

+
+
thetaarray_like

Ellipse orientation in radian.

+
+
phiarray_like

Ellipse phase in radian.

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
xanp.ndarray

Complex-valued analytic signal for first Cartesian component (zonal, east-west).

+
+
yanp.ndarray

Complex-valued analytic signal for second Cartesian component (meridional, north-south).

+
+
+
+
+

Examples#

+

To obtain the analytic signals from the instantaneous parameters of a modulated elliptical signal:

+
>>> xa, ya = modulated_ellipse_signal(kappa, lambda_, theta, phi)
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape.

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729).

+
+
+

See Also#

+

ellipse_parameters(), analytic_signal(), rotary_to_cartesian(), cartesian_to_rotary()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.signal.rotary_to_cartesian.html b/_autosummary/clouddrift.signal.rotary_to_cartesian.html new file mode 100644 index 00000000..72358107 --- /dev/null +++ b/_autosummary/clouddrift.signal.rotary_to_cartesian.html @@ -0,0 +1,620 @@ + + + + + + + + + + + clouddrift.signal.rotary_to_cartesian — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.signal.rotary_to_cartesian

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.signal.rotary_to_cartesian#

+
+
+clouddrift.signal.rotary_to_cartesian(wp: ndarray | DataArray, wn: ndarray | DataArray, time_axis: int | None = -1) Tuple[ndarray, ndarray][source]#
+

Return Cartesian analytic signals (ua, va) from rotary signals (wp, wn) +as ua = wp + wn and va = -1j * (wp - wn).

+

This function is the inverse of cartesian_to_rotary().

+
+

Parameters#

+
+
wparray_like

Complex-valued positive (counterclockwise) rotary signal.

+
+
wnarray_like

Complex-valued negative (clockwise) rotary signal.

+
+
time_axisint, optional

The axis of the time array. Default is -1, which corresponds to the +last axis.

+
+
+
+
+

Returns#

+
+
uaarray_like

Complex-valued analytic signal, first Cartesian component (zonal, east-west)

+
+
vaarray_like

Complex-valued analytic signal, second Cartesian component (meridional, north-south)

+
+
+
+
+

Examples#

+

To obtain the Cartesian analytic signals from a pair of rotary signals (wp,wn):

+
>>> ua, va = rotary_to_cartesian(wp, wn)
+
+
+

To specify that the time axis is along the first axis:

+
>>> ua, va = rotary_to_cartesian(wp, wn, time_axis=0)
+
+
+
+
+

Raises#

+
+
ValueError

If the input arrays do not have the same shape. +If the time axis is outside of the valid range ([-1, N-1]).

+
+
+
+
+

References#

+

Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. +IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729)

+
+
+

See Also#

+

analytic_signal(), cartesian_to_rotary()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.bearing.html b/_autosummary/clouddrift.sphere.bearing.html new file mode 100644 index 00000000..58c45b74 --- /dev/null +++ b/_autosummary/clouddrift.sphere.bearing.html @@ -0,0 +1,607 @@ + + + + + + + + + + + clouddrift.sphere.bearing — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.bearing

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.bearing#

+
+
+clouddrift.sphere.bearing(lon1: float | list | ndarray | DataArray, lat1: float | list | ndarray | DataArray, lon2: float | list | ndarray | DataArray, lat2: float | list | ndarray | DataArray) float | ndarray[source]#
+

Return elementwise initial (forward) bearing in radians from arrays of +latitude and longitude in degrees, based on the spherical law of cosines.

+

The formula is:

+

θ = atan2(cos φ1 ⋅ sin φ2 - sin φ1 ⋅ cos φ2 ⋅ cos Δλ, sin Δλ ⋅ cos φ2)

+

where (φ, λ) is (lat, lon) and θ is bearing, all in radians. +Bearing is defined as zero toward East and positive counterclockwise.

+
+

Parameters#

+
+
lon1float or array-like

Longitudes of the first set of points, in degrees

+
+
lat1float or array-like

Latitudes of the first set of points, in degrees

+
+
lon2float or array-like

Longitudes of the second set of points, in degrees

+
+
lat2float or array-like

Latitudes of the second set of points, in degrees

+
+
+
+
+

Returns#

+
+
thetafloat or np.ndarray

Bearing angles in radians

+
+
+
+
+

Examples#

+

Calculate the bearing of one degree longitude on the equator:

+
>>> bearing(0, 0, 1, 0)
+0.0
+
+
+

Calculate the bearing of 10 degrees longitude at 45-degrees North latitude:

+
>>> bearing(0, 45, 10, 45)
+0.06178508761798218
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.cartesian_to_spherical.html b/_autosummary/clouddrift.sphere.cartesian_to_spherical.html new file mode 100644 index 00000000..66591215 --- /dev/null +++ b/_autosummary/clouddrift.sphere.cartesian_to_spherical.html @@ -0,0 +1,621 @@ + + + + + + + + + + + clouddrift.sphere.cartesian_to_spherical — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.cartesian_to_spherical

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.cartesian_to_spherical#

+
+
+clouddrift.sphere.cartesian_to_spherical(x: float | ndarray | DataArray, y: float | ndarray | DataArray, z: float | ndarray | DataArray) Tuple[ndarray, ndarray][source]#
+

Converts Cartesian three-dimensional coordinates to latitude and longitude on a +spherical body.

+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of the sphere. It is oriented with the +Z-axis passing through the poles and the X-axis passing through +the point lon = 0, lat = 0. This function is inverted by spherical_to_cartesian.

+
+

Parameters#

+
+
xfloat or array-like

x-coordinates in 3D.

+
+
yfloat or array-like

y-coordinates in 3D.

+
+
zfloat or array-like

z-coordinates in 3D.

+
+
+
+
+

Returns#

+
+
lonfloat or array-like

An N-d array of longitudes in degrees in range [-180, 180].

+
+
latfloat or array-like

An N-d array of latitudes in degrees.

+
+
+
+
+

Examples#

+
>>> x = EARTH_RADIUS_METERS * np.cos(np.deg2rad(45))
+>>> y = EARTH_RADIUS_METERS * np.cos(np.deg2rad(45))
+>>> z = 0 * x
+>>> cartesian_to_spherical(x, y, z)
+(44.99999999999985, 0.0)
+
+
+

cartesian_to_spherical is inverted by spherical_to_cartesian:

+
>>> x, y, z = spherical_to_cartesian(np.array([45]),np.array(0))
+>>> cartesian_to_spherical(x, y, z)
+(array([45.]), array([0.]))
+
+
+
+
+

Raises#

+
+
AttributeError

If x, y, and z are not NumPy arrays.

+
+
+
+
+

See Also#

+

spherical_to_cartesian()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.cartesian_to_tangentplane.html b/_autosummary/clouddrift.sphere.cartesian_to_tangentplane.html new file mode 100644 index 00000000..ee45c8a2 --- /dev/null +++ b/_autosummary/clouddrift.sphere.cartesian_to_tangentplane.html @@ -0,0 +1,620 @@ + + + + + + + + + + + clouddrift.sphere.cartesian_to_tangentplane — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.cartesian_to_tangentplane

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.cartesian_to_tangentplane#

+
+
+clouddrift.sphere.cartesian_to_tangentplane(u: float | ndarray, v: float | ndarray, w: float | ndarray, longitude: float | ndarray, latitude: float | ndarray) Tuple[float] | Tuple[ndarray][source]#
+

Project a three-dimensional Cartesian vector on a plane tangent to +a spherical Earth.

+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of a sphere. It is oriented with the +Z-axis passing through the north pole at lat = 90, the X-axis passing through +the point lon = 0, lat = 0, and the Y-axis passing through the point lon = 90, +lat = 0.

+
+

Parameters#

+
+
+
ufloat or np.ndarray

First component of Cartesian vector.

+
+
vfloat or np.ndarray

Second component of Cartesian vector.

+
+
wfloat or np.ndarray

Third component of Cartesian vector.

+
+
longitudefloat or np.ndarray

Longitude in degrees of tangent point of plane.

+
+
latitudefloat or np.ndarray

Latitude in degrees of tangent point of plane.

+
+
+
+
+
+

Returns#

+
+
+
up: float or np.ndarray

First component of projected vector on tangent plane (positive eastward).

+
+
vp: float or np.ndarray

Second component of projected vector on tangent plane (positive northward).

+
+
+
+
+
+

Raises#

+
+
Warning

Raised if the input latitude is not in the expected range [-90, 90].

+
+
+
+
+

Examples#

+
>>> u, v = cartesian_to_tangentplane(1, 1, 1, 45, 90)
+
+
+
+
+

See Also#

+

tangentplane_to_cartesian()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.coriolis_frequency.html b/_autosummary/clouddrift.sphere.coriolis_frequency.html new file mode 100644 index 00000000..819fb76f --- /dev/null +++ b/_autosummary/clouddrift.sphere.coriolis_frequency.html @@ -0,0 +1,589 @@ + + + + + + + + + + + clouddrift.sphere.coriolis_frequency — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.coriolis_frequency

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.coriolis_frequency#

+
+
+clouddrift.sphere.coriolis_frequency(latitude: float | ndarray) float | ndarray[source]#
+

Return the Coriolis frequency or commonly known f parameter in geophysical fluid dynamics.

+
+

Parameters#

+
+
latitudefloat or np.ndarray

Latitude in degrees.

+
+
+
+
+

Returns#

+
+
ffloat or np.ndarray

Signed Coriolis frequency in radian per seconds.

+
+
+
+
+

Examples#

+
>>> f = coriolis_frequency(np.array([0, 45, 90]))
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.cumulative_distance.html b/_autosummary/clouddrift.sphere.cumulative_distance.html new file mode 100644 index 00000000..7ce7051d --- /dev/null +++ b/_autosummary/clouddrift.sphere.cumulative_distance.html @@ -0,0 +1,597 @@ + + + + + + + + + + + clouddrift.sphere.cumulative_distance — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.cumulative_distance

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.cumulative_distance#

+
+
+clouddrift.sphere.cumulative_distance(longitude: list | ndarray | DataArray, latitude: list | ndarray | DataArray) ndarray[source]#
+

Return the cumulative great circle distance in meters along a sequence of geographical locations.

+
+

Parameters#

+
+
latitudearray-like

Latitude sequence, in degrees.

+
+
longitudearray-like

Longitude sequence, in degrees.

+
+
+
+
+

Returns#

+
+
outnp.ndarray

Cumulative distance.

+
+
+
+
+

See Also#

+

distance()

+
+
+

Examples#

+

Calculate the cumulative distance in meters along a path of three points:

+
>>> cumulative_distance(np.array([0, 1, 2]), np.array([0, 1, 2]))
+array([     0.        , 157424.62387233, 314825.27182116])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.distance.html b/_autosummary/clouddrift.sphere.distance.html new file mode 100644 index 00000000..1d65aea8 --- /dev/null +++ b/_autosummary/clouddrift.sphere.distance.html @@ -0,0 +1,612 @@ + + + + + + + + + + + clouddrift.sphere.distance — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.distance

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.distance#

+
+
+clouddrift.sphere.distance(lon1: float | list | ndarray | DataArray, lat1: float | list | ndarray | DataArray, lon2: float | list | ndarray | DataArray, lat2: float | list | ndarray | DataArray) float | ndarray[source]#
+

Return elementwise great circle distance in meters between one or more +points from arrays of their latitudes and longitudes, using the Haversine +formula.

+

d = 2⋅r⋅asin √[sin²(Δφ/2) + cos φ1 ⋅ cos φ2 ⋅ sin²(Δλ/2)]

+

where (φ, λ) is (lat, lon) in radians and r is the radius of the sphere in +meters.

+
+

Parameters#

+
+
lon1np.ndarray

Longitudes of the first set of points, in degrees

+
+
lat1np.ndarray

Latitudes of the first set of points, in degrees

+
+
lon2np.ndarray

Longitudes of the second set of points, in degrees

+
+
lat2np.ndarray

Latitudes of the second set of points, in degrees

+
+
+
+
+

Returns#

+
+
outnp.ndarray

Great circle distance

+
+
+
+
+

Examples#

+

Calculate the distance of one degree longitude on the equator:

+
>>> distance(0, 0, 1, 0)
+111318.84502145034
+
+
+

Calculate the distance of one degree longitude at 45-degrees North latitude:

+
>>> distance(0, 45, 1, 45)
+78713.81064540472
+
+
+

You can also pass array-like inputs to calculate an array of distances:

+
>>> distance([0, 0], [0, 45], [0, 1], [1, 45])
+array([111318.84502145,  78713.8106454 ])
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.html b/_autosummary/clouddrift.sphere.html new file mode 100644 index 00000000..6d409300 --- /dev/null +++ b/_autosummary/clouddrift.sphere.html @@ -0,0 +1,1317 @@ + + + + + + + + + + + clouddrift.sphere — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.sphere#

+

This module provides functions for spherical geometry calculations.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

bearing(lon1, lat1, lon2, lat2)

Return elementwise initial (forward) bearing in radians from arrays of latitude and longitude in degrees, based on the spherical law of cosines.

cartesian_to_spherical(x, y, z)

Converts Cartesian three-dimensional coordinates to latitude and longitude on a spherical body.

cartesian_to_tangentplane(u, v, w, ...)

Project a three-dimensional Cartesian vector on a plane tangent to a spherical Earth.

coriolis_frequency(latitude)

Return the Coriolis frequency or commonly known f parameter in geophysical fluid dynamics.

cumulative_distance(longitude, latitude)

Return the cumulative great circle distance in meters along a sequence of geographical locations.

distance(lon1, lat1, lon2, lat2)

Return elementwise great circle distance in meters between one or more points from arrays of their latitudes and longitudes, using the Haversine formula.

plane_to_sphere(x, y[, lon_origin, lat_origin])

Convert Cartesian coordinates on a plane to spherical coordinates.

position_from_distance_and_bearing(lon, lat, ...)

Return elementwise new position in degrees from arrays of latitude and longitude in degrees, distance in meters, and bearing in radians, based on the spherical law of cosines.

recast_lon(lon[, lon0])

Recast (convert) longitude values to a selected range of 360 degrees starting from lon0.

recast_lon180(lon)

Recast (convert) longitude values to the range [-180, 180[.

recast_lon360(lon)

Recast (convert) longitude values to the range [0, 360[.

sphere_to_plane(lon, lat[, lon_origin, ...])

Convert spherical coordinates to a tangent (Cartesian) plane.

spherical_to_cartesian(lon, lat[, radius])

Converts latitude and longitude on a spherical body to

tangentplane_to_cartesian(up, vp, longitude, ...)

Return the three-dimensional Cartesian components of a vector contained in a plane tangent to a spherical Earth.

+
+
+clouddrift.sphere.bearing(lon1: float | list | ndarray | DataArray, lat1: float | list | ndarray | DataArray, lon2: float | list | ndarray | DataArray, lat2: float | list | ndarray | DataArray) float | ndarray[source]#
+

Return elementwise initial (forward) bearing in radians from arrays of +latitude and longitude in degrees, based on the spherical law of cosines.

+

The formula is:

+

θ = atan2(cos φ1 ⋅ sin φ2 - sin φ1 ⋅ cos φ2 ⋅ cos Δλ, sin Δλ ⋅ cos φ2)

+

where (φ, λ) is (lat, lon) and θ is bearing, all in radians. +Bearing is defined as zero toward East and positive counterclockwise.

+
+

Parameters#

+
+
lon1float or array-like

Longitudes of the first set of points, in degrees

+
+
lat1float or array-like

Latitudes of the first set of points, in degrees

+
+
lon2float or array-like

Longitudes of the second set of points, in degrees

+
+
lat2float or array-like

Latitudes of the second set of points, in degrees

+
+
+
+
+

Returns#

+
+
thetafloat or np.ndarray

Bearing angles in radians

+
+
+
+
+

Examples#

+

Calculate the bearing of one degree longitude on the equator:

+
>>> bearing(0, 0, 1, 0)
+0.0
+
+
+

Calculate the bearing of 10 degrees longitude at 45-degrees North latitude:

+
>>> bearing(0, 45, 10, 45)
+0.06178508761798218
+
+
+
+
+ +
+
+clouddrift.sphere.cartesian_to_spherical(x: float | ndarray | DataArray, y: float | ndarray | DataArray, z: float | ndarray | DataArray) Tuple[ndarray, ndarray][source]#
+

Converts Cartesian three-dimensional coordinates to latitude and longitude on a +spherical body.

+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of the sphere. It is oriented with the +Z-axis passing through the poles and the X-axis passing through +the point lon = 0, lat = 0. This function is inverted by spherical_to_cartesian.

+
+

Parameters#

+
+
xfloat or array-like

x-coordinates in 3D.

+
+
yfloat or array-like

y-coordinates in 3D.

+
+
zfloat or array-like

z-coordinates in 3D.

+
+
+
+
+

Returns#

+
+
lonfloat or array-like

An N-d array of longitudes in degrees in range [-180, 180].

+
+
latfloat or array-like

An N-d array of latitudes in degrees.

+
+
+
+
+

Examples#

+
>>> x = EARTH_RADIUS_METERS * np.cos(np.deg2rad(45))
+>>> y = EARTH_RADIUS_METERS * np.cos(np.deg2rad(45))
+>>> z = 0 * x
+>>> cartesian_to_spherical(x, y, z)
+(44.99999999999985, 0.0)
+
+
+

cartesian_to_spherical is inverted by spherical_to_cartesian:

+
>>> x, y, z = spherical_to_cartesian(np.array([45]),np.array(0))
+>>> cartesian_to_spherical(x, y, z)
+(array([45.]), array([0.]))
+
+
+
+
+

Raises#

+
+
AttributeError

If x, y, and z are not NumPy arrays.

+
+
+
+
+

See Also#

+

spherical_to_cartesian()

+
+
+ +
+
+clouddrift.sphere.cartesian_to_tangentplane(u: float | ndarray, v: float | ndarray, w: float | ndarray, longitude: float | ndarray, latitude: float | ndarray) Tuple[float] | Tuple[ndarray][source]#
+

Project a three-dimensional Cartesian vector on a plane tangent to +a spherical Earth.

+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of a sphere. It is oriented with the +Z-axis passing through the north pole at lat = 90, the X-axis passing through +the point lon = 0, lat = 0, and the Y-axis passing through the point lon = 90, +lat = 0.

+
+

Parameters#

+
+
+
ufloat or np.ndarray

First component of Cartesian vector.

+
+
vfloat or np.ndarray

Second component of Cartesian vector.

+
+
wfloat or np.ndarray

Third component of Cartesian vector.

+
+
longitudefloat or np.ndarray

Longitude in degrees of tangent point of plane.

+
+
latitudefloat or np.ndarray

Latitude in degrees of tangent point of plane.

+
+
+
+
+
+

Returns#

+
+
+
up: float or np.ndarray

First component of projected vector on tangent plane (positive eastward).

+
+
vp: float or np.ndarray

Second component of projected vector on tangent plane (positive northward).

+
+
+
+
+
+

Raises#

+
+
Warning

Raised if the input latitude is not in the expected range [-90, 90].

+
+
+
+
+

Examples#

+
>>> u, v = cartesian_to_tangentplane(1, 1, 1, 45, 90)
+
+
+
+
+

See Also#

+

tangentplane_to_cartesian()

+
+
+ +
+
+clouddrift.sphere.coriolis_frequency(latitude: float | ndarray) float | ndarray[source]#
+

Return the Coriolis frequency or commonly known f parameter in geophysical fluid dynamics.

+
+

Parameters#

+
+
latitudefloat or np.ndarray

Latitude in degrees.

+
+
+
+
+

Returns#

+
+
ffloat or np.ndarray

Signed Coriolis frequency in radian per seconds.

+
+
+
+
+

Examples#

+
>>> f = coriolis_frequency(np.array([0, 45, 90]))
+
+
+
+
+ +
+
+clouddrift.sphere.cumulative_distance(longitude: list | ndarray | DataArray, latitude: list | ndarray | DataArray) ndarray[source]#
+

Return the cumulative great circle distance in meters along a sequence of geographical locations.

+
+

Parameters#

+
+
latitudearray-like

Latitude sequence, in degrees.

+
+
longitudearray-like

Longitude sequence, in degrees.

+
+
+
+
+

Returns#

+
+
outnp.ndarray

Cumulative distance.

+
+
+
+
+

See Also#

+

distance()

+
+
+

Examples#

+

Calculate the cumulative distance in meters along a path of three points:

+
>>> cumulative_distance(np.array([0, 1, 2]), np.array([0, 1, 2]))
+array([     0.        , 157424.62387233, 314825.27182116])
+
+
+
+
+ +
+
+clouddrift.sphere.distance(lon1: float | list | ndarray | DataArray, lat1: float | list | ndarray | DataArray, lon2: float | list | ndarray | DataArray, lat2: float | list | ndarray | DataArray) float | ndarray[source]#
+

Return elementwise great circle distance in meters between one or more +points from arrays of their latitudes and longitudes, using the Haversine +formula.

+

d = 2⋅r⋅asin √[sin²(Δφ/2) + cos φ1 ⋅ cos φ2 ⋅ sin²(Δλ/2)]

+

where (φ, λ) is (lat, lon) in radians and r is the radius of the sphere in +meters.

+
+

Parameters#

+
+
lon1np.ndarray

Longitudes of the first set of points, in degrees

+
+
lat1np.ndarray

Latitudes of the first set of points, in degrees

+
+
lon2np.ndarray

Longitudes of the second set of points, in degrees

+
+
lat2np.ndarray

Latitudes of the second set of points, in degrees

+
+
+
+
+

Returns#

+
+
outnp.ndarray

Great circle distance

+
+
+
+
+

Examples#

+

Calculate the distance of one degree longitude on the equator:

+
>>> distance(0, 0, 1, 0)
+111318.84502145034
+
+
+

Calculate the distance of one degree longitude at 45-degrees North latitude:

+
>>> distance(0, 45, 1, 45)
+78713.81064540472
+
+
+

You can also pass array-like inputs to calculate an array of distances:

+
>>> distance([0, 0], [0, 45], [0, 1], [1, 45])
+array([111318.84502145,  78713.8106454 ])
+
+
+
+
+ +
+
+clouddrift.sphere.plane_to_sphere(x: ndarray, y: ndarray, lon_origin: float = 0, lat_origin: float = 0) Tuple[ndarray, ndarray][source]#
+

Convert Cartesian coordinates on a plane to spherical coordinates.

+

The arrays of input zonal and meridional displacements x and y are +assumed to follow a contiguous trajectory. The spherical coordinate of each +successive point is determined by following a great circle path from the +previous point. The spherical coordinate of the first point is determined by +following a great circle path from the origin, by default (0, 0).

+

The output arrays have the same floating-point output type as the input.

+

If projecting multiple trajectories onto the same plane, use +apply_ragged() for highest accuracy.

+
+

Parameters#

+
+
xnp.ndarray

An N-d array of zonal displacements in meters

+
+
ynp.ndarray

An N-d array of meridional displacements in meters

+
+
lon_originfloat, optional

Origin longitude of the tangent plane in degrees, default 0

+
+
lat_originfloat, optional

Origin latitude of the tangent plane in degrees, default 0

+
+
+
+
+

Returns#

+
+
lonnp.ndarray

Longitude in degrees

+
+
latnp.ndarray

Latitude in degrees

+
+
+
+
+

Examples#

+
>>> plane_to_sphere(np.array([0., 0.]), np.array([0., 1000.]))
+(array([0.00000000e+00, 5.50062664e-19]), array([0.       , 0.0089832]))
+
+
+

You can also specify an origin longitude and latitude:

+
>>> plane_to_sphere(np.array([0., 0.]), np.array([0., 1000.]), lon_origin=1, lat_origin=0)
+(array([1., 1.]), array([0.       , 0.0089832]))
+
+
+
+
+

Raises#

+
+
AttributeError

If x and y are not NumPy arrays

+
+
+
+
+

See Also#

+

sphere_to_plane()

+
+
+ +
+
+clouddrift.sphere.position_from_distance_and_bearing(lon: float, lat: float, distance: float, bearing: float) Tuple[float, float][source]#
+

Return elementwise new position in degrees from arrays of latitude and +longitude in degrees, distance in meters, and bearing in radians, based on +the spherical law of cosines.

+

The formula is:

+

φ2 = asin( sin φ1 ⋅ cos δ + cos φ1 ⋅ sin δ ⋅ cos θ ) +λ2 = λ1 + atan2( sin θ ⋅ sin δ ⋅ cos φ1, cos δ − sin φ1 ⋅ sin φ2 )

+

where (φ, λ) is (lat, lon) and θ is bearing, all in radians. +Bearing is defined as zero toward East and positive counterclockwise.

+
+

Parameters#

+
+
lonfloat

Longitude of the first set of points, in degrees

+
+
latfloat

Latitude of the first set of points, in degrees

+
+
distancearray_like

Distance in meters

+
+
bearingarray_like

Bearing angles in radians

+
+
+
+
+

Returns#

+
+
lon2array_like

Latitudes of the second set of points, in degrees, in the range [-90, 90]

+
+
lat2array_like

Longitudes of the second set of points, in degrees, in the range [-180, 180]

+
+
+
+
+

Examples#

+

Calculate the position of one degree longitude distance on the equator:

+
>>> position_from_distance_and_bearing(0, 0, 111318.84502145034, 0)
+(1.0, 0.0)
+
+
+

Calculate the position of one degree latitude distance from 45 degrees North latitude:

+
>>> position_from_distance_and_bearing(0, 45, 111318.84502145034, np.pi / 2)
+(8.81429402840006e-17, 45.99999999999999)
+
+
+
+
+ +
+
+clouddrift.sphere.recast_lon(lon: ndarray, lon0: float | None = -180) ndarray[source]#
+

Recast (convert) longitude values to a selected range of 360 degrees +starting from lon0.

+
+

Parameters#

+
+
lonnp.ndarray or float

An N-d array of longitudes in degrees

+
+
lon0float, optional

Starting longitude of the recasted range (default -180).

+
+
+
+
+

Returns#

+
+
np.ndarray or float

Converted longitudes in the range [lon0, lon0+360[

+
+
+
+
+

Examples#

+

By default, recast_lon converts longitude values to the range +[-180, 180[:

+
>>> recast_lon(200)
+-160
+
+
+
>>> recast_lon(180)
+-180
+
+
+

The range of the output longitude is controlled by lon0. +For example, with lon0 = 0, the longitude values are converted to the +range [0, 360[.

+
>>> recast_lon(200, -180)
+-160
+
+
+

With lon0 = 20, longitude values are converted to range [20, 380], +which can be useful to avoid cutting the major ocean basins.

+
>>> recast_lon(10, 20)
+370
+
+
+
+
+

See Also#

+

recast_lon360(), recast_lon180()

+
+
+ +
+
+clouddrift.sphere.recast_lon180(lon: ndarray) ndarray[source]#
+

Recast (convert) longitude values to the range [-180, 180[. +This is a convenience wrapper around recast_lon() with lon0 = -180.

+
+

Parameters#

+
+
lonnp.ndarray

An N-d array of longitudes in degrees

+
+
+
+
+

Returns#

+
+
np.ndarray

Converted longitudes in the range [-180, 180[

+
+
+
+
+

Examples#

+
>>> recast_lon180(200)
+-160
+
+
+
>>> recast_lon180(-200)
+160
+
+
+
+
+

See Also#

+

recast_lon(), recast_lon360()

+
+
+ +
+
+clouddrift.sphere.recast_lon360(lon: ndarray) ndarray[source]#
+

Recast (convert) longitude values to the range [0, 360[. +This is a convenience wrapper around recast_lon() with lon0 = 0.

+
+

Parameters#

+
+
lonnp.ndarray

An N-d array of longitudes in degrees

+
+
+
+
+

Returns#

+
+
np.ndarray

Converted longitudes in the range [0, 360[

+
+
+
+
+

Examples#

+
>>> recast_lon360(200)
+200
+
+
+
>>> recast_lon360(-200)
+160
+
+
+
+
+

See Also#

+

recast_lon(), recast_lon180()

+
+
+ +
+
+clouddrift.sphere.sphere_to_plane(lon: ndarray, lat: ndarray, lon_origin: float = 0, lat_origin: float = 0) Tuple[ndarray, ndarray][source]#
+

Convert spherical coordinates to a tangent (Cartesian) plane.

+

The arrays of input longitudes and latitudes are assumed to be following +a contiguous trajectory. The Cartesian coordinate of each successive point +is determined by following a great circle path from the previous point. +The Cartesian coordinate of the first point is determined by following a +great circle path from the origin, by default (0, 0).

+

The output arrays have the same floating-point output type as the input.

+

If projecting multiple trajectories onto the same plane, use +apply_ragged() for highest accuracy.

+
+

Parameters#

+
+
lonnp.ndarray

An N-d array of longitudes in degrees

+
+
latnp.ndarray

An N-d array of latitudes in degrees

+
+
lon_originfloat, optional

Origin longitude of the tangent plane in degrees, default 0

+
+
lat_originfloat, optional

Origin latitude of the tangent plane in degrees, default 0

+
+
+
+
+

Returns#

+
+
xnp.ndarray

x-coordinates on the tangent plane

+
+
ynp.ndarray

y-coordinates on the tangent plane

+
+
+
+
+

Examples#

+
>>> sphere_to_plane(np.array([0., 1.]), np.array([0., 0.]))
+(array([     0.        , 111318.84502145]), array([0., 0.]))
+
+
+

You can also specify an origin longitude and latitude:

+
>>> sphere_to_plane(np.array([0., 1.]), np.array([0., 0.]), lon_origin=1, lat_origin=0)
+(array([-111318.84502145,       0.        ]),
+ array([1.36326267e-11, 1.36326267e-11]))
+
+
+
+
+

Raises#

+
+
AttributeError

If lon and lat are not NumPy arrays

+
+
+
+
+

See Also#

+

plane_to_sphere()

+
+
+ +
+
+clouddrift.sphere.spherical_to_cartesian(lon: float | list | ndarray | DataArray, lat: float | list | ndarray | DataArray, radius: float | None = 6378100.0) Tuple[ndarray, ndarray, ndarray][source]#
+
+
Converts latitude and longitude on a spherical body to

three-dimensional Cartesian coordinates.

+
+
+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of a sphere. It is oriented with the +Z-axis passing through the poles and the X-axis passing through +the point lon = 0, lat = 0. This function is inverted by +cartesian_to_spherical().

+
+

Parameters#

+
+
lonarray-like

An N-d array of longitudes in degrees.

+
+
latarray-like

An N-d array of latitudes in degrees.

+
+
radius: float, optional

The radius of the spherical body in meters. The default assumes the Earth with +EARTH_RADIUS_METERS = 6.3781e6.

+
+
+
+
+

Returns#

+
+
xfloat or array-like

x-coordinates in 3D in meters.

+
+
yfloat or array-like

y-coordinates in 3D in meters.

+
+
zfloat or array-like

z-coordinates in 3D in meters.

+
+
+
+
+

Examples#

+
>>> spherical_to_cartesian(np.array([0, 45]), np.array([0, 45]))
+(array([6378100., 3189050.]),
+array([      0., 3189050.]),
+array([      0.        , 4509997.76108592]))
+
+
+
>>> spherical_to_cartesian(np.array([0, 45, 90]), np.array([0, 90, 180]), radius=1)
+(array([ 1.00000000e+00,  4.32978028e-17, -6.12323400e-17]),
+array([ 0.00000000e+00,  4.32978028e-17, -1.00000000e+00]),
+array([0.0000000e+00, 1.0000000e+00, 1.2246468e-16]))
+
+
+
>>> x, y, z = spherical_to_cartesian(np.array([0, 5]), np.array([0, 5]))
+
+
+
+
+

Raises#

+
+
AttributeError

If lon and lat are not NumPy arrays.

+
+
+
+
+

See Also#

+

cartesian_to_spherical()

+
+
+ +
+
+clouddrift.sphere.tangentplane_to_cartesian(up: float | ndarray, vp: float | ndarray, longitude: float | ndarray, latitude: float | ndarray) Tuple[float] | Tuple[ndarray][source]#
+

Return the three-dimensional Cartesian components of a vector contained in +a plane tangent to a spherical Earth.

+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of a sphere. It is oriented with the +Z-axis passing through the north pole at lat = 90, the X-axis passing through +the point lon = 0, lat = 0, and the Y-axis passing through the point lon = 90, +lat = 0.

+
+

Parameters#

+
+
+
up: float or np.ndarray

First component of vector on tangent plane (positive eastward).

+
+
vp: float or np.ndarray

Second component of vector on tangent plane (positive northward).

+
+
longitudefloat or np.ndarray

Longitude in degrees of tangent point of plane.

+
+
latitudefloat or np.ndarray

Latitude in degrees of tangent point of plane.

+
+
+
+
+
+

Returns#

+
+
+
ufloat or np.ndarray

First component of Cartesian vector.

+
+
vfloat or np.ndarray

Second component of Cartesian vector.

+
+
wfloat or np.ndarray

Third component of Cartesian vector.

+
+
+
+
+
+

Examples#

+
>>> u, v, w = tangentplane_to_cartesian(1, 1, 45, 90)
+
+
+
+
+

Notes#

+

This function is inverted by cartesian_to_tangetplane().

+
+
+

See Also#

+

cartesian_to_tangentplane()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.plane_to_sphere.html b/_autosummary/clouddrift.sphere.plane_to_sphere.html new file mode 100644 index 00000000..3c4ec110 --- /dev/null +++ b/_autosummary/clouddrift.sphere.plane_to_sphere.html @@ -0,0 +1,622 @@ + + + + + + + + + + + clouddrift.sphere.plane_to_sphere — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.plane_to_sphere

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.plane_to_sphere#

+
+
+clouddrift.sphere.plane_to_sphere(x: ndarray, y: ndarray, lon_origin: float = 0, lat_origin: float = 0) Tuple[ndarray, ndarray][source]#
+

Convert Cartesian coordinates on a plane to spherical coordinates.

+

The arrays of input zonal and meridional displacements x and y are +assumed to follow a contiguous trajectory. The spherical coordinate of each +successive point is determined by following a great circle path from the +previous point. The spherical coordinate of the first point is determined by +following a great circle path from the origin, by default (0, 0).

+

The output arrays have the same floating-point output type as the input.

+

If projecting multiple trajectories onto the same plane, use +apply_ragged() for highest accuracy.

+
+

Parameters#

+
+
xnp.ndarray

An N-d array of zonal displacements in meters

+
+
ynp.ndarray

An N-d array of meridional displacements in meters

+
+
lon_originfloat, optional

Origin longitude of the tangent plane in degrees, default 0

+
+
lat_originfloat, optional

Origin latitude of the tangent plane in degrees, default 0

+
+
+
+
+

Returns#

+
+
lonnp.ndarray

Longitude in degrees

+
+
latnp.ndarray

Latitude in degrees

+
+
+
+
+

Examples#

+
>>> plane_to_sphere(np.array([0., 0.]), np.array([0., 1000.]))
+(array([0.00000000e+00, 5.50062664e-19]), array([0.       , 0.0089832]))
+
+
+

You can also specify an origin longitude and latitude:

+
>>> plane_to_sphere(np.array([0., 0.]), np.array([0., 1000.]), lon_origin=1, lat_origin=0)
+(array([1., 1.]), array([0.       , 0.0089832]))
+
+
+
+
+

Raises#

+
+
AttributeError

If x and y are not NumPy arrays

+
+
+
+
+

See Also#

+

sphere_to_plane()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.position_from_distance_and_bearing.html b/_autosummary/clouddrift.sphere.position_from_distance_and_bearing.html new file mode 100644 index 00000000..65a6c442 --- /dev/null +++ b/_autosummary/clouddrift.sphere.position_from_distance_and_bearing.html @@ -0,0 +1,611 @@ + + + + + + + + + + + clouddrift.sphere.position_from_distance_and_bearing — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.position_from_distance_and_bearing

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.position_from_distance_and_bearing#

+
+
+clouddrift.sphere.position_from_distance_and_bearing(lon: float, lat: float, distance: float, bearing: float) Tuple[float, float][source]#
+

Return elementwise new position in degrees from arrays of latitude and +longitude in degrees, distance in meters, and bearing in radians, based on +the spherical law of cosines.

+

The formula is:

+

φ2 = asin( sin φ1 ⋅ cos δ + cos φ1 ⋅ sin δ ⋅ cos θ ) +λ2 = λ1 + atan2( sin θ ⋅ sin δ ⋅ cos φ1, cos δ − sin φ1 ⋅ sin φ2 )

+

where (φ, λ) is (lat, lon) and θ is bearing, all in radians. +Bearing is defined as zero toward East and positive counterclockwise.

+
+

Parameters#

+
+
lonfloat

Longitude of the first set of points, in degrees

+
+
latfloat

Latitude of the first set of points, in degrees

+
+
distancearray_like

Distance in meters

+
+
bearingarray_like

Bearing angles in radians

+
+
+
+
+

Returns#

+
+
lon2array_like

Latitudes of the second set of points, in degrees, in the range [-90, 90]

+
+
lat2array_like

Longitudes of the second set of points, in degrees, in the range [-180, 180]

+
+
+
+
+

Examples#

+

Calculate the position of one degree longitude distance on the equator:

+
>>> position_from_distance_and_bearing(0, 0, 111318.84502145034, 0)
+(1.0, 0.0)
+
+
+

Calculate the position of one degree latitude distance from 45 degrees North latitude:

+
>>> position_from_distance_and_bearing(0, 45, 111318.84502145034, np.pi / 2)
+(8.81429402840006e-17, 45.99999999999999)
+
+
+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.recast_lon.html b/_autosummary/clouddrift.sphere.recast_lon.html new file mode 100644 index 00000000..88a1a881 --- /dev/null +++ b/_autosummary/clouddrift.sphere.recast_lon.html @@ -0,0 +1,616 @@ + + + + + + + + + + + clouddrift.sphere.recast_lon — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.recast_lon

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.recast_lon#

+
+
+clouddrift.sphere.recast_lon(lon: ndarray, lon0: float | None = -180) ndarray[source]#
+

Recast (convert) longitude values to a selected range of 360 degrees +starting from lon0.

+
+

Parameters#

+
+
lonnp.ndarray or float

An N-d array of longitudes in degrees

+
+
lon0float, optional

Starting longitude of the recasted range (default -180).

+
+
+
+
+

Returns#

+
+
np.ndarray or float

Converted longitudes in the range [lon0, lon0+360[

+
+
+
+
+

Examples#

+

By default, recast_lon converts longitude values to the range +[-180, 180[:

+
>>> recast_lon(200)
+-160
+
+
+
>>> recast_lon(180)
+-180
+
+
+

The range of the output longitude is controlled by lon0. +For example, with lon0 = 0, the longitude values are converted to the +range [0, 360[.

+
>>> recast_lon(200, -180)
+-160
+
+
+

With lon0 = 20, longitude values are converted to range [20, 380], +which can be useful to avoid cutting the major ocean basins.

+
>>> recast_lon(10, 20)
+370
+
+
+
+
+

See Also#

+

recast_lon360(), recast_lon180()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.recast_lon180.html b/_autosummary/clouddrift.sphere.recast_lon180.html new file mode 100644 index 00000000..c1c1f347 --- /dev/null +++ b/_autosummary/clouddrift.sphere.recast_lon180.html @@ -0,0 +1,599 @@ + + + + + + + + + + + clouddrift.sphere.recast_lon180 — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.recast_lon180

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.recast_lon180#

+
+
+clouddrift.sphere.recast_lon180(lon: ndarray) ndarray[source]#
+

Recast (convert) longitude values to the range [-180, 180[. +This is a convenience wrapper around recast_lon() with lon0 = -180.

+
+

Parameters#

+
+
lonnp.ndarray

An N-d array of longitudes in degrees

+
+
+
+
+

Returns#

+
+
np.ndarray

Converted longitudes in the range [-180, 180[

+
+
+
+
+

Examples#

+
>>> recast_lon180(200)
+-160
+
+
+
>>> recast_lon180(-200)
+160
+
+
+
+
+

See Also#

+

recast_lon(), recast_lon360()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.recast_lon360.html b/_autosummary/clouddrift.sphere.recast_lon360.html new file mode 100644 index 00000000..2818e12e --- /dev/null +++ b/_autosummary/clouddrift.sphere.recast_lon360.html @@ -0,0 +1,599 @@ + + + + + + + + + + + clouddrift.sphere.recast_lon360 — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.recast_lon360

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.recast_lon360#

+
+
+clouddrift.sphere.recast_lon360(lon: ndarray) ndarray[source]#
+

Recast (convert) longitude values to the range [0, 360[. +This is a convenience wrapper around recast_lon() with lon0 = 0.

+
+

Parameters#

+
+
lonnp.ndarray

An N-d array of longitudes in degrees

+
+
+
+
+

Returns#

+
+
np.ndarray

Converted longitudes in the range [0, 360[

+
+
+
+
+

Examples#

+
>>> recast_lon360(200)
+200
+
+
+
>>> recast_lon360(-200)
+160
+
+
+
+
+

See Also#

+

recast_lon(), recast_lon180()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.sphere_to_plane.html b/_autosummary/clouddrift.sphere.sphere_to_plane.html new file mode 100644 index 00000000..64a26e0e --- /dev/null +++ b/_autosummary/clouddrift.sphere.sphere_to_plane.html @@ -0,0 +1,623 @@ + + + + + + + + + + + clouddrift.sphere.sphere_to_plane — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.sphere_to_plane

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.sphere_to_plane#

+
+
+clouddrift.sphere.sphere_to_plane(lon: ndarray, lat: ndarray, lon_origin: float = 0, lat_origin: float = 0) Tuple[ndarray, ndarray][source]#
+

Convert spherical coordinates to a tangent (Cartesian) plane.

+

The arrays of input longitudes and latitudes are assumed to be following +a contiguous trajectory. The Cartesian coordinate of each successive point +is determined by following a great circle path from the previous point. +The Cartesian coordinate of the first point is determined by following a +great circle path from the origin, by default (0, 0).

+

The output arrays have the same floating-point output type as the input.

+

If projecting multiple trajectories onto the same plane, use +apply_ragged() for highest accuracy.

+
+

Parameters#

+
+
lonnp.ndarray

An N-d array of longitudes in degrees

+
+
latnp.ndarray

An N-d array of latitudes in degrees

+
+
lon_originfloat, optional

Origin longitude of the tangent plane in degrees, default 0

+
+
lat_originfloat, optional

Origin latitude of the tangent plane in degrees, default 0

+
+
+
+
+

Returns#

+
+
xnp.ndarray

x-coordinates on the tangent plane

+
+
ynp.ndarray

y-coordinates on the tangent plane

+
+
+
+
+

Examples#

+
>>> sphere_to_plane(np.array([0., 1.]), np.array([0., 0.]))
+(array([     0.        , 111318.84502145]), array([0., 0.]))
+
+
+

You can also specify an origin longitude and latitude:

+
>>> sphere_to_plane(np.array([0., 1.]), np.array([0., 0.]), lon_origin=1, lat_origin=0)
+(array([-111318.84502145,       0.        ]),
+ array([1.36326267e-11, 1.36326267e-11]))
+
+
+
+
+

Raises#

+
+
AttributeError

If lon and lat are not NumPy arrays

+
+
+
+
+

See Also#

+

plane_to_sphere()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.spherical_to_cartesian.html b/_autosummary/clouddrift.sphere.spherical_to_cartesian.html new file mode 100644 index 00000000..41bddf1a --- /dev/null +++ b/_autosummary/clouddrift.sphere.spherical_to_cartesian.html @@ -0,0 +1,629 @@ + + + + + + + + + + + clouddrift.sphere.spherical_to_cartesian — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.spherical_to_cartesian

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.spherical_to_cartesian#

+
+
+clouddrift.sphere.spherical_to_cartesian(lon: float | list | ndarray | DataArray, lat: float | list | ndarray | DataArray, radius: float | None = 6378100.0) Tuple[ndarray, ndarray, ndarray][source]#
+
+
Converts latitude and longitude on a spherical body to

three-dimensional Cartesian coordinates.

+
+
+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of a sphere. It is oriented with the +Z-axis passing through the poles and the X-axis passing through +the point lon = 0, lat = 0. This function is inverted by +cartesian_to_spherical().

+
+

Parameters#

+
+
lonarray-like

An N-d array of longitudes in degrees.

+
+
latarray-like

An N-d array of latitudes in degrees.

+
+
radius: float, optional

The radius of the spherical body in meters. The default assumes the Earth with +EARTH_RADIUS_METERS = 6.3781e6.

+
+
+
+
+

Returns#

+
+
xfloat or array-like

x-coordinates in 3D in meters.

+
+
yfloat or array-like

y-coordinates in 3D in meters.

+
+
zfloat or array-like

z-coordinates in 3D in meters.

+
+
+
+
+

Examples#

+
>>> spherical_to_cartesian(np.array([0, 45]), np.array([0, 45]))
+(array([6378100., 3189050.]),
+array([      0., 3189050.]),
+array([      0.        , 4509997.76108592]))
+
+
+
>>> spherical_to_cartesian(np.array([0, 45, 90]), np.array([0, 90, 180]), radius=1)
+(array([ 1.00000000e+00,  4.32978028e-17, -6.12323400e-17]),
+array([ 0.00000000e+00,  4.32978028e-17, -1.00000000e+00]),
+array([0.0000000e+00, 1.0000000e+00, 1.2246468e-16]))
+
+
+
>>> x, y, z = spherical_to_cartesian(np.array([0, 5]), np.array([0, 5]))
+
+
+
+
+

Raises#

+
+
AttributeError

If lon and lat are not NumPy arrays.

+
+
+
+
+

See Also#

+

cartesian_to_spherical()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.sphere.tangentplane_to_cartesian.html b/_autosummary/clouddrift.sphere.tangentplane_to_cartesian.html new file mode 100644 index 00000000..c534e400 --- /dev/null +++ b/_autosummary/clouddrift.sphere.tangentplane_to_cartesian.html @@ -0,0 +1,617 @@ + + + + + + + + + + + clouddrift.sphere.tangentplane_to_cartesian — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.sphere.tangentplane_to_cartesian

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.sphere.tangentplane_to_cartesian#

+
+
+clouddrift.sphere.tangentplane_to_cartesian(up: float | ndarray, vp: float | ndarray, longitude: float | ndarray, latitude: float | ndarray) Tuple[float] | Tuple[ndarray][source]#
+

Return the three-dimensional Cartesian components of a vector contained in +a plane tangent to a spherical Earth.

+

The Cartesian coordinate system is a right-handed system whose +origin lies at the center of a sphere. It is oriented with the +Z-axis passing through the north pole at lat = 90, the X-axis passing through +the point lon = 0, lat = 0, and the Y-axis passing through the point lon = 90, +lat = 0.

+
+

Parameters#

+
+
+
up: float or np.ndarray

First component of vector on tangent plane (positive eastward).

+
+
vp: float or np.ndarray

Second component of vector on tangent plane (positive northward).

+
+
longitudefloat or np.ndarray

Longitude in degrees of tangent point of plane.

+
+
latitudefloat or np.ndarray

Latitude in degrees of tangent point of plane.

+
+
+
+
+
+

Returns#

+
+
+
ufloat or np.ndarray

First component of Cartesian vector.

+
+
vfloat or np.ndarray

Second component of Cartesian vector.

+
+
wfloat or np.ndarray

Third component of Cartesian vector.

+
+
+
+
+
+

Examples#

+
>>> u, v, w = tangentplane_to_cartesian(1, 1, 45, 90)
+
+
+
+
+

Notes#

+

This function is inverted by cartesian_to_tangetplane().

+
+
+

See Also#

+

cartesian_to_tangentplane()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.html b/_autosummary/clouddrift.wavelet.html new file mode 100644 index 00000000..e05dc56e --- /dev/null +++ b/_autosummary/clouddrift.wavelet.html @@ -0,0 +1,1088 @@ + + + + + + + + + + + clouddrift.wavelet — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + + + + + + +
+ +
+

clouddrift.wavelet#

+

This module provides functions for computing wavelet transforms and time-frequency analyses, +notably using generalized Morse wavelets.

+

The Python code in this module was translated from the MATLAB implementation +by J. M. Lilly in the jWavelet module of jLab (http://jmlilly.net/code.html).

+

Lilly, J. M. (2021), jLab: A data analysis package for Matlab, v.1.7.1, +doi:10.5281/zenodo.4547006, http://www.jmlilly.net/software.

+

jLab is licensed under the Creative Commons Attribution-Noncommercial-ShareAlike +License (https://creativecommons.org/licenses/by-nc-sa/4.0/). The code that is +directly translated from jLab/jWavelet is licensed under the same license. +Any other code that is added to this module and that is specific to Python and +not the MATLAB implementation is licensed under CloudDrift’s MIT license.

+

Functions

+ + + + + + + + + + + + + + + + + + + + + + + + +

morse_amplitude(gamma, beta[, order, ...])

Calculate the amplitude coefficient of the generalized Morse wavelets.

morse_freq(gamma, beta)

Frequency measures for generalized Morse wavelets.

morse_logspace_freq(gamma, beta, length[, ...])

Compute logarithmically-spaced frequencies for generalized Morse wavelets with parameters gamma and beta.

morse_properties(gamma, beta)

Calculate the properties of the demodulated generalized Morse wavelets.

morse_wavelet(length, gamma, beta, ...[, ...])

Compute the generalized Morse wavelets of Olhede and Walden (2002), doi: 10.1109/TSP.2002.804066.

morse_wavelet_transform(x, gamma, beta, ...)

Apply a continuous wavelet transform to an input signal using the generalized Morse wavelets of Olhede and Walden (2002).

wavelet_transform(x, wavelet[, boundary, ...])

Apply a continuous wavelet transform to an input signal using an input wavelet function.

+
+
+clouddrift.wavelet.morse_amplitude(gamma: ndarray | float, beta: ndarray | float, order: int64 | None = 1, normalization: str | None = 'bandpass') float[source]#
+

Calculate the amplitude coefficient of the generalized Morse wavelets. +By default, the amplitude is calculated such that the maximum of the +frequency-domain wavelet is equal to 2, which is the bandpass normalization. +Optionally, specify normalization="energy" in order to return the coefficient +giving the wavelets unit energies. See Lilly and Olhede (2009), doi doi: 10.1109/TSP.2008.2007607.

+
+

Parameters#

+
+
gammanp.ndarray or float

Gamma parameter of the wavelets.

+
+
betanp.ndarray or float

Beta parameter of the wavelets.

+
+
orderint, optional

Order of wavelets, default is 1.

+
+
normalizationstr, optional

Normalization for the wavelets. By default it is assumed to be "bandpass" +which uses a bandpass normalization, meaning that the FFT of the wavelets +have peak value of 2 for all central frequencies radian_frequency. The other option is "energy" +which uses the unit energy normalization. In this last case the time-domain wavelet +energies np.sum(np.abs(wave)**2) are always unity.

+
+
+
+
+

Returns#

+
+
ampnp.ndarray or float

The amplitude coefficient of the wavelets.

+
+
+
+
+

Examples#

+

TODO

+
+
+

See Also#

+

morse_wavelet(), morse_freq(), morse_properties(), morse_logspace_freq().

+
+
+ +
+
+clouddrift.wavelet.morse_freq(gamma: ndarray | float, beta: ndarray | float) Tuple[ndarray] | Tuple[float][source]#
+

Frequency measures for generalized Morse wavelets. This functions calculates +three different measures fm, fe, and fi of the frequency of the lowest-order generalized Morse +wavelet specified by parameters gamma and beta.

+

Note that all frequency quantities here are in radian as in cos(f t) and not +cyclic as in np.cos(2 np.pi f t).

+

For beta=0, the corresponding wavelet becomes an analytic lowpass filter, and fm +is not defined in the usual way but as the point at which the filter has decayed +to one-half of its peak power.

+

For details see Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607.

+
+

Parameters#

+
+
gammanp.ndarray or float

Gamma parameter of the wavelets.

+
+
betanp.ndarray or float

Beta parameter of the wavelets.

+
+
+
+
+

Returns#

+
+
fmnp.ndarray

The modal or peak frequency.

+
+
fenp.ndarray

The energy frequency.

+
+
finp.ndarray

The instantaneous frequency at the wavelets’ centers.

+
+
+
+
+

Examples#

+
>>> fm, fe, fi = morse_freq(3, 4)
+
+
+
>>> morse_freq(3, 4)
+(array(1.10064242), 1.1025129235952809, 1.1077321674324723)
+
+
+
>>> morse_freq(3, np.array([10, 20, 30]))
+(array([1.49380158, 1.88207206, 2.15443469]),
+array([1.49421505, 1.88220264, 2.15450116]),
+array([1.49543843, 1.88259299, 2.15470024]))
+
+
+
>>> morse_freq(np.array([3, 4, 5]), np.array([10, 20, 30]))
+(array([1.49380158, 1.49534878, 1.43096908]),
+array([1.49421505, 1.49080278, 1.4262489 ]),
+array([1.49543843, 1.48652036, 1.42163583]))
+
+
+
>>> morse_freq(np.array([3, 4, 5]), 10)
+(array([1.49380158, 1.25743343, 1.14869835]),
+array([1.49421505, 1.25000964, 1.13759731]),
+array([1.49543843, 1.24350315, 1.12739747]))
+
+
+
+
+

See Also#

+

morse_wavelet(), morse_amplitude()

+
+
+ +
+
+clouddrift.wavelet.morse_logspace_freq(gamma: float, beta: float, length: int, highset: Tuple[float] | None = (0.1, 3.141592653589793), lowset: Tuple[float] | None = (5, 0), density: int | None = 4) ndarray[source]#
+

Compute logarithmically-spaced frequencies for generalized Morse wavelets +with parameters gamma and beta. This is a useful function to obtain the frequencies +needed for time-frequency analyses using wavelets. If radian_frequencies is the +output, np.log(radian_frequencies) is uniformly spaced, following convention +for wavelet analysis. See Lilly (2017), doi: 10.1098/rspa.2016.0776.

+

Default settings to compute the frequencies can be changed by passing optional +arguments lowset, highset, and density. See below.

+
+

Parameters#

+
+
gammafloat

Gamma parameter of the Morse wavelets.

+
+
betafloat

Beta parameter of the Morse wavelets.

+
+
lengthint

Length of the Morse wavelets and input signals.

+
+
highsettuple of floats, optional.

Tuple of values (eta, high) used for high-frequency cutoff calculation. The highest +frequency is set to be the minimum of a specified value and a cutoff frequency +based on a Nyquist overlap condition: the highest frequency is the minimum of +the specified value high, and the largest frequency for which the wavelet will +satisfy the threshold level eta. Here eta must be a number between zero and one +specifying the ratio of a frequency-domain wavelet at the Nyquist frequency +to its peak value. Default is (eta, high) = (0.1, np.pi).

+
+
lowsettuple of floats, optional.

Tuple of values (P, low) set used for low-frequency cutoff calculation based on an +endpoint overlap condition. The lowest frequency is set such that the lowest-frequency +wavelet will reach some number P, called the packing number, times its central window +width at the ends of the time series. A choice of P=1 corresponds to roughly 95% of +the time-domain wavelet energy being contained within the time series endpoints for +a wavelet at the center of the domain. The second value of the tuple is the absolute +lowest frequency. Default is (P, low) = (5, 0).

+
+
densityint, optional

This optional argument controls the number of points in the returned frequency +array. Higher values of density mean more overlap in the frequency +domain between transforms. When density=1, the peak of one wavelet is located at the +half-power points of the adjacent wavelet. The default density=4 means +that four other wavelets will occur between the peak of one wavelet and +its half-power point.

+
+
+
+
+

Returns#

+
+
radian_frequencynp.ndarray

Logarithmically-spaced frequencies in radians cycles per unit time, +sorted in descending order.

+
+
+
+
+

Examples#

+

Generate a frequency array for the generalized Morse wavelet +with parameters gamma=3 and beta=5 for a time series of length n=1024:

+
>>> radian_frequency = morse_logspace_freq(3, 5, 1024)
+>>> radian_frequency = morse_logspace_freq(3, 5, 1024, highset=(0.2, np.pi), lowset=(5, 0))
+>>> radian_frequency = morse_logspace_freq(3, 5, 1024, highset=(0.2, np.pi), lowset=(5, 0), density=10)
+
+
+
+
+

See Also#

+

morse_wavelet(), morse_freq(), morse_properties()

+
+
+ +
+
+clouddrift.wavelet.morse_properties(gamma: ndarray | float, beta: ndarray | float) Tuple[ndarray] | Tuple[float][source]#
+

Calculate the properties of the demodulated generalized Morse wavelets. +See Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607.

+
+

Parameters#

+
+
gammanp.ndarray or float

Gamma parameter of the wavelets.

+
+
betanp.ndarray or float

Beta parameter of the wavelets.

+
+
+
+
+

Returns#

+
+
widthnp.ndarray or float

Dimensionless time-domain window width of the wavelets.

+
+
skewnp.ndarray or float

Imaginary part of normalized third moment of the time-domain demodulate, +or ‘demodulate skewness’.

+
+
kurtnp.ndarray or float

Normalized fourth moment of the time-domain demodulate, +or ‘demodulate kurtosis’.

+
+
+
+
+

Examples#

+

TODO

+
+
+

See Also#

+

morse_wavelet(), morse_freq(), morse_amplitude(), morse_logspace_freq().

+
+
+ +
+
+clouddrift.wavelet.morse_wavelet(length: int, gamma: float, beta: float, radian_frequency: ndarray, order: int | None = 1, normalization: str | None = 'bandpass') Tuple[ndarray, ndarray][source]#
+

Compute the generalized Morse wavelets of Olhede and Walden (2002), doi: 10.1109/TSP.2002.804066.

+
+

Parameters#

+
+
lengthint

Length of the wavelets.

+
+
gammafloat

Gamma parameter of the wavelets.

+
+
betafloat

Beta parameter of the wavelets.

+
+
radian_frequencynp.ndarray

The radian frequencies at which the Fourier transform of the wavelets +reach their maximum amplitudes. radian_frequency is between 0 and 2 * np.pi * 0.5, +the normalized Nyquist radian frequency.

+
+
orderint, optional

Order of wavelets, default is 1.

+
+
normalizationstr, optional

Normalization for the wavelet output. By default it is assumed to be "bandpass" +which uses a bandpass normalization, meaning that the FFT of the wavelets +have peak value of 2 for all central frequencies radian_frequency. The other option is +"energy" which uses the unit energy normalization. In this last case, the time-domain wavelet +energies np.sum(np.abs(wave)**2) are always unity.

+
+
+
+
+

Returns#

+
+
waveletnp.ndarray

Time-domain wavelets with shape (order, radian_frequency, length).

+
+
wavelet_fft: np.ndarray

Frequency-domain wavelets with shape (order, radian_frequency, length).

+
+
+
+
+

Examples#

+

Compute a Morse wavelet with gamma parameter 3, beta parameter 4, at radian +frequency 0.2 cycles per unit time:

+
>>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]))
+>>> np.shape(wavelet)
+(1, 1, 1024)
+
+
+

Compute a suite of Morse wavelets with gamma parameter 3, beta parameter 4, up to order 3, +at radian frequencies 0.2 and 0.3 cycles per unit time:

+
>>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2, 2*np.pi*0.3]), order=3)
+>>> np.shape(wavelet)
+(3, 2, 1024)
+
+
+

Compute a Morse wavelet specifying an energy normalization : +>>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]), normalization=”energy”)

+
+
+

Raises#

+
+
ValueError

If normalization optional argument is not in [“bandpass”, “energy”].

+
+
+
+
+

See Also#

+

wavelet_transform(), morse_wavelet_transform(), morse_freq(), morse_logspace_freq(), morse_amplitude(), morse_properties()

+
+
+ +
+
+clouddrift.wavelet.morse_wavelet_transform(x: ndarray, gamma: float, beta: float, radian_frequency: ndarray, complex: bool | None = False, order: int | None = 1, normalization: str | None = 'bandpass', boundary: str | None = 'mirror', time_axis: int | None = -1) Tuple[ndarray] | ndarray[source]#
+

Apply a continuous wavelet transform to an input signal using the generalized Morse +wavelets of Olhede and Walden (2002). The wavelet transform is normalized differently +for complex-valued input than for real-valued input, and this in turns depends on whether the +optional argument normalization is set to "bandpass" or "energy" normalizations.

+
+

Parameters#

+
+
xnp.ndarray

Real- or complex-valued signals. The time axis is assumed to be the last. If not, specify optional +argument time_axis.

+
+
gammafloat

Gamma parameter of the Morse wavelets.

+
+
betafloat

Beta parameter of the Morse wavelets.

+
+
radian_frequencynp.ndarray

An array of radian frequencies at which the Fourier transform of the wavelets +reach their maximum amplitudes. radian_frequency is typically between 0 and 2 * np.pi * 0.5, +the normalized Nyquist radian frequency.

+
+
complexboolean, optional

Specify explicitly if the input signal x is a complex signal. Default is False which +means that the input is real but that is not explicitly tested by the function. +This choice affects the normalization of the outputs and their interpretation. +See examples below.

+
+
time_axisint, optional

Axis on which the time is defined for input x (default is last, or -1).

+
+
normalizationstr, optional

Normalization for the wavelet transforms. By default it is assumed to be +"bandpass" which uses a bandpass normalization, meaning that the FFT +of the wavelets have peak value of 2 for all central frequencies +radian_frequency. However, if the optional argument complex=True +is specified, the wavelets will be divided by 2 so that the total +variance of the input complex signal is equal to the sum of the +variances of the returned analytic (positive) and conjugate analytic +(negative) parts. See examples below. The other option is "energy" +which uses the unit energy normalization. In this last case, the +time-domain wavelet energies np.sum(np.abs(wave)**2) are always +unity.

+
+
boundarystr, optional

The boundary condition to be imposed at the edges of the input signal x. +Allowed values are "mirror", "zeros", and "periodic". Default is "mirror".

+
+
orderint, optional

Order of Morse wavelets, default is 1.

+
+
+
+
+

Returns#

+

If the input signal is real as specified by complex=False:

+
+
wtxnp.ndarray

Time-domain wavelet transform of input x with shape ((x shape without time_axis), orders, frequencies, time_axis) +but with dimensions of length 1 removed (squeezed).

+
+
+

If the input signal is complex as specified by complex=True, a tuple is returned:

+
+
wtx_pnp.array

Time-domain positive wavelet transform of input x with shape ((x shape without time_axis), frequencies, orders), +but with dimensions of length 1 removed (squeezed).

+
+
wtx_nnp.array

Time-domain negative wavelet transform of input x with shape ((x shape without time_axis), frequencies, orders), +but with dimensions of length 1 removed (squeezed).

+
+
+
+
+

Examples#

+

Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta parameter 4, +at radian frequency 0.2 cycles per unit time:

+
>>> x = np.random.random(1024)
+>>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]))
+
+
+

Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta parameter 4, +for a complex input signal at radian frequency 0.2 cycles per unit time. This case returns the +analytic and conjugate analytic components:

+
>>> z = np.random.random(1024) + 1j*np.random.random(1024)
+>>> wtz_p, wtz_n = morse_wavelet_transform(z, 3, 4, np.array([2*np.pi*0.2]), complex=True)
+
+
+

The same result as above can be obtained by applying the Morse transform on the real and imaginary +component of z and recombining the results as follows for the “bandpass” normalization: +>>> wtz_real = morse_wavelet_transform(np.real(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_imag = morse_wavelet_transform(np.imag(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_p, wtz_n = (wtz_real + 1j*wtz_imag) / 2, (wtz_real - 1j*wtz_imag) / 2

+

For the “energy” normalization, the analytic and conjugate analytic components are obtained as follows +with this alternative method: +>>> wtz_real = morse_wavelet_transform(np.real(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_imag = morse_wavelet_transform(np.imag(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_p, wtz_n = (wtz_real + 1j*wtz_imag) / np.sqrt(2), (wtz_real - 1j*wtz_imag) / np.sqrt(2)

+

The input signal can have an arbitrary number of dimensions but its time_axis must be +specified if it is not the last:

+
>>> x = np.random.random((1024,10,15))
+>>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]), time_axis=0)
+
+
+

The default way to handle the boundary conditions is to mirror the ends points +but this can be changed by specifying the chosen boundary method:

+
>>> x = np.random.random((10,15,1024))
+>>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]), boundary="periodic")
+
+
+

This function can be used to conduct a time-frequency analysis of the input signal by specifying +a range of radian frequencies using the morse_logspace_freq function as an example:

+
>>> x = np.random.random(1024)
+>>> gamma = 3
+>>> beta = 4
+>>> radian_frequency = morse_logspace_freq(gamma, beta, np.shape(x)[0])
+>>> wtx = morse_wavelet_transform(x, gamma, beta, radian_frequency)
+
+
+
+
+

Raises#

+
+
ValueError

If the time axis is outside of the valid range ([-1, np.ndim(x)-1]). +If boundary optional argument is not in [“mirror”, “zeros”, “periodic”]. +If normalization optional argument is not in [“bandpass”, “energy”].

+
+
+
+
+

See Also#

+

morse_wavelet(), wavelet_transform(), morse_logspace_freq()

+
+
+ +
+
+clouddrift.wavelet.wavelet_transform(x: ndarray, wavelet: ndarray, boundary: str | None = 'mirror', time_axis: int | None = -1, freq_axis: int | None = -2, order_axis: int | None = -3) ndarray[source]#
+

Apply a continuous wavelet transform to an input signal using an input wavelet +function. Such wavelet can be provided by the function morse_wavelet.

+
+

Parameters#

+
+
xnp.ndarray

Real- or complex-valued signals.

+
+
waveletnp.ndarray

A suite of time-domain wavelets, typically returned by the function morse_wavelet. +The length of the time axis of the wavelets must be the last one and matches the +length of the time axis of x. The other dimensions (axes) of the wavelets (such as orders and frequencies) are +typically organized as orders, frequencies, and time, unless specified by optional arguments freq_axis and order_axis. +The normalization of the wavelets is assumed to be “bandpass”, if not, use kwarg normalization=”energy”, see morse_wavelet.

+
+
boundarystr, optional

The boundary condition to be imposed at the edges of the input signal x. +Allowed values are "mirror", "zeros", and "periodic". Default is "mirror".

+
+
time_axisint, optional

Axis on which the time is defined for input x (default is last, or -1). Note that the time axis of the +wavelets must be last.

+
+
freq_axisint, optional

Axis of wavelet for the frequencies (default is second or 1)

+
+
order_axisint, optional

Axis of wavelet for the orders (default is first or 0)

+
+
+
+
+

Returns#

+
+
wtxnp.ndarray

Time-domain wavelet transform of x with shape ((x shape without time_axis), orders, frequencies, time_axis) +but with dimensions of length 1 removed (squeezed).

+
+
+
+
+

Examples#

+

Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta +parameter 4, at radian frequency 0.2 cycles per unit time:

+
>>> x = np.random.random(1024)
+>>> wavelet, _ = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]))
+>>> wtx = wavelet_transform(x, wavelet)
+
+
+

The input signal can have an arbitrary number of dimensions but its +time_axis must be specified if it is not the last:

+
>>> x = np.random.random((1024,10,15))
+>>> wavelet, _ = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]))
+>>> wtx = wavelet_transform(x, wavelet,time_axis=0)
+
+
+
+
+

Raises#

+
+
ValueError

If the time axis is outside of the valid range ([-1, N-1]). +If the shape of time axis is different for input signal and wavelet. +If boundary optional argument is not in [“mirror”, “zeros”, “periodic”].

+
+
+
+
+

See Also#

+

morse_wavelet(), morse_wavelet_transform(), morse_freq()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + + + + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.morse_amplitude.html b/_autosummary/clouddrift.wavelet.morse_amplitude.html new file mode 100644 index 00000000..1bd7f539 --- /dev/null +++ b/_autosummary/clouddrift.wavelet.morse_amplitude.html @@ -0,0 +1,605 @@ + + + + + + + + + + + clouddrift.wavelet.morse_amplitude — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.morse_amplitude

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.morse_amplitude#

+
+
+clouddrift.wavelet.morse_amplitude(gamma: ndarray | float, beta: ndarray | float, order: int64 | None = 1, normalization: str | None = 'bandpass') float[source]#
+

Calculate the amplitude coefficient of the generalized Morse wavelets. +By default, the amplitude is calculated such that the maximum of the +frequency-domain wavelet is equal to 2, which is the bandpass normalization. +Optionally, specify normalization="energy" in order to return the coefficient +giving the wavelets unit energies. See Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607.

+
+

Parameters#

+
+
gammanp.ndarray or float

Gamma parameter of the wavelets.

+
+
betanp.ndarray or float

Beta parameter of the wavelets.

+
+
orderint, optional

Order of wavelets, default is 1.

+
+
normalizationstr, optional

Normalization for the wavelets. By default it is assumed to be "bandpass" +which uses a bandpass normalization, meaning that the FFT of the wavelets +have peak value of 2 for all central frequencies radian_frequency. The other option is "energy" +which uses the unit energy normalization. In this last case the time-domain wavelet +energies np.sum(np.abs(wave)**2) are always unity.

+
+
+
+
+

Returns#

+
+
ampnp.ndarray or float

The amplitude coefficient of the wavelets.

+
+
+
+
+

Examples#

+

TODO

+
+
+

See Also#

+

morse_wavelet(), morse_freq(), morse_properties(), morse_logspace_freq().

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.morse_freq.html b/_autosummary/clouddrift.wavelet.morse_freq.html new file mode 100644 index 00000000..6222d6d5 --- /dev/null +++ b/_autosummary/clouddrift.wavelet.morse_freq.html @@ -0,0 +1,629 @@ + + + + + + + + + + + clouddrift.wavelet.morse_freq — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.morse_freq

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.morse_freq#

+
+
+clouddrift.wavelet.morse_freq(gamma: ndarray | float, beta: ndarray | float) Tuple[ndarray] | Tuple[float][source]#
+

Frequency measures for generalized Morse wavelets. This functions calculates +three different measures fm, fe, and fi of the frequency of the lowest-order generalized Morse +wavelet specified by parameters gamma and beta.

+

Note that all frequency quantities here are in radian as in cos(f t) and not +cyclic as in np.cos(2 np.pi f t).

+

For beta=0, the corresponding wavelet becomes an analytic lowpass filter, and fm +is not defined in the usual way but as the point at which the filter has decayed +to one-half of its peak power.

+

For details see Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607.

+
+

Parameters#

+
+
gammanp.ndarray or float

Gamma parameter of the wavelets.

+
+
betanp.ndarray or float

Beta parameter of the wavelets.

+
+
+
+
+

Returns#

+
+
fmnp.ndarray

The modal or peak frequency.

+
+
fenp.ndarray

The energy frequency.

+
+
finp.ndarray

The instantaneous frequency at the wavelets’ centers.

+
+
+
+
+

Examples#

+
>>> fm, fe, fi = morse_freq(3, 4)
+
+
+
>>> morse_freq(3, 4)
+(array(1.10064242), 1.1025129235952809, 1.1077321674324723)
+
+
+
>>> morse_freq(3, np.array([10, 20, 30]))
+(array([1.49380158, 1.88207206, 2.15443469]),
+array([1.49421505, 1.88220264, 2.15450116]),
+array([1.49543843, 1.88259299, 2.15470024]))
+
+
+
>>> morse_freq(np.array([3, 4, 5]), np.array([10, 20, 30]))
+(array([1.49380158, 1.49534878, 1.43096908]),
+array([1.49421505, 1.49080278, 1.4262489 ]),
+array([1.49543843, 1.48652036, 1.42163583]))
+
+
+
>>> morse_freq(np.array([3, 4, 5]), 10)
+(array([1.49380158, 1.25743343, 1.14869835]),
+array([1.49421505, 1.25000964, 1.13759731]),
+array([1.49543843, 1.24350315, 1.12739747]))
+
+
+
+
+

See Also#

+

morse_wavelet(), morse_amplitude()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.morse_logspace_freq.html b/_autosummary/clouddrift.wavelet.morse_logspace_freq.html new file mode 100644 index 00000000..5cfcceb5 --- /dev/null +++ b/_autosummary/clouddrift.wavelet.morse_logspace_freq.html @@ -0,0 +1,631 @@ + + + + + + + + + + + clouddrift.wavelet.morse_logspace_freq — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.morse_logspace_freq

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.morse_logspace_freq#

+
+
+clouddrift.wavelet.morse_logspace_freq(gamma: float, beta: float, length: int, highset: Tuple[float] | None = (0.1, 3.141592653589793), lowset: Tuple[float] | None = (5, 0), density: int | None = 4) ndarray[source]#
+

Compute logarithmically-spaced frequencies for generalized Morse wavelets +with parameters gamma and beta. This is a useful function to obtain the frequencies +needed for time-frequency analyses using wavelets. If radian_frequencies is the +output, np.log(radian_frequencies) is uniformly spaced, following convention +for wavelet analysis. See Lilly (2017), doi: 10.1098/rspa.2016.0776.

+

Default settings to compute the frequencies can be changed by passing optional +arguments lowset, highset, and density. See below.

+
+

Parameters#

+
+
gammafloat

Gamma parameter of the Morse wavelets.

+
+
betafloat

Beta parameter of the Morse wavelets.

+
+
lengthint

Length of the Morse wavelets and input signals.

+
+
highsettuple of floats, optional.

Tuple of values (eta, high) used for high-frequency cutoff calculation. The highest +frequency is set to be the minimum of a specified value and a cutoff frequency +based on a Nyquist overlap condition: the highest frequency is the minimum of +the specified value high, and the largest frequency for which the wavelet will +satisfy the threshold level eta. Here eta must be a number between zero and one +specifying the ratio of a frequency-domain wavelet at the Nyquist frequency +to its peak value. Default is (eta, high) = (0.1, np.pi).

+
+
lowsettuple of floats, optional.

Tuple of values (P, low) set used for low-frequency cutoff calculation based on an +endpoint overlap condition. The lowest frequency is set such that the lowest-frequency +wavelet will reach some number P, called the packing number, times its central window +width at the ends of the time series. A choice of P=1 corresponds to roughly 95% of +the time-domain wavelet energy being contained within the time series endpoints for +a wavelet at the center of the domain. The second value of the tuple is the absolute +lowest frequency. Default is (P, low) = (5, 0).

+
+
densityint, optional

This optional argument controls the number of points in the returned frequency +array. Higher values of density mean more overlap in the frequency +domain between transforms. When density=1, the peak of one wavelet is located at the +half-power points of the adjacent wavelet. The default density=4 means +that four other wavelets will occur between the peak of one wavelet and +its half-power point.

+
+
+
+
+

Returns#

+
+
radian_frequencynp.ndarray

Logarithmically-spaced frequencies in radians cycles per unit time, +sorted in descending order.

+
+
+
+
+

Examples#

+

Generate a frequency array for the generalized Morse wavelet +with parameters gamma=3 and beta=5 for a time series of length n=1024:

+
>>> radian_frequency = morse_logspace_freq(3, 5, 1024)
+>>> radian_frequency = morse_logspace_freq(3, 5, 1024, highset=(0.2, np.pi), lowset=(5, 0))
+>>> radian_frequency = morse_logspace_freq(3, 5, 1024, highset=(0.2, np.pi), lowset=(5, 0), density=10)
+
+
+
+
+

See Also#

+

morse_wavelet(), morse_freq(), morse_properties()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.morse_properties.html b/_autosummary/clouddrift.wavelet.morse_properties.html new file mode 100644 index 00000000..4cfb1ade --- /dev/null +++ b/_autosummary/clouddrift.wavelet.morse_properties.html @@ -0,0 +1,600 @@ + + + + + + + + + + + clouddrift.wavelet.morse_properties — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.morse_properties

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.morse_properties#

+
+
+clouddrift.wavelet.morse_properties(gamma: ndarray | float, beta: ndarray | float) Tuple[ndarray] | Tuple[float][source]#
+

Calculate the properties of the demodulated generalized Morse wavelets. +See Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607.

+
+

Parameters#

+
+
gammanp.ndarray or float

Gamma parameter of the wavelets.

+
+
betanp.ndarray or float

Beta parameter of the wavelets.

+
+
+
+
+

Returns#

+
+
widthnp.ndarray or float

Dimensionless time-domain window width of the wavelets.

+
+
skewnp.ndarray or float

Imaginary part of normalized third moment of the time-domain demodulate, +or ‘demodulate skewness’.

+
+
kurtnp.ndarray or float

Normalized fourth moment of the time-domain demodulate, +or ‘demodulate kurtosis’.

+
+
+
+
+

Examples#

+

TODO

+
+
+

See Also#

+

morse_wavelet(), morse_freq(), morse_amplitude(), morse_logspace_freq().

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.morse_wavelet.html b/_autosummary/clouddrift.wavelet.morse_wavelet.html new file mode 100644 index 00000000..a8a34964 --- /dev/null +++ b/_autosummary/clouddrift.wavelet.morse_wavelet.html @@ -0,0 +1,631 @@ + + + + + + + + + + + clouddrift.wavelet.morse_wavelet — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.morse_wavelet

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.morse_wavelet#

+
+
+clouddrift.wavelet.morse_wavelet(length: int, gamma: float, beta: float, radian_frequency: ndarray, order: int | None = 1, normalization: str | None = 'bandpass') Tuple[ndarray, ndarray][source]#
+

Compute the generalized Morse wavelets of Olhede and Walden (2002), doi: 10.1109/TSP.2002.804066.

+
+

Parameters#

+
+
lengthint

Length of the wavelets.

+
+
gammafloat

Gamma parameter of the wavelets.

+
+
betafloat

Beta parameter of the wavelets.

+
+
radian_frequencynp.ndarray

The radian frequencies at which the Fourier transform of the wavelets +reach their maximum amplitudes. radian_frequency is between 0 and 2 * np.pi * 0.5, +the normalized Nyquist radian frequency.

+
+
orderint, optional

Order of wavelets, default is 1.

+
+
normalizationstr, optional

Normalization for the wavelet output. By default it is assumed to be "bandpass" +which uses a bandpass normalization, meaning that the FFT of the wavelets +have peak value of 2 for all central frequencies radian_frequency. The other option is +"energy" which uses the unit energy normalization. In this last case, the time-domain wavelet +energies np.sum(np.abs(wave)**2) are always unity.

+
+
+
+
+

Returns#

+
+
waveletnp.ndarray

Time-domain wavelets with shape (order, radian_frequency, length).

+
+
wavelet_fft: np.ndarray

Frequency-domain wavelets with shape (order, radian_frequency, length).

+
+
+
+
+

Examples#

+

Compute a Morse wavelet with gamma parameter 3, beta parameter 4, at radian +frequency 0.2 cycles per unit time:

+
>>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]))
+>>> np.shape(wavelet)
+(1, 1, 1024)
+
+
+

Compute a suite of Morse wavelets with gamma parameter 3, beta parameter 4, up to order 3, +at radian frequencies 0.2 and 0.3 cycles per unit time:

+
>>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2, 2*np.pi*0.3]), order=3)
+>>> np.shape(wavelet)
+(3, 2, 1024)
+
+
+

Compute a Morse wavelet specifying an energy normalization : +>>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]), normalization=”energy”)

+
+
+

Raises#

+
+
ValueError

If normalization optional argument is not in [“bandpass”, “energy”].

+
+
+
+
+

See Also#

+

wavelet_transform(), morse_wavelet_transform(), morse_freq(), morse_logspace_freq(), morse_amplitude(), morse_properties()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.morse_wavelet_transform.html b/_autosummary/clouddrift.wavelet.morse_wavelet_transform.html new file mode 100644 index 00000000..a57d9657 --- /dev/null +++ b/_autosummary/clouddrift.wavelet.morse_wavelet_transform.html @@ -0,0 +1,690 @@ + + + + + + + + + + + clouddrift.wavelet.morse_wavelet_transform — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.morse_wavelet_transform

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.morse_wavelet_transform#

+
+
+clouddrift.wavelet.morse_wavelet_transform(x: ndarray, gamma: float, beta: float, radian_frequency: ndarray, complex: bool | None = False, order: int | None = 1, normalization: str | None = 'bandpass', boundary: str | None = 'mirror', time_axis: int | None = -1) Tuple[ndarray] | ndarray[source]#
+

Apply a continuous wavelet transform to an input signal using the generalized Morse +wavelets of Olhede and Walden (2002). The wavelet transform is normalized differently +for complex-valued input than for real-valued input, and this in turns depends on whether the +optional argument normalization is set to "bandpass" or "energy" normalizations.

+
+

Parameters#

+
+
xnp.ndarray

Real- or complex-valued signals. The time axis is assumed to be the last. If not, specify optional +argument time_axis.

+
+
gammafloat

Gamma parameter of the Morse wavelets.

+
+
betafloat

Beta parameter of the Morse wavelets.

+
+
radian_frequencynp.ndarray

An array of radian frequencies at which the Fourier transform of the wavelets +reach their maximum amplitudes. radian_frequency is typically between 0 and 2 * np.pi * 0.5, +the normalized Nyquist radian frequency.

+
+
complexboolean, optional

Specify explicitly if the input signal x is a complex signal. Default is False which +means that the input is real but that is not explicitly tested by the function. +This choice affects the normalization of the outputs and their interpretation. +See examples below.

+
+
time_axisint, optional

Axis on which the time is defined for input x (default is last, or -1).

+
+
normalizationstr, optional

Normalization for the wavelet transforms. By default it is assumed to be +"bandpass" which uses a bandpass normalization, meaning that the FFT +of the wavelets have peak value of 2 for all central frequencies +radian_frequency. However, if the optional argument complex=True +is specified, the wavelets will be divided by 2 so that the total +variance of the input complex signal is equal to the sum of the +variances of the returned analytic (positive) and conjugate analytic +(negative) parts. See examples below. The other option is "energy" +which uses the unit energy normalization. In this last case, the +time-domain wavelet energies np.sum(np.abs(wave)**2) are always +unity.

+
+
boundarystr, optional

The boundary condition to be imposed at the edges of the input signal x. +Allowed values are "mirror", "zeros", and "periodic". Default is "mirror".

+
+
orderint, optional

Order of Morse wavelets, default is 1.

+
+
+
+
+

Returns#

+

If the input signal is real as specified by complex=False:

+
+
wtxnp.ndarray

Time-domain wavelet transform of input x with shape ((x shape without time_axis), orders, frequencies, time_axis) +but with dimensions of length 1 removed (squeezed).

+
+
+

If the input signal is complex as specified by complex=True, a tuple is returned:

+
+
wtx_pnp.array

Time-domain positive wavelet transform of input x with shape ((x shape without time_axis), frequencies, orders), +but with dimensions of length 1 removed (squeezed).

+
+
wtx_nnp.array

Time-domain negative wavelet transform of input x with shape ((x shape without time_axis), frequencies, orders), +but with dimensions of length 1 removed (squeezed).

+
+
+
+
+

Examples#

+

Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta parameter 4, +at radian frequency 0.2 cycles per unit time:

+
>>> x = np.random.random(1024)
+>>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]))
+
+
+

Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta parameter 4, +for a complex input signal at radian frequency 0.2 cycles per unit time. This case returns the +analytic and conjugate analytic components:

+
>>> z = np.random.random(1024) + 1j*np.random.random(1024)
+>>> wtz_p, wtz_n = morse_wavelet_transform(z, 3, 4, np.array([2*np.pi*0.2]), complex=True)
+
+
+

The same result as above can be obtained by applying the Morse transform on the real and imaginary +component of z and recombining the results as follows for the “bandpass” normalization: +>>> wtz_real = morse_wavelet_transform(np.real(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_imag = morse_wavelet_transform(np.imag(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_p, wtz_n = (wtz_real + 1j*wtz_imag) / 2, (wtz_real - 1j*wtz_imag) / 2

+

For the “energy” normalization, the analytic and conjugate analytic components are obtained as follows +with this alternative method: +>>> wtz_real = morse_wavelet_transform(np.real(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_imag = morse_wavelet_transform(np.imag(z), 3, 4, np.array([2*np.pi*0.2])) +>>> wtz_p, wtz_n = (wtz_real + 1j*wtz_imag) / np.sqrt(2), (wtz_real - 1j*wtz_imag) / np.sqrt(2)

+

The input signal can have an arbitrary number of dimensions but its time_axis must be +specified if it is not the last:

+
>>> x = np.random.random((1024,10,15))
+>>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]), time_axis=0)
+
+
+

The default way to handle the boundary conditions is to mirror the end points +but this can be changed by specifying the chosen boundary method:

+
>>> x = np.random.random((10,15,1024))
+>>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]), boundary="periodic")
+
+
+

This function can be used to conduct a time-frequency analysis of the input signal by specifying +a range of radian frequencies using the morse_logspace_freq function as an example:

+
>>> x = np.random.random(1024)
+>>> gamma = 3
+>>> beta = 4
+>>> radian_frequency = morse_logspace_freq(gamma, beta, np.shape(x)[0])
+>>> wtx = morse_wavelet_transform(x, gamma, beta, radian_frequency)
+
+
+
+
+

Raises#

+
+
ValueError

If the time axis is outside of the valid range ([-1, np.ndim(x)-1]). +If boundary optional argument is not in [“mirror”, “zeros”, “periodic”]. +If normalization optional argument is not in [“bandpass”, “energy”].

+
+
+
+
+

See Also#

+

morse_wavelet(), wavelet_transform(), morse_logspace_freq()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_autosummary/clouddrift.wavelet.wavelet_transform.html b/_autosummary/clouddrift.wavelet.wavelet_transform.html new file mode 100644 index 00000000..d213c52d --- /dev/null +++ b/_autosummary/clouddrift.wavelet.wavelet_transform.html @@ -0,0 +1,621 @@ + + + + + + + + + + + clouddrift.wavelet.wavelet_transform — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

clouddrift.wavelet.wavelet_transform

+ +
+
+ +
+

Contents

+
+ +
+
+
+ + + + +
+ +
+

clouddrift.wavelet.wavelet_transform#

+
+
+clouddrift.wavelet.wavelet_transform(x: ndarray, wavelet: ndarray, boundary: str | None = 'mirror', time_axis: int | None = -1, freq_axis: int | None = -2, order_axis: int | None = -3) ndarray[source]#
+

Apply a continuous wavelet transform to an input signal using an input wavelet +function. Such wavelet can be provided by the function morse_wavelet.

+
+

Parameters#

+
+
xnp.ndarray

Real- or complex-valued signals.

+
+
waveletnp.ndarray

A suite of time-domain wavelets, typically returned by the function morse_wavelet. +The length of the time axis of the wavelets must be the last one and matches the +length of the time axis of x. The other dimensions (axes) of the wavelets (such as orders and frequencies) are +typically organized as orders, frequencies, and time, unless specified by optional arguments freq_axis and order_axis. +The normalization of the wavelets is assumed to be “bandpass”, if not, use kwarg normalization=”energy”, see morse_wavelet.

+
+
boundarystr, optional

The boundary condition to be imposed at the edges of the input signal x. +Allowed values are "mirror", "zeros", and "periodic". Default is "mirror".

+
+
time_axisint, optional

Axis on which the time is defined for input x (default is last, or -1). Note that the time axis of the +wavelets must be last.

+
+
freq_axisint, optional

Axis of wavelet for the frequencies (default is second to last, or -2)

+
+
order_axisint, optional

Axis of wavelet for the orders (default is third to last, or -3)

+
+
+
+
+

Returns#

+
+
wtxnp.ndarray

Time-domain wavelet transform of x with shape ((x shape without time_axis), orders, frequencies, time_axis) +but with dimensions of length 1 removed (squeezed).

+
+
+
+
+

Examples#

+

Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta +parameter 4, at radian frequency 0.2 cycles per unit time:

+
>>> x = np.random.random(1024)
+>>> wavelet, _ = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]))
+>>> wtx = wavelet_transform(x, wavelet)
+
+
+

The input signal can have an arbitrary number of dimensions but its +time_axis must be specified if it is not the last:

+
>>> x = np.random.random((1024,10,15))
+>>> wavelet, _ = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]))
+>>> wtx = wavelet_transform(x, wavelet,time_axis=0)
+
+
+
+
+

Raises#

+
+
ValueError

If the time axis is outside of the valid range ([-1, N-1]). +If the shape of time axis is different for input signal and wavelet. +If boundary optional argument is not in [“mirror”, “zeros”, “periodic”].

+
+
+
+
+

See Also#

+

morse_wavelet(), morse_wavelet_transform(), morse_freq()

+
+
+ +
+ + +
+ + + + + + + + +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_images/ragged_array.png b/_images/ragged_array.png new file mode 100644 index 00000000..e3e42bb3 Binary files /dev/null and b/_images/ragged_array.png differ diff --git a/_modules/clouddrift/adapters/gdp.html b/_modules/clouddrift/adapters/gdp.html new file mode 100644 index 00000000..ca8754f7 --- /dev/null +++ b/_modules/clouddrift/adapters/gdp.html @@ -0,0 +1,820 @@ + + + + + + + + + + clouddrift.adapters.gdp — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.adapters.gdp

+"""
+This module provides functions and metadata to convert the Global Drifter
+Program (GDP) data to a ``clouddrift.RaggedArray`` instance. The functions
+defined in this module are common to both hourly (``clouddrift.adapters.gdp1h``)
+and six-hourly (``clouddrift.adapters.gdp6h``) GDP modules.
+"""
+
+from clouddrift.adapters.utils import download_with_progress
+import numpy as np
+import os
+import pandas as pd
+import xarray as xr
+
+GDP_COORDS = [
+    "ids",
+    "time",
+]
+
+GDP_METADATA = [
+    "ID",
+    "rowsize",
+    "WMO",
+    "expno",
+    "deploy_date",
+    "deploy_lat",
+    "deploy_lon",
+    "start_date",
+    "start_lat",
+    "start_lon",
+    "end_date",
+    "end_lat",
+    "end_lon",
+    "drogue_lost_date",
+    "typedeath",
+    "typebuoy",
+    "location_type",
+    "DeployingShip",
+    "DeploymentStatus",
+    "BuoyTypeManufacturer",
+    "BuoyTypeSensorArray",
+    "CurrentProgram",
+    "PurchaserFunding",
+    "SensorUpgrade",
+    "Transmissions",
+    "DeployingCountry",
+    "DeploymentComments",
+    "ManufactureYear",
+    "ManufactureMonth",
+    "ManufactureSensorType",
+    "ManufactureVoltage",
+    "FloatDiameter",
+    "SubsfcFloatPresence",
+    "DrogueType",
+    "DrogueLength",
+    "DrogueBallast",
+    "DragAreaAboveDrogue",
+    "DragAreaOfDrogue",
+    "DragAreaRatio",
+    "DrogueCenterDepth",
+    "DrogueDetectSensor",
+]
+
+
+
+[docs] +def cast_float64_variables_to_float32( + ds: xr.Dataset, variables_to_skip: list[str] = ["time", "lat", "lon"] +) -> xr.Dataset: + """Cast all float64 variables except ``variables_to_skip`` to float32. + Extra precision from float64 is not needed and takes up memory and disk + space. + + Parameters + ---------- + ds : xr.Dataset + Dataset to modify + variables_to_skip : list[str] + List of variables to skip; default is ["time", "lat", "lon"]. + + Returns + ------- + ds : xr.Dataset + Modified dataset + """ + for var in ds.variables: + if var in variables_to_skip: + continue + if ds[var].dtype == "float64": + ds[var] = ds[var].astype("float32") + return ds
+ + + +
+[docs] +def parse_directory_file(filename: str) -> pd.DataFrame: + """Read a GDP directory file that contains metadata of drifter releases. + + Parameters + ---------- + filename : str + Name of the directory file to parse. + + Returns + ------- + df : pd.DataFrame + List of drifters from a single directory file as a pandas DataFrame. + """ + GDP_DIRECTORY_FILE_URL = "https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/" + df = pd.read_csv( + os.path.join(GDP_DIRECTORY_FILE_URL, filename), delimiter="\s+", header=None + ) + + # Combine the date and time columns to easily parse dates below. + df[4] += " " + df[5] + df[8] += " " + df[9] + df[12] += " " + df[13] + df = df.drop(columns=[5, 9, 13]) + df.columns = [ + "ID", + "WMO_number", + "program_number", + "buoys_type", + "Start_date", + "Start_lat", + "Start_lon", + "End_date", + "End_lat", + "End_lon", + "Drogue_off_date", + "death_code", + ] + for t in ["Start_date", "End_date", "Drogue_off_date"]: + df[t] = pd.to_datetime(df[t], format="%Y/%m/%d %H:%M", errors="coerce") + + return df
+ + + +
+[docs] +def get_gdp_metadata() -> pd.DataFrame: + """Download and parse GDP metadata and return it as a Pandas DataFrame. + + Returns + ------- + df : pd.DataFrame + Sorted list of drifters as a pandas DataFrame. + """ + directory_file_pattern = "dirfl_{low}_{high}.dat" + + dfs = [] + start = 1 + while True: + name = directory_file_pattern.format(low=start, high=start + 4999) + try: + dfs.append(parse_directory_file(name)) + start += 5000 + except: + break + + name = directory_file_pattern.format(low=start, high="current") + dfs.append(parse_directory_file(name)) + + df = pd.concat(dfs) + df.sort_values(["Start_date"], inplace=True, ignore_index=True) + return df
+ + + +
+[docs] +def order_by_date(df: pd.DataFrame, idx: list[int]) -> np.ndarray[int]: + """From the previously sorted DataFrame of directory files, return the + unique set of drifter IDs sorted by their start date (the date of the first + quality-controlled data point). + + Parameters + ---------- + idx : list + List of drifters to include in the ragged array + + Returns + ------- + idx : list + Unique set of drifter IDs sorted by their start date. + """ + return df.ID[np.where(np.in1d(df.ID, idx))[0]].values
+ + + +
+[docs] +def fetch_netcdf(url: str, file: str): + """Download and save the file from the given url, if not already downloaded. + + Parameters + ---------- + url : str + URL from which to download the file. + file : str + Name of the file to save. + """ + download_with_progress([(url, file)])
+ + + +
+[docs] +def decode_date(t): + """The date format is specified as 'seconds since 1970-01-01 00:00:00' but + the missing values are stored as -1e+34 which is not supported by the + default parsing mechanism in xarray. + + This function returns replaced the missing value by NaN and returns a + datetime instance. + + Parameters + ---------- + t : array + Array of time values + + Returns + ------- + out : datetime + Datetime instance with the missing value replaced by NaN + """ + nat_index = np.logical_or(np.isclose(t, -1e34), np.isnan(t)) + t[nat_index] = np.nan + return t
+ + + +
+[docs] +def fill_values(var, default=np.nan): + """Change fill values (-1e+34, inf, -inf) in var array to the value + specified by default. + + Parameters + ---------- + var : array + Array to fill + default : float + Default value to use for fill values + """ + missing_value = np.logical_or(np.isclose(var, -1e34), ~np.isfinite(var)) + if np.any(missing_value): + var[missing_value] = default + return var
+ + + +
+[docs] +def str_to_float(value: str, default: float = np.nan) -> float: + """Convert a string to float, while returning the value of default if the + string is not convertible to a float, or if it's a NaN. + + Parameters + ---------- + value : str + String to convert to float + default : float + Default value to return if the string is not convertible to float + + Returns + ------- + out : float + Float value of the string, or default if the string is not convertible to float. + """ + try: + fvalue = float(value) + if np.isnan(fvalue): + return default + else: + return fvalue + except ValueError: + return default
+ + + +
+[docs] +def cut_str(value: str, max_length: int) -> np.chararray: + """Cut a string to a specific length and return it as a numpy chararray. + + Parameters + ---------- + value : str + String to cut + max_length : int + Length of the output + + Returns + ------- + out : np.chararray + String with max_length characters + """ + charar = np.chararray(1, max_length) + charar[:max_length] = value + return charar
+ + + +
+[docs] +def drogue_presence(lost_time, time) -> bool: + """Create drogue status from the drogue lost time and the trajectory time. + + Parameters + ---------- + lost_time + Timestamp of the drogue loss (or NaT) + time + Observation time + + Returns + ------- + out : bool + True if drogues and False otherwise + """ + if pd.isnull(lost_time) or lost_time >= time[-1]: + return np.ones_like(time, dtype="bool") + else: + return time < lost_time
+ + + +
+[docs] +def rowsize(index: int, **kwargs) -> int: + try: + return xr.open_dataset( + os.path.join( + kwargs["tmp_path"], kwargs["filename_pattern"].format(id=index) + ), + decode_cf=False, + decode_times=False, + concat_characters=False, + decode_coords=False, + ).sizes["obs"] + except Exception as e: + print( + f"Error processing {os.path.join(kwargs['tmp_path'], kwargs['filename_pattern'].format(id=index))}" + ) + print(str(e)) + return 0
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/adapters/gdp1h.html b/_modules/clouddrift/adapters/gdp1h.html new file mode 100644 index 00000000..7109144b --- /dev/null +++ b/_modules/clouddrift/adapters/gdp1h.html @@ -0,0 +1,1091 @@ + + + + + + + + + + clouddrift.adapters.gdp1h — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.adapters.gdp1h

+"""
+This module provides functions and metadata that can be used to convert the
+hourly Global Drifter Program (GDP) data to a ``clouddrift.RaggedArray``
+instance.
+"""
+
+import clouddrift.adapters.gdp as gdp
+from clouddrift.raggedarray import RaggedArray
+from clouddrift.adapters.utils import download_with_progress
+from datetime import datetime, timedelta
+import numpy as np
+import urllib.request
+import re
+import tempfile
+from typing import Optional
+import os
+import warnings
+import xarray as xr
+
+GDP_VERSION = "2.01"
+
+GDP_DATA_URL = "https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/hourly_product/v2.01/"
+GDP_DATA_URL_EXPERIMENTAL = (
+    "https://www.aoml.noaa.gov/ftp/pub/phod/lumpkin/hourly/experimental/"
+)
+GDP_TMP_PATH = os.path.join(tempfile.gettempdir(), "clouddrift", "gdp")
+GDP_TMP_PATH_EXPERIMENTAL = os.path.join(tempfile.gettempdir(), "clouddrift", "gdp_exp")
+GDP_DATA = [
+    "lon",
+    "lat",
+    "ve",
+    "vn",
+    "err_lat",
+    "err_lon",
+    "err_ve",
+    "err_vn",
+    "gap",
+    "sst",
+    "sst1",
+    "sst2",
+    "err_sst",
+    "err_sst1",
+    "err_sst2",
+    "flg_sst",
+    "flg_sst1",
+    "flg_sst2",
+    "drogue_status",
+]
+
+
+
+[docs] +def download( + drifter_ids: list = None, + n_random_id: int = None, + url: str = GDP_DATA_URL, + tmp_path: str = None, +): + """Download individual NetCDF files from the AOML server. + + Parameters + ---------- + drifter_ids : list + List of drifter to retrieve (Default: all) + n_random_id : int + Randomly select n_random_id drifter IDs to download (Default: None) + url : str + URL from which to download the data (Default: GDP_DATA_URL). Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL. + tmp_path : str, optional + Path to the directory where the individual NetCDF files are stored + (default varies depending on operating system; /tmp/clouddrift/gdp on Linux) + + Returns + ------- + out : list + List of retrieved drifters + """ + + # adjust the tmp_path if using the experimental source + if tmp_path is None: + tmp_path = GDP_TMP_PATH if url == GDP_DATA_URL else GDP_TMP_PATH_EXPERIMENTAL + + print(f"Downloading GDP hourly data from {url} to {tmp_path}...") + + # Create a temporary directory if doesn't already exists. + os.makedirs(tmp_path, exist_ok=True) + + if url == GDP_DATA_URL: + pattern = "drifter_hourly_[0-9]*.nc" + filename_pattern = "drifter_hourly_{id}.nc" + elif url == GDP_DATA_URL_EXPERIMENTAL: + pattern = "drifter_hourly_[0-9]*.nc" + filename_pattern = "drifter_hourly_{id}.nc" + + # retrieve all drifter ID numbers + if drifter_ids is None: + urlpath = urllib.request.urlopen(url) + string = urlpath.read().decode("utf-8") + filelist = re.compile(pattern).findall(string) + drifter_ids = np.unique([int(f.split("_")[-1][:-3]) for f in filelist]) + + # retrieve only a subset of n_random_id trajectories + if n_random_id: + if n_random_id > len(drifter_ids): + warnings.warn( + f"Retrieving all listed trajectories because {n_random_id} is larger than the {len(drifter_ids)} listed trajectories." 
+ ) + else: + rng = np.random.RandomState(42) + drifter_ids = sorted(rng.choice(drifter_ids, n_random_id, replace=False)) + + download_requests = [ + (os.path.join(url, file_name), os.path.join(tmp_path, file_name)) + for file_name in map(lambda d_id: filename_pattern.format(id=d_id), drifter_ids) + ] + download_with_progress(download_requests) + # Download the metadata so we can order the drifter IDs by end date. + gdp_metadata = gdp.get_gdp_metadata() + + return gdp.order_by_date(gdp_metadata, drifter_ids)
+ + + +
+[docs] +def preprocess(index: int, **kwargs) -> xr.Dataset: + """Extract and preprocess the Lagrangian data and attributes. + + This function takes an identification number that can be used to create a + file or url pattern or select data from a Dataframe. It then preprocesses + the data and returns a clean Xarray Dataset. + + Parameters + ---------- + index : int + Drifter's identification number + + Returns + ------- + ds : xr.Dataset + Xarray Dataset containing the data and attributes + """ + ds = xr.load_dataset( + os.path.join(kwargs["tmp_path"], kwargs["filename_pattern"].format(id=index)), + decode_times=False, + decode_coords=False, + ) + + # parse the date with custom function + ds["deploy_date"].data = gdp.decode_date(np.array([ds.deploy_date.data[0]])) + ds["end_date"].data = gdp.decode_date(np.array([ds.end_date.data[0]])) + ds["drogue_lost_date"].data = gdp.decode_date( + np.array([ds.drogue_lost_date.data[0]]) + ) + ds["time"].data = gdp.decode_date(np.array([ds.time.data[0]])) + + # convert fill values to nan + for var in [ + "err_lon", + "err_lat", + "err_ve", + "err_vn", + "sst", + "sst1", + "sst2", + "err_sst", + "err_sst1", + "err_sst2", + ]: + try: + ds[var].data = gdp.fill_values(ds[var].data) + except KeyError: + warnings.warn(f"Variable {var} not found; skipping.") + + # fix missing values stored as str + for var in [ + "longitude", + "latitude", + "err_lat", + "err_lon", + "ve", + "vn", + "err_ve", + "err_vn", + "sst", + "sst1", + "sst2", + ]: + try: + ds[var].encoding["missing value"] = -1e-34 + except KeyError: + warnings.warn(f"Variable {var} not found in upstream data; skipping.") + + # convert type of some variable + target_dtype = { + "ID": "int64", + "WMO": "int32", + "expno": "int32", + "typedeath": "int8", + "flg_sst": "int8", + "flg_sst1": "int8", + "flg_sst2": "int8", + } + + for var in target_dtype.keys(): + if var in ds.keys(): + ds[var].data = ds[var].data.astype(target_dtype[var]) + else: + warnings.warn(f"Variable {var} not 
found in upstream data; skipping.") + + # new variables + ds["ids"] = (["traj", "obs"], [np.repeat(ds.ID.values, ds.sizes["obs"])]) + ds["drogue_status"] = ( + ["traj", "obs"], + [gdp.drogue_presence(ds.drogue_lost_date.data, ds.time.data[0])], + ) + + # convert attributes to variable + ds["location_type"] = ( + ("traj"), + [False if ds.get("location_type") == "Argos" else True], + ) # 0 for Argos, 1 for GPS + ds["DeployingShip"] = (("traj"), gdp.cut_str(ds.DeployingShip, 20)) + ds["DeploymentStatus"] = (("traj"), gdp.cut_str(ds.DeploymentStatus, 20)) + ds["BuoyTypeManufacturer"] = (("traj"), gdp.cut_str(ds.BuoyTypeManufacturer, 20)) + ds["BuoyTypeSensorArray"] = (("traj"), gdp.cut_str(ds.BuoyTypeSensorArray, 20)) + ds["CurrentProgram"] = ( + ("traj"), + np.int32([gdp.str_to_float(ds.CurrentProgram, -1)]), + ) + ds["PurchaserFunding"] = (("traj"), gdp.cut_str(ds.PurchaserFunding, 20)) + ds["SensorUpgrade"] = (("traj"), gdp.cut_str(ds.SensorUpgrade, 20)) + ds["Transmissions"] = (("traj"), gdp.cut_str(ds.Transmissions, 20)) + ds["DeployingCountry"] = (("traj"), gdp.cut_str(ds.DeployingCountry, 20)) + ds["DeploymentComments"] = ( + ("traj"), + gdp.cut_str( + ds.DeploymentComments.encode("ascii", "ignore").decode("ascii"), 20 + ), + ) # remove non ascii char + ds["ManufactureYear"] = ( + ("traj"), + np.int16([gdp.str_to_float(ds.ManufactureYear, -1)]), + ) + ds["ManufactureMonth"] = ( + ("traj"), + np.int16([gdp.str_to_float(ds.ManufactureMonth, -1)]), + ) + ds["ManufactureSensorType"] = (("traj"), gdp.cut_str(ds.ManufactureSensorType, 20)) + ds["ManufactureVoltage"] = ( + ("traj"), + np.int16([gdp.str_to_float(ds.ManufactureVoltage[:-6], -1)]), + ) # e.g. 56 V + ds["FloatDiameter"] = ( + ("traj"), + [gdp.str_to_float(ds.FloatDiameter[:-3])], + ) # e.g. 
35.5 cm + ds["SubsfcFloatPresence"] = ( + ("traj"), + np.array([gdp.str_to_float(ds.SubsfcFloatPresence)], dtype="bool"), + ) + ds["DrogueType"] = (("traj"), gdp.cut_str(ds.DrogueType, 7)) + ds["DrogueLength"] = ( + ("traj"), + [gdp.str_to_float(ds.DrogueLength[:-2])], + ) # e.g. 4.8 m + ds["DrogueBallast"] = ( + ("traj"), + [gdp.str_to_float(ds.DrogueBallast[:-3])], + ) # e.g. 1.4 kg + ds["DragAreaAboveDrogue"] = ( + ("traj"), + [gdp.str_to_float(ds.DragAreaAboveDrogue[:-4])], + ) # 10.66 m^2 + ds["DragAreaOfDrogue"] = ( + ("traj"), + [gdp.str_to_float(ds.DragAreaOfDrogue[:-4])], + ) # e.g. 416.6 m^2 + ds["DragAreaRatio"] = (("traj"), [gdp.str_to_float(ds.DragAreaRatio)]) # e.g. 39.08 + ds["DrogueCenterDepth"] = ( + ("traj"), + [gdp.str_to_float(ds.DrogueCenterDepth[:-2])], + ) # e.g. 20.0 m + ds["DrogueDetectSensor"] = (("traj"), gdp.cut_str(ds.DrogueDetectSensor, 20)) + + # vars attributes + vars_attrs = { + "ID": {"long_name": "Global Drifter Program Buoy ID", "units": "-"}, + "longitude": {"long_name": "Longitude", "units": "degrees_east"}, + "latitude": {"long_name": "Latitude", "units": "degrees_north"}, + "time": {"long_name": "Time", "units": "seconds since 1970-01-01 00:00:00"}, + "ids": { + "long_name": "Global Drifter Program Buoy ID repeated along observations", + "units": "-", + }, + "rowsize": { + "long_name": "Number of observations per trajectory", + "sample_dimension": "obs", + "units": "-", + }, + "location_type": { + "long_name": "Satellite-based location system", + "units": "-", + "comments": "0 (Argos), 1 (GPS)", + }, + "WMO": { + "long_name": "World Meteorological Organization buoy identification number", + "units": "-", + }, + "expno": {"long_name": "Experiment number", "units": "-"}, + "deploy_date": { + "long_name": "Deployment date and time", + "units": "seconds since 1970-01-01 00:00:00", + }, + "deploy_lon": {"long_name": "Deployment longitude", "units": "degrees_east"}, + "deploy_lat": {"long_name": "Deployment latitude", "units": 
"degrees_north"}, + "start_date": { + "long_name": "First good date and time derived by DAC quality control", + "units": "seconds since 1970-01-01 00:00:00", + }, + "start_lon": { + "long_name": "First good longitude derived by DAC quality control", + "units": "degrees_east", + }, + "start_lat": { + "long_name": "Last good latitude derived by DAC quality control", + "units": "degrees_north", + }, + "end_date": { + "long_name": "Last good date and time derived by DAC quality control", + "units": "seconds since 1970-01-01 00:00:00", + }, + "end_lon": { + "long_name": "Last good longitude derived by DAC quality control", + "units": "degrees_east", + }, + "end_lat": { + "long_name": "Last good latitude derived by DAC quality control", + "units": "degrees_north", + }, + "drogue_lost_date": { + "long_name": "Date and time of drogue loss", + "units": "seconds since 1970-01-01 00:00:00", + }, + "typedeath": { + "long_name": "Type of death", + "units": "-", + "comments": "0 (buoy still alive), 1 (buoy ran aground), 2 (picked up by vessel), 3 (stop transmitting), 4 (sporadic transmissions), 5 (bad batteries), 6 (inactive status)", + }, + "typebuoy": { + "long_name": "Buoy type (see https://www.aoml.noaa.gov/phod/dac/dirall.html)", + "units": "-", + }, + "DeployingShip": {"long_name": "Name of deployment ship", "units": "-"}, + "DeploymentStatus": {"long_name": "Deployment status", "units": "-"}, + "BuoyTypeManufacturer": {"long_name": "Buoy type manufacturer", "units": "-"}, + "BuoyTypeSensorArray": {"long_name": "Buoy type sensor array", "units": "-"}, + "CurrentProgram": { + "long_name": "Current Program", + "units": "-", + "_FillValue": "-1", + }, + "PurchaserFunding": {"long_name": "Purchaser funding", "units": "-"}, + "SensorUpgrade": {"long_name": "Sensor upgrade", "units": "-"}, + "Transmissions": {"long_name": "Transmissions", "units": "-"}, + "DeployingCountry": {"long_name": "Deploying country", "units": "-"}, + "DeploymentComments": {"long_name": "Deployment 
comments", "units": "-"}, + "ManufactureYear": { + "long_name": "Manufacture year", + "units": "-", + "_FillValue": "-1", + }, + "ManufactureMonth": { + "long_name": "Manufacture month", + "units": "-", + "_FillValue": "-1", + }, + "ManufactureSensorType": {"long_name": "Manufacture Sensor Type", "units": "-"}, + "ManufactureVoltage": { + "long_name": "Manufacture voltage", + "units": "V", + "_FillValue": "-1", + }, + "FloatDiameter": {"long_name": "Diameter of surface floater", "units": "cm"}, + "SubsfcFloatPresence": {"long_name": "Subsurface Float Presence", "units": "-"}, + "DrogueType": {"drogue_type": "Drogue Type", "units": "-"}, + "DrogueLength": {"long_name": "Length of drogue.", "units": "m"}, + "DrogueBallast": { + "long_name": "Weight of the drogue's ballast.", + "units": "kg", + }, + "DragAreaAboveDrogue": {"long_name": "Drag area above drogue.", "units": "m^2"}, + "DragAreaOfDrogue": {"long_name": "Drag area drogue.", "units": "m^2"}, + "DragAreaRatio": {"long_name": "Drag area ratio", "units": "m"}, + "DrogueCenterDepth": { + "long_name": "Average depth of the drogue.", + "units": "m", + }, + "DrogueDetectSensor": {"long_name": "Drogue detection sensor", "units": "-"}, + "ve": {"long_name": "Eastward velocity", "units": "m/s"}, + "vn": {"long_name": "Northward velocity", "units": "m/s"}, + "gap": { + "long_name": "Time interval between previous and next location", + "units": "s", + }, + "err_lat": { + "long_name": "95% confidence interval in latitude", + "units": "degrees_north", + }, + "err_lon": { + "long_name": "95% confidence interval in longitude", + "units": "degrees_east", + }, + "err_ve": { + "long_name": "95% confidence interval in eastward velocity", + "units": "m/s", + }, + "err_vn": { + "long_name": "95% confidence interval in northward velocity", + "units": "m/s", + }, + "drogue_status": { + "long_name": "Status indicating the presence of the drogue", + "units": "-", + "flag_values": "1,0", + "flag_meanings": "drogued, undrogued", + }, + 
"sst": { + "long_name": "Fitted sea water temperature", + "units": "Kelvin", + "comments": "Estimated near-surface sea water temperature from drifting buoy measurements. It is the sum of the fitted near-surface non-diurnal sea water temperature and fitted diurnal sea water temperature anomaly. Discrepancies may occur because of rounding.", + }, + "sst1": { + "long_name": "Fitted non-diurnal sea water temperature", + "units": "Kelvin", + "comments": "Estimated near-surface non-diurnal sea water temperature from drifting buoy measurements", + }, + "sst2": { + "long_name": "Fitted diurnal sea water temperature anomaly", + "units": "Kelvin", + "comments": "Estimated near-surface diurnal sea water temperature anomaly from drifting buoy measurements", + }, + "err_sst": { + "long_name": "Standard uncertainty of fitted sea water temperature", + "units": "Kelvin", + "comments": "Estimated one standard error of near-surface sea water temperature estimate from drifting buoy measurements", + }, + "err_sst1": { + "long_name": "Standard uncertainty of fitted non-diurnal sea water temperature", + "units": "Kelvin", + "comments": "Estimated one standard error of near-surface non-diurnal sea water temperature estimate from drifting buoy measurements", + }, + "err_sst2": { + "long_name": "Standard uncertainty of fitted diurnal sea water temperature anomaly", + "units": "Kelvin", + "comments": "Estimated one standard error of near-surface diurnal sea water temperature anomaly estimate from drifting buoy measurements", + }, + "flg_sst": { + "long_name": "Fitted sea water temperature quality flag", + "units": "-", + "flag_values": "0, 1, 2, 3, 4, 5", + "flag_meanings": "no-estimate, no-uncertainty-estimate, estimate-not-in-range-uncertainty-not-in-range, estimate-not-in-range-uncertainty-in-range estimate-in-range-uncertainty-not-in-range, estimate-in-range-uncertainty-in-range", + }, + "flg_sst1": { + "long_name": "Fitted non-diurnal sea water temperature quality flag", + "units": 
"-", + "flag_values": "0, 1, 2, 3, 4, 5", + "flag_meanings": "no-estimate, no-uncertainty-estimate, estimate-not-in-range-uncertainty-not-in-range, estimate-not-in-range-uncertainty-in-range estimate-in-range-uncertainty-not-in-range, estimate-in-range-uncertainty-in-range", + }, + "flg_sst2": { + "long_name": "Fitted diurnal sea water temperature anomaly quality flag", + "units": "-", + "flag_values": "0, 1, 2, 3, 4, 5", + "flag_meanings": "no-estimate, no-uncertainty-estimate, estimate-not-in-range-uncertainty-not-in-range, estimate-not-in-range-uncertainty-in-range estimate-in-range-uncertainty-not-in-range, estimate-in-range-uncertainty-in-range", + }, + } + + # global attributes + attrs = { + "title": "Global Drifter Program hourly drifting buoy collection", + "history": f"version {GDP_VERSION}. Metadata from dirall.dat and deplog.dat", + "Conventions": "CF-1.6", + "time_coverage_start": "", + "time_coverage_end": "", + "date_created": datetime.now().isoformat(), + "publisher_name": "GDP Drifter DAC", + "publisher_email": "aoml.dftr@noaa.gov", + "publisher_url": "https://www.aoml.noaa.gov/phod/gdp", + "license": "freely available", + "processing_level": "Level 2 QC by GDP drifter DAC", + "metadata_link": "https://www.aoml.noaa.gov/phod/dac/dirall.html", + "contributor_name": "NOAA Global Drifter Program", + "contributor_role": "Data Acquisition Center", + "institution": "NOAA Atlantic Oceanographic and Meteorological Laboratory", + "acknowledgement": "Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurioni, Luca; Pazos, Mayra (2022). Hourly location, current velocity, and temperature collected from Global Drifter Program drifters world-wide. [indicate subset used]. NOAA National Centers for Environmental Information. Dataset. https://doi.org/10.25921/x46c-3620. Accessed [date]. Elipot et al. (2022): A Dataset of Hourly Sea Surface Temperature From Drifting Buoys, Scientific Data, 9, 567, https://dx.doi.org/10.1038/s41597-022-01670-2. Elipot et al. 
(2016): A global surface drifter dataset at hourly resolution, J. Geophys. Res.-Oceans, 121, https://dx.doi.org/10.1002/2016JC011716.", + "summary": "Global Drifter Program hourly data", + "doi": "10.25921/x46c-3620", + } + + # set attributes + for var in vars_attrs.keys(): + if var in ds.keys(): + ds[var].attrs = vars_attrs[var] + else: + warnings.warn(f"Variable {var} not found in upstream data; skipping.") + ds.attrs = attrs + + # rename variables + ds = ds.rename_vars({"longitude": "lon", "latitude": "lat"}) + + # Cast float64 variables to float32 to reduce memory footprint. + ds = gdp.cast_float64_variables_to_float32(ds) + + return ds
+ + + +
+[docs] +def to_raggedarray( + drifter_ids: Optional[list[int]] = None, + n_random_id: Optional[int] = None, + url: Optional[str] = GDP_DATA_URL, + tmp_path: Optional[str] = None, +) -> RaggedArray: + """Download and process individual GDP hourly files and return a RaggedArray + instance with the data. + + Parameters + ---------- + drifter_ids : list[int], optional + List of drifters to retrieve (Default: all) + n_random_id : list[int], optional + Randomly select n_random_id drifter NetCDF files + url : str, optional + URL from which to download the data (Default: GDP_DATA_URL). + Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL. + tmp_path : str, optional + Path to the directory where the individual NetCDF files are stored + (default varies depending on operating system; /tmp/clouddrift/gdp on Linux) + + Returns + ------- + out : RaggedArray + A RaggedArray instance of the requested dataset + + Examples + -------- + + Invoke `to_raggedarray` without any arguments to download all drifter data + from the 2.01 GDP feed: + + >>> from clouddrift.adapters.gdp1h import to_raggedarray + >>> ra = to_raggedarray() + + To download a random sample of 100 drifters, for example for development + or testing, use the `n_random_id` argument: + + >>> ra = to_raggedarray(n_random_id=100) + + To download a specific list of drifters, use the `drifter_ids` argument: + + >>> ra = to_raggedarray(drifter_ids=[44136, 54680, 83463]) + + To download the experimental 2.01 GDP feed, use the `url` argument to + specify the experimental feed URL: + + >>> from clouddrift.adapters.gdp1h import GDP_DATA_URL_EXPERIMENTAL, to_raggedarray + >>> ra = to_raggedarray(url=GDP_DATA_URL_EXPERIMENTAL) + + Finally, `to_raggedarray` returns a `RaggedArray` instance which provides + a convenience method to emit a `xarray.Dataset` instance: + + >>> ds = ra.to_xarray() + + To write the ragged array dataset to a NetCDF file on disk, do + + >>> ds.to_netcdf("gdp1h.nc", format="NETCDF4") + + Alternatively, to 
write the ragged array to a Parquet file, first create + it as an Awkward Array: + + >>> arr = ra.to_awkward() + >>> arr.to_parquet("gdp1h.parquet") + """ + + # adjust the tmp_path if using the experimental source + if tmp_path is None: + tmp_path = GDP_TMP_PATH if url == GDP_DATA_URL else GDP_TMP_PATH_EXPERIMENTAL + + ids = download(drifter_ids, n_random_id, url, tmp_path) + + if url == GDP_DATA_URL: + filename_pattern = "drifter_hourly_{id}.nc" + elif url == GDP_DATA_URL_EXPERIMENTAL: + filename_pattern = "drifter_hourly_{id}.nc" + else: + raise ValueError(f"url must be {GDP_DATA_URL} or {GDP_DATA_URL_EXPERIMENTAL}.") + + ra = RaggedArray.from_files( + indices=ids, + preprocess_func=preprocess, + name_coords=gdp.GDP_COORDS, + name_meta=gdp.GDP_METADATA, + name_data=GDP_DATA, + rowsize_func=gdp.rowsize, + filename_pattern=filename_pattern, + tmp_path=tmp_path, + ) + + # set dynamic global attributes + ra.attrs_global[ + "time_coverage_start" + ] = f"{datetime(1970,1,1) + timedelta(seconds=int(np.min(ra.coords['time']))):%Y-%m-%d:%H:%M:%SZ}" + ra.attrs_global[ + "time_coverage_end" + ] = f"{datetime(1970,1,1) + timedelta(seconds=int(np.max(ra.coords['time']))):%Y-%m-%d:%H:%M:%SZ}" + + return ra
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/adapters/gdp6h.html b/_modules/clouddrift/adapters/gdp6h.html new file mode 100644 index 00000000..2b70dfc8 --- /dev/null +++ b/_modules/clouddrift/adapters/gdp6h.html @@ -0,0 +1,978 @@ + + + + + + + + + + clouddrift.adapters.gdp6h — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.adapters.gdp6h

+"""
+This module provides functions and metadata that can be used to convert the
+6-hourly Global Drifter Program (GDP) data to a ``clouddrift.RaggedArray``
+instance.
+"""
+
+import clouddrift.adapters.gdp as gdp
+from clouddrift.adapters.utils import download_with_progress
+from clouddrift.raggedarray import RaggedArray
+from datetime import datetime, timedelta
+import numpy as np
+import urllib.request
+import re
+import tempfile
+from typing import Optional
+import os
+import warnings
+import xarray as xr
+
+GDP_VERSION = "September 2023"
+
+GDP_DATA_URL = "https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/6h/"
+GDP_TMP_PATH = os.path.join(tempfile.gettempdir(), "clouddrift", "gdp6h")
+GDP_DATA = [
+    "lon",
+    "lat",
+    "ve",
+    "vn",
+    "temp",
+    "err_lat",
+    "err_lon",
+    "err_temp",
+    "drogue_status",
+]
+
+
+
+[docs] +def download( + drifter_ids: list = None, + n_random_id: int = None, + url: str = GDP_DATA_URL, + tmp_path: str = GDP_TMP_PATH, +): + """Download individual NetCDF files from the AOML server. + + Parameters + ---------- + drifter_ids : list + List of drifter to retrieve (Default: all) + n_random_id : int + Randomly select n_random_id drifter IDs to download (Default: None) + url : str + URL from which to download the data (Default: GDP_DATA_URL). Alternatively, it can be GDP_DATA_URL_EXPERIMENTAL. + tmp_path : str, optional + Path to the directory where the individual NetCDF files are stored + (default varies depending on operating system; /tmp/clouddrift/gdp6h on Linux) + + Returns + ------- + out : list + List of retrieved drifters + """ + + print(f"Downloading GDP 6-hourly data to {tmp_path}...") + + # Create a temporary directory if doesn't already exists. + os.makedirs(tmp_path, exist_ok=True) + + pattern = "drifter_6h_[0-9]*.nc" + directory_list = [ + "netcdf_1_5000", + "netcdf_5001_10000", + "netcdf_10001_15000", + "netcdf_15001_current", + ] + + # retrieve all drifter ID numbers + if drifter_ids is None: + urlpath = urllib.request.urlopen(url) + string = urlpath.read().decode("utf-8") + drifter_urls = [] + for dir in directory_list: + urlpath = urllib.request.urlopen(os.path.join(url, dir)) + string = urlpath.read().decode("utf-8") + filelist = list(set(re.compile(pattern).findall(string))) + drifter_urls += [os.path.join(url, dir, f) for f in filelist] + + # retrieve only a subset of n_random_id trajectories + if n_random_id: + if n_random_id > len(drifter_urls): + warnings.warn( + f"Retrieving all listed trajectories because {n_random_id} is larger than the {len(drifter_ids)} listed trajectories." 
+ ) + else: + rng = np.random.RandomState(42) + drifter_urls = rng.choice(drifter_urls, n_random_id, replace=False) + + download_with_progress( + [(url, os.path.join(tmp_path, os.path.basename(url))) for url in drifter_urls] + ) + + # Download the metadata so we can order the drifter IDs by end date. + gdp_metadata = gdp.get_gdp_metadata() + drifter_ids = [ + int(os.path.basename(f).split("_")[2].split(".")[0]) for f in drifter_urls + ] + + return gdp.order_by_date(gdp_metadata, drifter_ids)
+ + + +
+[docs] +def preprocess(index: int, **kwargs) -> xr.Dataset: + """Extract and preprocess the Lagrangian data and attributes. + + This function takes an identification number that can be used to create a + file or url pattern or select data from a Dataframe. It then preprocesses + the data and returns a clean Xarray Dataset. + + Parameters + ---------- + index : int + Drifter's identification number + + Returns + ------- + ds : xr.Dataset + Xarray Dataset containing the data and attributes + """ + ds = xr.load_dataset( + os.path.join(kwargs["tmp_path"], kwargs["filename_pattern"].format(id=index)), + decode_times=False, + decode_coords=False, + ) + + # parse the date with custom function + ds["deploy_date"].data = gdp.decode_date(np.array([ds.deploy_date.data[0]])) + ds["end_date"].data = gdp.decode_date(np.array([ds.end_date.data[0]])) + ds["drogue_lost_date"].data = gdp.decode_date( + np.array([ds.drogue_lost_date.data[0]]) + ) + ds["time"].data = gdp.decode_date(np.array([ds.time.data[0]])) + + # convert fill values to nan + for var in [ + "err_lon", + "err_lat", + "temp", + "err_temp", + ]: + try: + ds[var].data = gdp.fill_values(ds[var].data) + except KeyError: + warnings.warn(f"Variable {var} not found; skipping.") + + # fix missing values stored as str + for var in [ + "longitude", + "latitude", + "err_lat", + "err_lon", + "ve", + "vn", + "temp", + "err_temp", + ]: + try: + ds[var].encoding["missing value"] = -1e-34 + except KeyError: + warnings.warn(f"Variable {var} not found in upstream data; skipping.") + + # convert type of some variable + target_dtype = { + "ID": "int64", + "WMO": "int32", + "expno": "int32", + "typedeath": "int8", + } + + for var in target_dtype.keys(): + if var in ds.keys(): + ds[var].data = ds[var].data.astype(target_dtype[var]) + else: + warnings.warn(f"Variable {var} not found in upstream data; skipping.") + + # new variables + ds["ids"] = (["traj", "obs"], [np.repeat(ds.ID.values, ds.sizes["obs"])]) + ds["drogue_status"] = ( + 
["traj", "obs"], + [gdp.drogue_presence(ds.drogue_lost_date.data, ds.time.data[0])], + ) + + # convert attributes to variable + ds["location_type"] = ( + ("traj"), + [False if ds.get("location_type") == "Argos" else True], + ) # 0 for Argos, 1 for GPS + ds["DeployingShip"] = (("traj"), gdp.cut_str(ds.DeployingShip, 20)) + ds["DeploymentStatus"] = (("traj"), gdp.cut_str(ds.DeploymentStatus, 20)) + ds["BuoyTypeManufacturer"] = (("traj"), gdp.cut_str(ds.BuoyTypeManufacturer, 20)) + ds["BuoyTypeSensorArray"] = (("traj"), gdp.cut_str(ds.BuoyTypeSensorArray, 20)) + ds["CurrentProgram"] = ( + ("traj"), + np.int32([gdp.str_to_float(ds.CurrentProgram, -1)]), + ) + ds["PurchaserFunding"] = (("traj"), gdp.cut_str(ds.PurchaserFunding, 20)) + ds["SensorUpgrade"] = (("traj"), gdp.cut_str(ds.SensorUpgrade, 20)) + ds["Transmissions"] = (("traj"), gdp.cut_str(ds.Transmissions, 20)) + ds["DeployingCountry"] = (("traj"), gdp.cut_str(ds.DeployingCountry, 20)) + ds["DeploymentComments"] = ( + ("traj"), + gdp.cut_str( + ds.DeploymentComments.encode("ascii", "ignore").decode("ascii"), 20 + ), + ) # remove non ascii char + ds["ManufactureYear"] = ( + ("traj"), + np.int16([gdp.str_to_float(ds.ManufactureYear, -1)]), + ) + ds["ManufactureMonth"] = ( + ("traj"), + np.int16([gdp.str_to_float(ds.ManufactureMonth, -1)]), + ) + ds["ManufactureSensorType"] = (("traj"), gdp.cut_str(ds.ManufactureSensorType, 20)) + ds["ManufactureVoltage"] = ( + ("traj"), + np.int16([gdp.str_to_float(ds.ManufactureVoltage[:-6], -1)]), + ) # e.g. 56 V + ds["FloatDiameter"] = ( + ("traj"), + [gdp.str_to_float(ds.FloatDiameter[:-3])], + ) # e.g. 35.5 cm + ds["SubsfcFloatPresence"] = ( + ("traj"), + np.array([gdp.str_to_float(ds.SubsfcFloatPresence)], dtype="bool"), + ) + ds["DrogueType"] = (("traj"), gdp.cut_str(ds.DrogueType, 7)) + ds["DrogueLength"] = ( + ("traj"), + [gdp.str_to_float(ds.DrogueLength[:-2])], + ) # e.g. 
4.8 m + ds["DrogueBallast"] = ( + ("traj"), + [gdp.str_to_float(ds.DrogueBallast[:-3])], + ) # e.g. 1.4 kg + ds["DragAreaAboveDrogue"] = ( + ("traj"), + [gdp.str_to_float(ds.DragAreaAboveDrogue[:-4])], + ) # 10.66 m^2 + ds["DragAreaOfDrogue"] = ( + ("traj"), + [gdp.str_to_float(ds.DragAreaOfDrogue[:-4])], + ) # e.g. 416.6 m^2 + ds["DragAreaRatio"] = (("traj"), [gdp.str_to_float(ds.DragAreaRatio)]) # e.g. 39.08 + ds["DrogueCenterDepth"] = ( + ("traj"), + [gdp.str_to_float(ds.DrogueCenterDepth[:-2])], + ) # e.g. 20.0 m + ds["DrogueDetectSensor"] = (("traj"), gdp.cut_str(ds.DrogueDetectSensor, 20)) + + # vars attributes + vars_attrs = { + "ID": {"long_name": "Global Drifter Program Buoy ID", "units": "-"}, + "longitude": {"long_name": "Longitude", "units": "degrees_east"}, + "latitude": {"long_name": "Latitude", "units": "degrees_north"}, + "time": {"long_name": "Time", "units": "seconds since 1970-01-01 00:00:00"}, + "ids": { + "long_name": "Global Drifter Program Buoy ID repeated along observations", + "units": "-", + }, + "rowsize": { + "long_name": "Number of observations per trajectory", + "sample_dimension": "obs", + "units": "-", + }, + "location_type": { + "long_name": "Satellite-based location system", + "units": "-", + "comments": "0 (Argos), 1 (GPS)", + }, + "WMO": { + "long_name": "World Meteorological Organization buoy identification number", + "units": "-", + }, + "expno": {"long_name": "Experiment number", "units": "-"}, + "deploy_date": { + "long_name": "Deployment date and time", + "units": "seconds since 1970-01-01 00:00:00", + }, + "deploy_lon": {"long_name": "Deployment longitude", "units": "degrees_east"}, + "deploy_lat": {"long_name": "Deployment latitude", "units": "degrees_north"}, + "end_date": { + "long_name": "End date and time", + "units": "seconds since 1970-01-01 00:00:00", + }, + "end_lon": {"long_name": "End latitude", "units": "degrees_north"}, + "end_lat": {"long_name": "End longitude", "units": "degrees_east"}, + "drogue_lost_date": 
{ + "long_name": "Date and time of drogue loss", + "units": "seconds since 1970-01-01 00:00:00", + }, + "typedeath": { + "long_name": "Type of death", + "units": "-", + "comments": "0 (buoy still alive), 1 (buoy ran aground), 2 (picked up by vessel), 3 (stop transmitting), 4 (sporadic transmissions), 5 (bad batteries), 6 (inactive status)", + }, + "typebuoy": { + "long_name": "Buoy type (see https://www.aoml.noaa.gov/phod/dac/dirall.html)", + "units": "-", + }, + "DeployingShip": {"long_name": "Name of deployment ship", "units": "-"}, + "DeploymentStatus": {"long_name": "Deployment status", "units": "-"}, + "BuoyTypeManufacturer": {"long_name": "Buoy type manufacturer", "units": "-"}, + "BuoyTypeSensorArray": {"long_name": "Buoy type sensor array", "units": "-"}, + "CurrentProgram": { + "long_name": "Current Program", + "units": "-", + "_FillValue": "-1", + }, + "PurchaserFunding": {"long_name": "Purchaser funding", "units": "-"}, + "SensorUpgrade": {"long_name": "Sensor upgrade", "units": "-"}, + "Transmissions": {"long_name": "Transmissions", "units": "-"}, + "DeployingCountry": {"long_name": "Deploying country", "units": "-"}, + "DeploymentComments": {"long_name": "Deployment comments", "units": "-"}, + "ManufactureYear": { + "long_name": "Manufacture year", + "units": "-", + "_FillValue": "-1", + }, + "ManufactureMonth": { + "long_name": "Manufacture month", + "units": "-", + "_FillValue": "-1", + }, + "ManufactureSensorType": {"long_name": "Manufacture Sensor Type", "units": "-"}, + "ManufactureVoltage": { + "long_name": "Manufacture voltage", + "units": "V", + "_FillValue": "-1", + }, + "FloatDiameter": {"long_name": "Diameter of surface floater", "units": "cm"}, + "SubsfcFloatPresence": {"long_name": "Subsurface Float Presence", "units": "-"}, + "DrogueType": {"drogue_type": "Drogue Type", "units": "-"}, + "DrogueLength": {"long_name": "Length of drogue.", "units": "m"}, + "DrogueBallast": { + "long_name": "Weight of the drogue's ballast.", + "units": "kg", 
+ }, + "DragAreaAboveDrogue": {"long_name": "Drag area above drogue.", "units": "m^2"}, + "DragAreaOfDrogue": {"long_name": "Drag area drogue.", "units": "m^2"}, + "DragAreaRatio": {"long_name": "Drag area ratio", "units": "m"}, + "DrogueCenterDepth": { + "long_name": "Average depth of the drogue.", + "units": "m", + }, + "DrogueDetectSensor": {"long_name": "Drogue detection sensor", "units": "-"}, + "ve": {"long_name": "Eastward velocity", "units": "m/s"}, + "vn": {"long_name": "Northward velocity", "units": "m/s"}, + "err_lat": { + "long_name": "95% confidence interval in latitude", + "units": "degrees_north", + }, + "err_lon": { + "long_name": "95% confidence interval in longitude", + "units": "degrees_east", + }, + "drogue_status": { + "long_name": "Status indicating the presence of the drogue", + "units": "-", + "flag_values": "1,0", + "flag_meanings": "drogued, undrogued", + }, + "temp": { + "long_name": "Fitted sea water temperature", + "units": "Kelvin", + "comments": "Estimated near-surface sea water temperature from drifting buoy measurements. It is the sum of the fitted near-surface non-diurnal sea water temperature and fitted diurnal sea water temperature anomaly. Discrepancies may occur because of rounding.", + }, + "err_temp": { + "long_name": "Standard uncertainty of fitted sea water temperature", + "units": "Kelvin", + "comments": "Estimated one standard error of near-surface sea water temperature estimate from drifting buoy measurements", + }, + } + + # global attributes + attrs = { + "title": "Global Drifter Program drifting buoy collection", + "history": f"version {GDP_VERSION}. 
Metadata from dirall.dat and deplog.dat", + "Conventions": "CF-1.6", + "time_coverage_start": "", + "time_coverage_end": "", + "date_created": datetime.now().isoformat(), + "publisher_name": "GDP Drifter DAC", + "publisher_email": "aoml.dftr@noaa.gov", + "publisher_url": "https://www.aoml.noaa.gov/phod/gdp", + "license": "freely available", + "processing_level": "Level 2 QC by GDP drifter DAC", + "metadata_link": "https://www.aoml.noaa.gov/phod/dac/dirall.html", + "contributor_name": "NOAA Global Drifter Program", + "contributor_role": "Data Acquisition Center", + "institution": "NOAA Atlantic Oceanographic and Meteorological Laboratory", + "acknowledgement": f"Lumpkin, Rick; Centurioni, Luca (2019). NOAA Global Drifter Program quality-controlled 6-hour interpolated data from ocean surface drifting buoys. [indicate subset used]. NOAA National Centers for Environmental Information. Dataset. https://doi.org/10.25921/7ntx-z961. Accessed {datetime.utcnow().strftime('%d %B %Y')}.", + "summary": "Global Drifter Program six-hourly data", + "doi": "10.25921/7ntx-z961", + } + + # set attributes + for var in vars_attrs.keys(): + if var in ds.keys(): + ds[var].attrs = vars_attrs[var] + else: + warnings.warn(f"Variable {var} not found in upstream data; skipping.") + ds.attrs = attrs + + # rename variables + ds = ds.rename_vars({"longitude": "lon", "latitude": "lat"}) + + # Cast float64 variables to float32 to reduce memory footprint. + ds = gdp.cast_float64_variables_to_float32(ds) + + return ds
+ + + +
+[docs] +def to_raggedarray( + drifter_ids: Optional[list[int]] = None, + n_random_id: Optional[int] = None, + tmp_path: Optional[str] = GDP_TMP_PATH, +) -> RaggedArray: + """Download and process individual GDP 6-hourly files and return a + RaggedArray instance with the data. + + Parameters + ---------- + drifter_ids : list[int], optional + List of drifters to retrieve (Default: all) + n_random_id : list[int], optional + Randomly select n_random_id drifter NetCDF files + tmp_path : str, optional + Path to the directory where the individual NetCDF files are stored + (default varies depending on operating system; /tmp/clouddrift/gdp6h on Linux) + + Returns + ------- + out : RaggedArray + A RaggedArray instance of the requested dataset + + Examples + -------- + + Invoke `to_raggedarray` without any arguments to download all drifter data + from the 6-hourly GDP feed: + + >>> from clouddrift.adapters.gdp6h import to_raggedarray + >>> ra = to_raggedarray() + + To download a random sample of 100 drifters, for example for development + or testing, use the `n_random_id` argument: + + >>> ra = to_raggedarray(n_random_id=100) + + To download a specific list of drifters, use the `drifter_ids` argument: + + >>> ra = to_raggedarray(drifter_ids=[54375, 114956, 126934]) + + Finally, `to_raggedarray` returns a `RaggedArray` instance which provides + a convenience method to emit a `xarray.Dataset` instance: + + >>> ds = ra.to_xarray() + + To write the ragged array dataset to a NetCDF file on disk, do + + >>> ds.to_netcdf("gdp6h.nc", format="NETCDF4") + + Alternatively, to write the ragged array to a Parquet file, first create + it as an Awkward Array: + + >>> arr = ra.to_awkward() + >>> arr.to_parquet("gdp6h.parquet") + """ + ids = download(drifter_ids, n_random_id, GDP_DATA_URL, tmp_path) + + ra = RaggedArray.from_files( + indices=ids, + preprocess_func=preprocess, + name_coords=gdp.GDP_COORDS, + name_meta=gdp.GDP_METADATA, + name_data=GDP_DATA, + rowsize_func=gdp.rowsize, + 
filename_pattern="drifter_6h_{id}.nc", + tmp_path=tmp_path, + ) + + # update dynamic global attributes + ra.attrs_global[ + "time_coverage_start" + ] = f"{datetime(1970,1,1) + timedelta(seconds=int(np.min(ra.coords['time']))):%Y-%m-%d:%H:%M:%SZ}" + ra.attrs_global[ + "time_coverage_end" + ] = f"{datetime(1970,1,1) + timedelta(seconds=int(np.max(ra.coords['time']))):%Y-%m-%d:%H:%M:%SZ}" + + return ra
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/adapters/glad.html b/_modules/clouddrift/adapters/glad.html new file mode 100644 index 00000000..432ee5df --- /dev/null +++ b/_modules/clouddrift/adapters/glad.html @@ -0,0 +1,591 @@ + + + + + + + + + + clouddrift.adapters.glad — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.adapters.glad

+"""
+This module defines functions used to adapt the Grand LAgrangian Deployment
+(GLAD) dataset as a ragged-array Xarray Dataset.
+
+The dataset and its description are hosted at https://doi.org/10.7266/N7VD6WC8.
+
+Example
+-------
+>>> from clouddrift.adapters import glad
+>>> ds = glad.to_xarray()
+
+Reference
+---------
+Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012. Distributed by: Gulf of Mexico Research Initiative Information and Data Cooperative (GRIIDC), Harte Research Institute, Texas A&M University–Corpus Christi. doi:10.7266/N7VD6WC8
+"""
+from clouddrift.adapters.utils import download_with_progress
+from io import BytesIO
+import numpy as np
+import pandas as pd
+import xarray as xr
+
+
+
+[docs] +def get_dataframe() -> pd.DataFrame: + """Get the GLAD dataset as a pandas DataFrame.""" + url = "https://data.gulfresearchinitiative.org/pelagos-symfony/api/file/download/169841" + # GRIIDC server doesn't provide Content-Length header, so we'll hardcode + # the expected data length here. + file_size = 155330876 + buf = BytesIO(b"") + download_with_progress([(url, buf)]) + buf.seek(0) + column_names = [ + "id", + "date", + "time", + "latitude", + "longitude", + "position_error", + "u", + "v", + "velocity_error", + ] + df = pd.read_csv(buf, delim_whitespace=True, skiprows=5, names=column_names) + df["obs"] = pd.to_datetime(df["date"] + " " + df["time"]) + df.drop(["date", "time"], axis=1, inplace=True) + return df
+ + + +
+[docs] +def to_xarray() -> xr.Dataset: + """Return the GLAD data as a ragged-array Xarray Dataset.""" + df = get_dataframe() + ds = df.to_xarray() + + traj, rowsize = np.unique(ds.id, return_counts=True) + + # Make the dataset compatible with clouddrift functions. + ds = ( + ds.swap_dims({"index": "obs"}) + .drop_vars(["id", "index"]) + .assign_coords(traj=traj) + .assign({"rowsize": ("traj", rowsize)}) + .rename_vars({"obs": "time", "traj": "id"}) + ) + + # Cast double floats to singles + for var in ds.variables: + if ds[var].dtype == "float64": + ds[var] = ds[var].astype("float32") + + # Set variable attributes + ds["longitude"].attrs = { + "long_name": "longitude", + "standard_name": "longitude", + "units": "degrees_east", + } + + ds["latitude"].attrs = { + "long_name": "latitude", + "standard_name": "latitude", + "units": "degrees_north", + } + + ds["position_error"].attrs = { + "long_name": "position_error", + "units": "m", + } + + ds["u"].attrs = { + "long_name": "eastward_sea_water_velocity", + "standard_name": "eastward_sea_water_velocity", + "units": "m s-1", + } + + ds["v"].attrs = { + "long_name": "northward_sea_water_velocity", + "standard_name": "northward_sea_water_velocity", + "units": "m s-1", + } + + ds["velocity_error"].attrs = { + "long_name": "velocity_error", + "units": "m s-1", + } + + # Set global attributes + ds.attrs = { + "title": "GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012", + "institution": "Consortium for Advanced Research on Transport of Hydrocarbon in the Environment (CARTHE)", + "source": "CODE-style drifters", + "history": "Downloaded from https://data.gulfresearchinitiative.org/data/R1.x134.073:0004 and post-processed into a ragged-array Xarray Dataset by CloudDrift", + "references": "Özgökmen, Tamay. 2013. 
GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012. Distributed by: Gulf of Mexico Research Initiative Information and Data Cooperative (GRIIDC), Harte Research Institute, Texas A&M University–Corpus Christi. doi:10.7266/N7VD6WC8", + } + + return ds
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/adapters/mosaic.html b/_modules/clouddrift/adapters/mosaic.html new file mode 100644 index 00000000..dedb5364 --- /dev/null +++ b/_modules/clouddrift/adapters/mosaic.html @@ -0,0 +1,633 @@ + + + + + + + + + + clouddrift.adapters.mosaic — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.adapters.mosaic

+"""
+This module defines functions used to adapt the MOSAiC sea-ice drift dataset as
+a ragged-array dataset.
+
+The dataset is hosted at https://doi.org/10.18739/A2KP7TS83.
+
+Reference: Angela Bliss, Jennifer Hutchings, Philip Anderson, Philipp Anhaus,
+Hans Jakob Belter, Jørgen Berge, Vladimir Bessonov, Bin Cheng, Sylvia Cole,
+Dave Costa, Finlo Cottier, Christopher J Cox, Pedro R De La Torre, Dmitry V Divine,
+Gilbert Emzivat, Ying-Chih Fang, Steven Fons, Michael Gallagher, Maxime Geoffrey,
+Mats A Granskog, ... Guangyu Zuo. (2022). Sea ice drift tracks from the Distributed
+Network of autonomous buoys deployed during the Multidisciplinary drifting Observatory
+for the Study of Arctic Climate (MOSAiC) expedition 2019 - 2021. Arctic Data Center.
+doi:10.18739/A2KP7TS83.
+
+Example
+-------
+>>> from clouddrift.adapters import mosaic
+>>> ds = mosaic.to_xarray()
+"""
+from datetime import datetime
+from io import BytesIO
+import numpy as np
+import pandas as pd
+import requests
+from tqdm import tqdm
+import xarray as xr
+import xml.etree.ElementTree as ET
+
+from clouddrift.adapters.utils import download_with_progress
+
+MOSAIC_VERSION = "2022"
+
+
+
+[docs] +def get_dataframes() -> tuple[pd.DataFrame, pd.DataFrame]: + """Get the MOSAiC data (obs dimension in the target Dataset) and metadata + (traj dimension in the target dataset ) as pandas DataFrames.""" + xml = get_repository_metadata() + filenames, urls = get_file_urls(xml) + exclude_patterns = ["site_buoy_summary", "buoy_list"] + data_filenames = [ + f for f in filenames if not any([s in f for s in exclude_patterns]) + ] + data_urls = [ + f + for n, f in enumerate(urls) + if not any([s in filenames[n] for s in exclude_patterns]) + ] + sensor_ids = [f.split("_")[-1].rstrip(".csv") for f in data_filenames] + sensor_list_url = urls[ + filenames.index([f for f in filenames if "buoy_list" in f].pop()) + ] + sensors = pd.read_csv(sensor_list_url) + + # Sort the urls by the order of sensor IDs in the sensor list + order_index = {id: n for n, id in enumerate(sensors["Sensor ID"])} + sorted_indices = sorted( + range(len(sensor_ids)), key=lambda k: order_index[sensor_ids[k]] + ) + sorted_data_urls = [data_urls[i] for i in sorted_indices] + buffers = [BytesIO(b"") * len(sorted_data_urls)] + + download_with_progress(zip(sorted_data_urls, buffers), desc="Downloading data") + dfs = [pd.read_csv(b) for b in buffers] + obs_df = pd.concat(dfs) + + # Use the index of the concatenated DataFrame to determine the count/rowsize + zero_indices = [n for n, val in enumerate(list(obs_df.index)) if val == 0] + sensors["rowsize"] = np.diff(zero_indices + [len(obs_df)]) + + # Make the time column the index of the DataFrame, which will make it a + # coordinate in the xarray Dataset. + obs_df.set_index("datetime", inplace=True) + sensors.set_index("Sensor ID", inplace=True) + + return obs_df, sensors
+ + + +
+[docs] +def get_file_urls(xml: str) -> list[str]: + """Pass the MOSAiC XML string and return the list of filenames and URLs.""" + filenames = [ + tag.text + for tag in ET.fromstring(xml).findall("./dataset/dataTable/physical/objectName") + ] + urls = [ + tag.text + for tag in ET.fromstring(xml).findall( + "./dataset/dataTable/physical/distribution/online/url" + ) + ] + return filenames, urls
+ + + +
+[docs] +def get_repository_metadata() -> str: + """Get the MOSAiC repository metadata as an XML string. + Pass this string to other get_* functions to extract the data you need. + """ + url = "https://arcticdata.io/metacat/d1/mn/v2/object/doi:10.18739/A2KP7TS83" + r = requests.get(url) + return r.content
+ + + +
+[docs] +def to_xarray(): + """Return the MOSAiC data as an ragged-array Xarray Dataset.""" + + # Download the data and metadata as pandas DataFrames. + obs_df, traj_df = get_dataframes() + + # Dates and datetimes are strings; convert them to datetime64 instances + # for compatibility with CloudDrift's analysis functions. + obs_df.index = pd.to_datetime(obs_df.index) + for col in [ + "Deployment Date", + "Deployment Datetime", + "First Data Datetime", + "Last Data Datetime", + ]: + traj_df[col] = pd.to_datetime(traj_df[col]) + + # Merge into an Xarray Dataset and rename the dimensions and variables to + # follow the CloudDrift convention. + ds = xr.merge([obs_df.to_xarray(), traj_df.to_xarray()]) + ds = ds.rename_dims({"datetime": "obs", "Sensor ID": "traj"}).rename_vars( + {"datetime": "time", "Sensor ID": "id"} + ) + + # Set variable attributes + ds["longitude"].attrs = { + "long_name": "longitude", + "standard_name": "longitude", + "units": "degrees_east", + } + + ds["latitude"].attrs = { + "long_name": "latitude", + "standard_name": "latitude", + "units": "degrees_north", + } + + # global attributes + ds.attrs = { + "title": "Multidisciplinary drifting Observatory for the Study of Arctic Climate (MOSAiC) expedition 2019 - 2021", + "history": f"Dataset updated in {MOSAIC_VERSION}", + "date_created": datetime.now().isoformat(), + "publisher_name": "NSF Arctic Data Center", + "publisher_url": "https://arcticdata.io/catalog/view/doi:10.18739/A2KP7TS83", + "license": "Creative Commons Attribution 4.0 International License (http://creativecommons.org/licenses/by/4.0/)", + } + + return ds
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/adapters/subsurface_floats.html b/_modules/clouddrift/adapters/subsurface_floats.html new file mode 100644 index 00000000..94d3bde4 --- /dev/null +++ b/_modules/clouddrift/adapters/subsurface_floats.html @@ -0,0 +1,674 @@ + + + + + + + + + + clouddrift.adapters.subsurface_floats — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.adapters.subsurface_floats

+"""
+This module defines functions to adapt as a ragged-array dataset a collection of data 
+from 2193 trajectories of SOFAR, APEX, and RAFOS subsurface floats from 52 experiments 
+across the world between 1989 and 2015.
+
+The dataset is hosted at https://www.aoml.noaa.gov/phod/float_traj/index.php
+
+Example
+-------
+>>> from clouddrift.adapters import subsurface_floats
+>>> ds = subsurface_floats.to_xarray()
+"""
+
+from datetime import datetime
+import numpy as np
+import os
+import pandas as pd
+import scipy.io
+import tempfile
+import xarray as xr
+import warnings
+
+from clouddrift.adapters.utils import download_with_progress
+
+SUBSURFACE_FLOATS_DATA_URL = (
+    "https://www.aoml.noaa.gov/phod/float_traj/files/allFloats_12122017.mat"
+)
+SUBSURFACE_FLOATS_VERSION = "December 2017 (version 2)"
+SUBSURFACE_FLOATS_TMP_PATH = os.path.join(
+    tempfile.gettempdir(), "clouddrift", "subsurface_floats"
+)
+
+
+
+[docs] +def download(file: str): + download_with_progress([(SUBSURFACE_FLOATS_DATA_URL, file)])
+ + + +
+[docs] +def to_xarray( + tmp_path: str = None, +): + if tmp_path is None: + tmp_path = SUBSURFACE_FLOATS_TMP_PATH + os.makedirs(tmp_path, exist_ok=True) + + local_file = f"{tmp_path}/{SUBSURFACE_FLOATS_DATA_URL.split('/')[-1]}" + download(local_file) + source_data = scipy.io.loadmat(local_file) + + # metadata + meta_variables = [ + "expList", + "expName", + "expOrg", + "expPI", + "fltType", + "indexExp", + "indexFlt", + ] + + metadata = {} + for var in meta_variables: + metadata[var] = np.array([v.flatten()[0] for v in source_data[var].flatten()]) + + # bring the expList to the "traj" dimension + _, float_per_exp = np.unique(metadata["indexExp"], return_counts=True) + metadata["expList"] = np.repeat(metadata["expList"], float_per_exp) + + # data + data_variables = ["dtnum", "lon", "lat", "p", "t", "u", "v"] + data = {} + for var in data_variables: + data[var] = np.concatenate([v.flatten() for v in source_data[var].flatten()]) + + # create rowsize variable + rowsize = np.array([len(v) for v in source_data["dtnum"].flatten()]) + assert np.sum(rowsize) == len(data["dtnum"]) + + # Unix epoch start (1970-01-01) + origin_datenum = 719529 + + ds = xr.Dataset( + { + "expList": (["traj"], metadata["expList"]), + "expName": (["traj"], metadata["expName"]), + "expOrg": (["traj"], metadata["expOrg"]), + "expPI": (["traj"], metadata["expPI"]), + "indexExp": (["traj"], metadata["indexExp"]), + "fltType": (["traj"], metadata["fltType"]), + "id": (["traj"], metadata["indexFlt"]), + "rowsize": (["traj"], rowsize), + "time": ( + ["obs"], + pd.to_datetime(data["dtnum"] - origin_datenum, unit="D"), + ), + "lon": (["obs"], data["lon"]), + "lat": (["obs"], data["lat"]), + "pres": (["obs"], data["p"]), + "temp": (["obs"], data["t"]), + "ve": (["obs"], data["u"]), + "vn": (["obs"], data["v"]), + } + ) + + # Cast double floats to singles + double_vars = ["lat", "lon"] + for var in [v for v in ds.variables if v not in double_vars]: + if ds[var].dtype == "float64": + ds[var] = 
ds[var].astype("float32") + + # define attributes + vars_attrs = { + "expList": { + "long_name": "Experiment list", + "units": "-", + }, + "expName": { + "long_name": "Experiment name", + "units": "-", + }, + "expOrg": { + "long_name": "Experiment organization", + "units": "-", + }, + "expPI": { + "long_name": "Experiment principal investigator", + "units": "-", + }, + "indexExp": { + "long_name": "Experiment index number", + "units": "-", + "comment": "The index matches the float with its experiment metadata", + }, + "fltType": { + "long_name": "Float type", + "units": "-", + }, + "id": {"long_name": "Float ID", "units": "-"}, + "lon": { + "long_name": "Longitude", + "standard_name": "longitude", + "units": "degrees_east", + }, + "lat": { + "long_name": "Latitude", + "standard_name": "latitude", + "units": "degrees_north", + }, + "rowsize": { + "long_name": "Number of observations per trajectory", + "sample_dimension": "obs", + "units": "-", + }, + "pres": { + "long_name": "Pressure", + "standard_name": "sea_water_pressure", + "units": "dbar", + }, + "temp": { + "long_name": "Temperature", + "standard_name": "sea_water_temperature", + "units": "degree_C", + }, + "ve": { + "long_name": "Eastward velocity", + "standard_name": "eastward_sea_water_velocity", + "units": "m s-1", + }, + "vn": { + "long_name": "Northward velocity", + "standard_name": "northward_sea_water_velocity", + "units": "m s-1", + }, + } + + # global attributes + attrs = { + "title": "Subsurface float trajectories dataset", + "history": SUBSURFACE_FLOATS_VERSION, + "date_created": datetime.now().isoformat(), + "publisher_name": "WOCE Subsurface Float Data Assembly Center and NOAA AOML", + "publisher_url": "https://www.aoml.noaa.gov/phod/float_traj/data.php", + "license": "freely available", + "acknowledgement": f"Maintained by Andree Ramsey and Heather Furey from the Woods Hole Oceanographic Institution", + } + + # set attributes + for var in vars_attrs.keys(): + if var in ds.keys(): + 
ds[var].attrs = vars_attrs[var] + else: + warnings.warn(f"Variable {var} not found in upstream data; skipping.") + ds.attrs = attrs + + # set coordinates + ds = ds.set_coords(["time", "id"]) + + return ds
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/datasets.html b/_modules/clouddrift/datasets.html new file mode 100644 index 00000000..fa584a16 --- /dev/null +++ b/_modules/clouddrift/datasets.html @@ -0,0 +1,1102 @@ + + + + + + + + + + clouddrift.datasets — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.datasets

+"""
+This module provides functions to easily access ragged array datasets. If the datasets are 
+not accessed via cloud storage platforms or are not found on the local filesystem,
+they will be downloaded from their upstream repositories and stored for later access 
+(~/.clouddrift for UNIX-based systems).
+"""
+from io import BufferedReader, BytesIO
+from clouddrift import adapters
+import os
+import platform
+import xarray as xr
+
+
+
+[docs] +def gdp1h(decode_times: bool = True) -> xr.Dataset: + """Returns the latest version of the NOAA Global Drifter Program (GDP) hourly + dataset as a ragged array Xarray dataset. + + The data is accessed from zarr archive hosted on a public AWS S3 bucket accessible at + https://registry.opendata.aws/noaa-oar-hourly-gdp/. Original data source from NOAA NCEI + is https://doi.org/10.25921/x46c-3620). + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + Hourly GDP dataset as a ragged array + + Examples + -------- + >>> from clouddrift.datasets import gdp1h + >>> ds = gdp1h() + >>> ds + <xarray.Dataset> + Dimensions: (traj: 19396, obs: 197214787) + Coordinates: + id (traj) int64 ... + time (obs) datetime64[ns] ... + Dimensions without coordinates: traj, obs + Data variables: (12/60) + BuoyTypeManufacturer (traj) |S20 ... + BuoyTypeSensorArray (traj) |S20 ... + CurrentProgram (traj) float32 ... + DeployingCountry (traj) |S20 ... + DeployingShip (traj) |S20 ... + DeploymentComments (traj) |S20 ... + ... ... + start_lat (traj) float32 ... + start_lon (traj) float32 ... + typebuoy (traj) |S10 ... + typedeath (traj) int8 ... + ve (obs) float32 ... + vn (obs) float32 ... + Attributes: (12/16) + Conventions: CF-1.6 + acknowledgement: Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurio... + contributor_name: NOAA Global Drifter Program + contributor_role: Data Acquisition Center + date_created: 2023-09-08T17:05:12.130123 + doi: 10.25921/x46c-3620 + ... ... 
+ processing_level: Level 2 QC by GDP drifter DAC + publisher_email: aoml.dftr@noaa.gov + publisher_name: GDP Drifter DAC + publisher_url: https://www.aoml.noaa.gov/phod/gdp + summary: Global Drifter Program hourly data + title: Global Drifter Program hourly drifting buoy collection + + See Also + -------- + :func:`gdp6h` + """ + url = "https://noaa-oar-hourly-gdp-pds.s3.amazonaws.com/latest/gdp-v2.01.zarr" + ds = xr.open_dataset(url, engine="zarr", decode_times=decode_times) + ds = ds.rename_vars({"ID": "id"}).assign_coords({"id": ds.ID}).drop_vars(["ids"]) + return ds
+ + + +
+[docs] +def gdp6h(decode_times: bool = True) -> xr.Dataset: + """Returns the NOAA Global Drifter Program (GDP) 6-hourly dataset as a ragged array + Xarray dataset. + + The data is accessed from a public HTTPS server at NOAA's Atlantic + Oceanographic and Meteorological Laboratory (AOML) accessible at + https://www.aoml.noaa.gov/phod/gdp/index.php. It should be noted that the data loading + method is platform dependent. Linux and Darwin (macOS) machines lazy load the datasets leveraging the + byte-range feature of the netCDF-c library (dataset loading engine used by xarray). + Windows machines download the entire dataset into a memory buffer which is then passed + to xarray. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + 6-hourly GDP dataset as a ragged array + + Examples + -------- + >>> from clouddrift.datasets import gdp6h + >>> ds = gdp6h() + >>> ds + <xarray.Dataset> + Dimensions: (traj: 27647, obs: 46535470) + Coordinates: + ids (obs) int64 7702204 7702204 ... 300234061198840 + time (obs) float64 2.879e+08 2.879e+08 ... 1.697e+09 + Dimensions without coordinates: traj, obs + Data variables: (12/50) + ID (traj) int64 7702204 7702201 ... 300234061198840 + rowsize (traj) int32 92 1747 1943 1385 1819 ... 54 53 51 28 + WMO (traj) int32 0 0 0 0 ... 6203890 6203888 4101885 + expno (traj) int32 40 40 40 40 ... 31412 21421 21421 31412 + deploy_date (traj) float32 2.878e+08 2.878e+08 ... 1.696e+09 nan + deploy_lat (traj) float32 -7.798 -4.9 -3.18 ... 9.9 11.9 nan + ... ... + vn (obs) float32 nan 0.1056 0.04974 ... 0.7384 nan + temp (obs) float32 28.35 28.3 nan ... 29.08 28.97 28.92 + err_lat (obs) float32 0.009737 0.007097 ... 0.001659 0.001687 + err_lon (obs) float32 0.00614 0.004583 ... 
0.002471 0.002545 + err_temp (obs) float32 0.08666 0.08757 ... 0.03665 0.03665 + drogue_status (obs) bool False False False False ... True True True + Attributes: (12/18) + title: Global Drifter Program drifting buoy collection + history: version September 2023. Metadata from dirall.dat an... + Conventions: CF-1.6 + time_coverage_start: 1979-02-15:00:00:00Z + time_coverage_end: 2023-10-18:18:00:00Z + date_created: 2023-12-22T17:50:22.242943 + ... ... + contributor_name: NOAA Global Drifter Program + contributor_role: Data Acquisition Center + institution: NOAA Atlantic Oceanographic and Meteorological Labo... + acknowledgement: Lumpkin, Rick; Centurioni, Luca (2019). NOAA Global... + summary: Global Drifter Program six-hourly data + doi: 10.25921/7ntx-z961 + + See Also + -------- + :func:`gdp1h` + """ + url = "https://www.aoml.noaa.gov/ftp/pub/phod/buoydata/gdp6h_ragged_may23.nc#mode=bytes" + + if platform.system() == "Windows": + buffer = BytesIO() + adapters.utils.download_with_progress([(f"{url}#mode=bytes", buffer)]) + reader = BufferedReader(buffer) + ds = xr.open_dataset(reader, decode_times=decode_times) + else: + ds = xr.open_dataset(f"{url}", decode_times=decode_times) + + ds = ds.rename_vars({"ID": "id"}).assign_coords({"id": ds.ID}).drop_vars(["ids"]) + return ds
+ + + +
+[docs] +def glad(decode_times: bool = True) -> xr.Dataset: + """Returns the Grand LAgrangian Deployment (GLAD) dataset as a ragged array + Xarray dataset. + + The function will first look for the ragged-array dataset on the local + filesystem. If it is not found, the dataset will be downloaded using the + corresponding adapter function and stored for later access. + + The upstream data is available at https://doi.org/10.7266/N7VD6WC8. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + GLAD dataset as a ragged array + + Examples + -------- + >>> from clouddrift.datasets import glad + >>> ds = glad() + >>> ds + <xarray.Dataset> + Dimensions: (obs: 1602883, traj: 297) + Coordinates: + time (obs) datetime64[ns] ... + id (traj) object ... + Data variables: + latitude (obs) float32 ... + longitude (obs) float32 ... + position_error (obs) float32 ... + u (obs) float32 ... + v (obs) float32 ... + velocity_error (obs) float32 ... + rowsize (traj) int64 ... + Attributes: + title: GLAD experiment CODE-style drifter trajectories (low-pass f... + institution: Consortium for Advanced Research on Transport of Hydrocarbo... + source: CODE-style drifters + history: Downloaded from https://data.gulfresearchinitiative.org/dat... + references: Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter t... + + Reference + --------- + Özgökmen, Tamay. 2013. GLAD experiment CODE-style drifter trajectories (low-pass filtered, 15 minute interval records), northern Gulf of Mexico near DeSoto Canyon, July-October 2012. Distributed by: Gulf of Mexico Research Initiative Information and Data Cooperative (GRIIDC), Harte Research Institute, Texas A&M University–Corpus Christi. 
doi:10.7266/N7VD6WC8 + """ + clouddrift_path = ( + os.path.expanduser("~/.clouddrift") + if not os.getenv("CLOUDDRIFT_PATH") + else os.getenv("CLOUDDRIFT_PATH") + ) + glad_path = f"{clouddrift_path}/data/glad.nc" + if not os.path.exists(glad_path): + print(f"{glad_path} not found; download from upstream repository.") + ds = adapters.glad.to_xarray() + os.makedirs(os.path.dirname(glad_path), exist_ok=True) + ds.to_netcdf(glad_path) + else: + ds = xr.open_dataset(glad_path, decode_times=decode_times) + return ds
+ + + +
+[docs] +def mosaic(decode_times: bool = True) -> xr.Dataset: + """Returns the MOSAiC sea-ice drift dataset as a ragged array Xarray dataset. + + The function will first look for the ragged-array dataset on the local + filesystem. If it is not found, the dataset will be downloaded using the + corresponding adapter function and stored for later access. + + The upstream data is available at https://arcticdata.io/catalog/view/doi:10.18739/A2KP7TS83. + + Reference + --------- + Angela Bliss, Jennifer Hutchings, Philip Anderson, Philipp Anhaus, + Hans Jakob Belter, Jørgen Berge, Vladimir Bessonov, Bin Cheng, Sylvia Cole, + Dave Costa, Finlo Cottier, Christopher J Cox, Pedro R De La Torre, Dmitry V Divine, + Gilbert Emzivat, Ying-Chih Fang, Steven Fons, Michael Gallagher, Maxime Geoffrey, + Mats A Granskog, ... Guangyu Zuo. (2022). Sea ice drift tracks from the Distributed + Network of autonomous buoys deployed during the Multidisciplinary drifting Observatory + for the Study of Arctic Climate (MOSAiC) expedition 2019 - 2021. Arctic Data Center. + doi:10.18739/A2KP7TS83. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + MOSAiC sea-ice drift dataset as a ragged array + + Examples + -------- + >>> from clouddrift.datasets import mosaic + >>> ds = mosaic() + >>> ds + <xarray.Dataset> + Dimensions: (obs: 1926226, traj: 216) + Coordinates: + time (obs) datetime64[ns] ... + id (traj) object ... + Dimensions without coordinates: obs, traj + Data variables: (12/19) + latitude (obs) float64 ... + longitude (obs) float64 ... + Deployment Leg (traj) int64 ... + DN Station ID (traj) object ... + IMEI (traj) object ... + Deployment Date (traj) datetime64[ns] ... + ... ... + Buoy Type (traj) object ... 
+ Manufacturer (traj) object ... + Model (traj) object ... + PI (traj) object ... + Data Authors (traj) object ... + rowsize (traj) int64 ... + """ + clouddrift_path = ( + os.path.expanduser("~/.clouddrift") + if not os.getenv("CLOUDDRIFT_PATH") + else os.getenv("CLOUDDRIFT_PATH") + ) + mosaic_path = f"{clouddrift_path}/data/mosaic.nc" + if not os.path.exists(mosaic_path): + print(f"{mosaic_path} not found; download from upstream repository.") + ds = adapters.mosaic.to_xarray() + os.makedirs(os.path.dirname(mosaic_path), exist_ok=True) + ds.to_netcdf(mosaic_path) + else: + ds = xr.open_dataset(mosaic_path, decode_times=decode_times) + return ds
+ + + +
+[docs] +def spotters(decode_times: bool = True) -> xr.Dataset: + """Returns the Sofar Ocean Spotter drifters ragged array dataset as an Xarray dataset. + + The data is accessed from a zarr archive hosted on a public AWS S3 bucket accessible + at https://sofar-spotter-archive.s3.amazonaws.com/spotter_data_bulk_zarr. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + Sofar ocean floats dataset as a ragged array + + Examples + -------- + >>> from clouddrift.datasets import spotters + >>> ds = spotters() + >>> ds + <xarray.Dataset> + Dimensions: (index: 6390651, trajectory: 871) + Coordinates: + time (index) datetime64[ns] ... + * trajectory (trajectory) object 'SPOT-010001' ... 'SPOT-1975' + Dimensions without coordinates: index + Data variables: + latitude (index) float64 ... + longitude (index) float64 ... + meanDirection (index) float64 ... + meanDirectionalSpread (index) float64 ... + meanPeriod (index) float64 ... + peakDirection (index) float64 ... + peakDirectionalSpread (index) float64 ... + peakPeriod (index) float64 ... + rowsize (trajectory) int64 ... + significantWaveHeight (index) float64 ... + Attributes: + author: Isabel A. Houghton + creation_date: 2023-10-18 00:43:55.333537 + email: isabel.houghton@sofarocean.com + institution: Sofar Ocean + references: https://content.sofarocean.com/hubfs/Spotter%20product%20... + source: Spotter wave buoy + title: Sofar Spotter Data Archive - Bulk Wave Parameters + """ + url = "https://sofar-spotter-archive.s3.amazonaws.com/spotter_data_bulk_zarr" + return xr.open_dataset(url, engine="zarr", decode_times=decode_times)
+ + + +
+[docs] +def subsurface_floats(decode_times: bool = True) -> xr.Dataset: + """Returns the subsurface floats dataset as a ragged array Xarray dataset. + + The data is accessed from a public HTTPS server at NOAA's Atlantic + Oceanographic and Meteorological Laboratory (AOML) accessible at + https://www.aoml.noaa.gov/phod/gdp/index.php. + + The upstream data is available at + https://www.aoml.noaa.gov/phod/float_traj/files/allFloats_12122017.mat. + + This dataset of subsurface float observations was compiled by the WOCE Subsurface + Float Data Assembly Center (WFDAC) in Woods Hole maintained by Andree Ramsey and + Heather Furey and copied to NOAA/AOML in October 2014 (version 1) and in December + 2017 (version 2). Subsequent updates will be included as additional appropriate + float data, quality controlled by the appropriate principal investigators, is + submitted for inclusion. + + Note that these observations are collected by ALACE/RAFOS/Eurofloat-style + acoustically-tracked, neutrally-buoyant subsurface floats which collect data while + drifting beneath the ocean surface. These data are the result of the effort and + resources of many individuals and institutions. You are encouraged to acknowledge + the work of the data originators and Data Centers in publications arising from use + of these data. + + The float data were originally divided by project at the WFDAC. Here they have been + compiled in a single Matlab data set. See here for more information on the variables + contained in these files. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. 
+ + Returns + ------- + xarray.Dataset + Subsurface floats dataset as a ragged array + + Examples + -------- + >>> from clouddrift.datasets import subsurface_floats + >>> ds = subsurface_floats() + >>> ds + <xarray.Dataset> + Dimensions: (traj: 2193, obs: 1402840) + Coordinates: + id (traj) uint16 ... + time (obs) datetime64[ns] ... + Dimensions without coordinates: traj, obs + Data variables: (12/13) + expList (traj) object ... + expName (traj) object ... + expOrg (traj) object ... + expPI (traj) object ... + indexExp (traj) uint8 ... + fltType (traj) object ... + ... ... + lon (obs) float64 ... + lat (obs) float64 ... + pres (obs) float64 ... + temp (obs) float64 ... + ve (obs) float64 ... + vn (obs) float64 ... + Attributes: + title: Subsurface float trajectories dataset + history: December 2017 (version 2) + date_created: 2023-11-14T22:30:38.831656 + publisher_name: WOCE Subsurface Float Data Assembly Center and NOAA AOML + publisher_url: https://www.aoml.noaa.gov/phod/float_traj/data.php + license: freely available + acknowledgement: Maintained by Andree Ramsey and Heather Furey from the ... + + References + ---------- + WOCE Subsurface Float Data Assembly Center (WFDAC) https://www.aoml.noaa.gov/phod/float_traj/index.php + """ + + clouddrift_path = ( + os.path.expanduser("~/.clouddrift") + if not os.getenv("CLOUDDRIFT_PATH") + else os.getenv("CLOUDDRIFT_PATH") + ) + + local_file = f"{clouddrift_path}/data/subsurface_floats.nc" + if not os.path.exists(local_file): + print(f"{local_file} not found; download from upstream repository.") + ds = adapters.subsurface_floats.to_xarray() + else: + ds = xr.open_dataset(local_file, decode_times=decode_times) + return ds
+ + + +
+[docs] +def yomaha(decode_times: bool = True) -> xr.Dataset: + """Returns the YoMaHa dataset as a ragged array Xarray dataset. + + The function will first look for the ragged-array dataset on the local + filesystem. If it is not found, the dataset will be downloaded using the + corresponding adapter function and stored for later access. The upstream + data is available at http://apdrc.soest.hawaii.edu/projects/yomaha/. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + YoMaHa'07 dataset as a ragged array + + Examples + -------- + + >>> from clouddrift.datasets import yomaha + >>> ds = yomaha() + >>> ds + <xarray.Dataset> + Dimensions: (obs: 1926743, traj: 12196) + Coordinates: + time_d (obs) datetime64[ns] ... + time_s (obs) datetime64[ns] ... + time_lp (obs) datetime64[ns] ... + time_lc (obs) datetime64[ns] ... + id (traj) int64 ... + Dimensions without coordinates: obs, traj + Data variables: (12/27) + lon_d (obs) float64 ... + lat_d (obs) float64 ... + pres_d (obs) float32 ... + ve_d (obs) float32 ... + vn_d (obs) float32 ... + err_ve_d (obs) float32 ... + ... ... + cycle (obs) int64 ... + time_inv (obs) int64 ... + rowsize (traj) int64 ... + wmo_id (traj) int64 ... + dac_id (traj) int64 ... + float_type (traj) int64 ... + Attributes: + title: YoMaHa'07: Velocity data assessed from trajectories of A... + history: Dataset updated on Tue Jun 28 03:14:34 HST 2022 + date_created: 2023-12-08T00:52:08.478075 + publisher_name: Asia-Pacific Data Research Center + publisher_url: http://apdrc.soest.hawaii.edu/index.php + license: Creative Commons Attribution 4.0 International License.. + + Reference + --------- + Lebedev, K. V., Yoshinari, H., Maximenko, N. A., & Hacker, P. W. (2007). 
Velocity data + assessed from trajectories of Argo floats at parking level and at the sea + surface. IPRC Technical Note, 4(2), 1-16. + """ + clouddrift_path = ( + os.path.expanduser("~/.clouddrift") + if not os.getenv("CLOUDDRIFT_PATH") + else os.getenv("CLOUDDRIFT_PATH") + ) + local_file = f"{clouddrift_path}/data/yomaha.nc" + if not os.path.exists(local_file): + print(f"{local_file} not found; download from upstream repository.") + ds = adapters.yomaha.to_xarray() + os.makedirs(os.path.dirname(local_file), exist_ok=True) + ds.to_netcdf(local_file) + else: + ds = xr.open_dataset(local_file, decode_times=decode_times) + return ds
+ + + +
+[docs] +def andro(decode_times: bool = True) -> xr.Dataset: + """Returns the ANDRO as a ragged array Xarray dataset. + + The function will first look for the ragged-array dataset on the local + filesystem. If it is not found, the dataset will be downloaded using the + corresponding adapter function and stored for later access. The upstream + data is available at https://www.seanoe.org/data/00360/47077/. + + Parameters + ---------- + decode_times : bool, optional + If True, decode the time coordinate into a datetime object. If False, the time + coordinate will be an int64 or float64 array of increments since the origin + time indicated in the units attribute. Default is True. + + Returns + ------- + xarray.Dataset + ANDRO dataset as a ragged array + Examples + -------- + >>> from clouddrift.datasets import andro + >>> ds = andro() + >>> ds + <xarray.Dataset> + Dimensions: (obs: 1360753, traj: 9996) + Coordinates: + time_d (obs) datetime64[ns] ... + time_s (obs) datetime64[ns] ... + time_lp (obs) datetime64[ns] ... + time_lc (obs) datetime64[ns] ... + id (traj) int64 ... + Dimensions without coordinates: obs, traj + Data variables: (12/33) + lon_d (obs) float64 ... + lat_d (obs) float64 ... + pres_d (obs) float32 ... + temp_d (obs) float32 ... + sal_d (obs) float32 ... + ve_d (obs) float32 ... + ... ... + lon_lc (obs) float64 ... + lat_lc (obs) float64 ... + surf_fix (obs) int64 ... + cycle (obs) int64 ... + profile_id (obs) float32 ... + rowsize (traj) int64 ... + Attributes: + title: ANDRO: An Argo-based deep displacement dataset + history: 2022-03-04 + date_created: 2023-12-08T00:52:00.937120 + publisher_name: SEANOE (SEA scieNtific Open data Edition) + publisher_url: https://www.seanoe.org/data/00360/47077/ + license: freely available + + Reference + --------- + Ollitrault Michel, Rannou Philippe, Brion Emilie, Cabanes Cecile, Piron Anne, Reverdin Gilles, + Kolodziejczyk Nicolas (2022). ANDRO: An Argo-based deep displacement dataset. + SEANOE. 
https://doi.org/10.17882/47077 + """ + clouddrift_path = ( + os.path.expanduser("~/.clouddrift") + if not os.getenv("CLOUDDRIFT_PATH") + else os.getenv("CLOUDDRIFT_PATH") + ) + local_file = f"{clouddrift_path}/data/andro.nc" + if not os.path.exists(local_file): + print(f"{local_file} not found; download from upstream repository.") + ds = adapters.andro.to_xarray() + os.makedirs(os.path.dirname(local_file), exist_ok=True) + ds.to_netcdf(local_file) + else: + ds = xr.open_dataset(local_file, decode_times=decode_times) + return ds
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/kinematics.html b/_modules/clouddrift/kinematics.html new file mode 100644 index 00000000..1e82f785 --- /dev/null +++ b/_modules/clouddrift/kinematics.html @@ -0,0 +1,1418 @@ + + + + + + + + + + clouddrift.kinematics — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.kinematics

+"""
+Functions for kinematic computations.
+"""
+
+import numpy as np
+import pandas as pd
+from typing import Optional, Tuple, Union
+import xarray as xr
+from clouddrift.sphere import (
+    EARTH_RADIUS_METERS,
+    bearing,
+    cartesian_to_spherical,
+    cartesian_to_tangentplane,
+    coriolis_frequency,
+    distance,
+    position_from_distance_and_bearing,
+    recast_lon360,
+    spherical_to_cartesian,
+)
+from clouddrift.wavelet import morse_logspace_freq, morse_wavelet, wavelet_transform
+
+
+
+[docs] +def kinetic_energy( + u: Union[float, list, np.ndarray, xr.DataArray, pd.Series], + v: Optional[Union[float, list, np.ndarray, xr.DataArray, pd.Series]] = None, +) -> Union[float, np.ndarray, xr.DataArray]: + """Compute kinetic energy from zonal and meridional velocities. + + Parameters + ---------- + u : float or array-like + Zonal velocity. + v : float or array-like, optional. + Meridional velocity. If not provided, the flow is assumed one-dimensional + in time and defined by ``u``. + + Returns + ------- + ke : float or array-like + Kinetic energy. + + Examples + -------- + >>> import numpy as np + >>> from clouddrift.kinematics import kinetic_energy + >>> u = np.array([1., 2., 3., 4.]) + >>> v = np.array([1., 1., 1., 1.]) + >>> kinetic_energy(u, v) + array([1. , 2.5, 5. , 8.5]) + + >>> u = np.reshape(np.tile([1., 2., 3., 4.], 2), (2, 4)) + >>> v = np.reshape(np.tile([1., 1., 1., 1.], 2), (2, 4)) + >>> kinetic_energy(u, v) + array([[1. , 2.5, 5. , 8.5], + [1. , 2.5, 5. , 8.5]]) + """ + if v is None: + v = np.zeros_like(u) + ke = (u**2 + v**2) / 2 + return ke
+ + + +
+[docs] +def inertial_oscillation_from_position( + longitude: np.ndarray, + latitude: np.ndarray, + relative_bandwidth: Optional[float] = None, + wavelet_duration: Optional[float] = None, + time_step: Optional[float] = 3600.0, + relative_vorticity: Optional[Union[float, np.ndarray]] = 0.0, +) -> np.ndarray: + """Extract inertial oscillations from consecutive geographical positions. + + This function acts by performing a time-frequency analysis of horizontal displacements + with analytic Morse wavelets. It extracts the portion of the wavelet transform signal + that follows the inertial frequency (opposite of Coriolis frequency) as a function of time, + potentially shifted in frequency by a measure of relative vorticity. The result is a pair + of zonal and meridional relative displacements in meters. + + This function is equivalent to a bandpass filtering of the horizontal displacements. The characteristics + of the filter are defined by the relative bandwidth of the wavelet transform or by the duration of the wavelet, + see the parameters below. + + Parameters + ---------- + longitude : array-like + Longitude sequence. Unidimensional array input. + latitude : array-like + Latitude sequence. Unidimensional array input. + relative_bandwidth : float, optional + Bandwidth of the frequency-domain equivalent filter for the extraction of the inertial + oscillations; a number less or equal to one which is a fraction of the inertial frequency. + A value of 0.1 leads to a bandpass filter equivalent of +/- 10 percent of the inertial frequency. + wavelet_duration : float, optional + Duration of the wavelet, or inverse of the relative bandwidth, which can be passed instead of the + relative bandwidth. + time_step : float, optional + The constant time interval between data points in seconds. Default is 3600. + relative_vorticity: Optional, float or array-like + Relative vorticity adding to the local Coriolis frequency. 
If "f" is the Coriolis + frequency then "f" + `relative_vorticity` will be the effective Coriolis frequency as defined by Kunze (1985). + Positive values correspond to cyclonic vorticity, irrespectively of the latitudes of the data + points. + + Returns + ------- + xhat : array-like + Zonal relative displacement in meters from inertial oscillations. + yhat : array-like + Meridional relative displacement in meters from inertial oscillations. + + Examples + -------- + To extract displacements from inertial oscillations from sequences of longitude + and latitude values, equivalent to bandpass around 20 percent of the local inertial frequency: + + >>> xhat, yhat = inertial_oscillation_from_position(longitude, latitude, relative_bandwidth=0.2) + + The same result can be obtained by specifying the wavelet duration instead of the relative bandwidth: + + >>> xhat, yhat = inertial_oscillation_from_position(longitude, latitude, wavelet_duration=5) + + Next, the residual positions from the inertial displacements can be obtained with another function: + + >>> residual_longitudes, residual_latitudes = residual_position_from_displacement(longitude, latitude, xhat, yhat) + + Raises + ------ + ValueError + If longitude and latitude arrays do not have the same shape. + If relative_vorticity is an array and does not have the same shape as longitude and latitude. + If time_step is not a float. + If both relative_bandwidth and wavelet_duration are specified. + If neither relative_bandwidth nor wavelet_duration are specified. + If the absolute value of relative_bandwidth is not in the range (0,1]. + If the wavelet duration is not greater than or equal to 1. 
+ + See Also + -------- + :func:`residual_position_from_displacement`, `wavelet_transform`, `morse_wavelet` + + """ + if longitude.shape != latitude.shape: + raise ValueError("longitude and latitude arrays must have the same shape.") + + if relative_bandwidth is not None and wavelet_duration is not None: + raise ValueError( + "Only one of 'relative_bandwidth' and 'wavelet_duration' can be specified" + ) + elif relative_bandwidth is None and wavelet_duration is None: + raise ValueError( + "One of 'relative_bandwidth' and 'wavelet_duration' must be specified" + ) + + # length of data sequence + data_length = longitude.shape[0] + + if isinstance(relative_vorticity, float): + relative_vorticity = np.full_like(longitude, relative_vorticity) + elif isinstance(relative_vorticity, np.ndarray): + if not relative_vorticity.shape == longitude.shape: + raise ValueError( + "relative_vorticity must be a float or the same shape as longitude and latitude." + ) + if relative_bandwidth is not None: + if not 0 < np.abs(relative_bandwidth) <= 1: + raise ValueError("relative_bandwidth must be in the (0, 1]) range") + + if wavelet_duration is not None: + if not wavelet_duration >= 1: + raise ValueError("wavelet_duration must be greater than or equal to 1") + + # wavelet parameters are gamma and beta + gamma = 3 # symmetric wavelet + density = 16 # results relative insensitive to this parameter + # calculate beta from wavelet duration or from relative bandwidth + if relative_bandwidth is not None: + wavelet_duration = 1 / np.abs(relative_bandwidth) # P parameter + beta = wavelet_duration**2 / gamma + + if isinstance(latitude, xr.DataArray): + latitude = latitude.to_numpy() + if isinstance(longitude, xr.DataArray): + longitude = longitude.to_numpy() + + # Instantaneous absolute frequency of oscillations along trajectory in radian per second + cor_freq = np.abs( + coriolis_frequency(latitude) + relative_vorticity * np.sign(latitude) + ) + cor_freq_max = np.max(cor_freq * 1.05) + 
cor_freq_min = np.max( + [np.min(cor_freq * 0.95), 2 * np.pi / (time_step * data_length)] + ) + + # logarithmically distributed frequencies for wavelet analysis + radian_frequency = morse_logspace_freq( + gamma, + beta, + data_length, + (0.05, cor_freq_max * time_step), + (5, cor_freq_min * time_step), + density, + ) # frequencies in radian per unit time + + # wavelet transform on a sphere + # unwrap longitude recasted in [0,360) + longitude_unwrapped = np.unwrap(recast_lon360(longitude), period=360) + + # convert lat/lon to Cartesian coordinates x, y , z + x, y, z = spherical_to_cartesian(longitude_unwrapped, latitude) + + # wavelet transform of x, y, z + wavelet, _ = morse_wavelet(data_length, gamma, beta, radian_frequency) + wx = wavelet_transform(x, wavelet, boundary="mirror") + wy = wavelet_transform(y, wavelet, boundary="mirror") + wz = wavelet_transform(z, wavelet, boundary="mirror") + + longitude_new, latitude_new = cartesian_to_spherical( + x - np.real(wx), y - np.real(wy), z - np.real(wz) + ) + + # convert transforms to horizontal displacements on tangent plane + wxh, wyh = cartesian_to_tangentplane(wx, wy, wz, longitude_new, latitude_new) + + # rotary wavelet transforms to select inertial component; need to divide by sqrt(2) + wp = (wxh + 1j * wyh) / np.sqrt(2) + wn = (wxh - 1j * wyh) / np.sqrt(2) + + # find the values of radian_frequency/dt that most closely match cor_freq + frequency_bins = [ + np.argmin(np.abs(cor_freq[i] - radian_frequency / time_step)) + for i in range(data_length) + ] + + # get the transform at the inertial and "anti-inertial" frequencies + # extract the values of wp and wn at the calculated index as a function of time + # positive is anticyclonic (inertial) in the southern hemisphere + # negative is anticyclonic (inertial) in the northern hemisphere + wp = wp[frequency_bins, np.arange(0, data_length)] + wn = wn[frequency_bins, np.arange(0, data_length)] + + # indices of northern latitude points + north = latitude >= 0 + + # 
initialize the zonal and meridional components of inertial displacements + wxhat = np.zeros_like(latitude, dtype=np.complex64) + wyhat = np.zeros_like(latitude, dtype=np.complex64) + # equations are x+ = 0.5*(z+ + z-) and y+ = -0.5*1j*(z+ - z-) + if any(north): + wxhat[north] = wn[north] / np.sqrt(2) + wyhat[north] = 1j * wn[north] / np.sqrt(2) + if any(~north): + wxhat[~north] = wp[~north] / np.sqrt(2) + wyhat[~north] = -1j * wp[~north] / np.sqrt(2) + + # inertial displacement in meters + xhat = np.real(wxhat) + yhat = np.real(wyhat) + + return xhat, yhat
+ + + +
+[docs] +def residual_position_from_displacement( + longitude: Union[float, np.ndarray, xr.DataArray], + latitude: Union[float, np.ndarray, xr.DataArray], + x: Union[float, np.ndarray], + y: Union[float, np.ndarray], +) -> Union[Tuple[float], Tuple[np.ndarray]]: + """ + Return residual longitudes and latitudes along a trajectory on the spherical Earth + after correcting for zonal and meridional displacements x and y in meters. + + This is applicable as an example when one seeks to correct a trajectory for + horizontal oscillations due to inertial motions, tides, etc. + + Parameters + ---------- + longitude : float or array-like + Longitude in degrees. + latitude : float or array-like + Latitude in degrees. + x : float or np.ndarray + Zonal displacement in meters. + y : float or np.ndarray + Meridional displacement in meters. + + Returns + ------- + residual_longitude : float or np.ndarray + Residual longitude after correcting for zonal displacement, in degrees. + residual_latitude : float or np.ndarray + Residual latitude after correcting for meridional displacement, in degrees. 
+ + Examples + -------- + Obtain the new geographical position for a displacement of 1/360-th of the + circumference of the Earth from original position (longitude,latitude) = (1,0): + + >>> from clouddrift.sphere import EARTH_RADIUS_METERS + >>> residual_position_from_displacement(1,0,2 * np.pi * EARTH_RADIUS_METERS / 360,0) + (0.0, 0.0) + """ + # convert to numpy arrays to insure consistent outputs + if isinstance(longitude, xr.DataArray): + longitude = longitude.to_numpy() + if isinstance(latitude, xr.DataArray): + latitude = latitude.to_numpy() + + latitudehat = 180 / np.pi * y / EARTH_RADIUS_METERS + longitudehat = ( + 180 / np.pi * x / (EARTH_RADIUS_METERS * np.cos(np.radians(latitude))) + ) + + residual_latitude = latitude - latitudehat + residual_longitude = recast_lon360( + np.degrees(np.angle(np.exp(1j * np.radians(longitude - longitudehat)))) + ) + + return residual_longitude, residual_latitude
+ + + +
+[docs] +def position_from_velocity( + u: np.ndarray, + v: np.ndarray, + time: np.ndarray, + x_origin: float, + y_origin: float, + coord_system: Optional[str] = "spherical", + integration_scheme: Optional[str] = "forward", + time_axis: Optional[int] = -1, +) -> Tuple[np.ndarray, np.ndarray]: + """Compute positions from arrays of velocities and time and a pair of origin + coordinates. + + The units of the result are degrees if ``coord_system == "spherical"`` (default). + If ``coord_system == "cartesian"``, the units of the result are equal to the + units of the input velocities multiplied by the units of the input time. + For example, if the input velocities are in meters per second and the input + time is in seconds, the units of the result will be meters. + + Integration scheme can take one of three values: + + 1. "forward" (default): integration from x[i] to x[i+1] is performed + using the velocity at x[i]. + 2. "backward": integration from x[i] to x[i+1] is performed using the + velocity at x[i+1]. + 3. "centered": integration from x[i] to x[i+1] is performed using the + arithmetic average of the velocities at x[i] and x[i+1]. Note that + this method introduces some error due to the averaging. + + u, v, and time can be multi-dimensional arrays. If the time axis, along + which the finite differencing is performed, is not the last one (i.e. + x.shape[-1]), use the ``time_axis`` optional argument to specify along which + axis should the differencing be done. ``x``, ``y``, and ``time`` must have + the same shape. + + This function will not do any special handling of longitude ranges. If the + integrated trajectory crosses the antimeridian (dateline) in either direction, the + longitude values will not be adjusted to stay in any specific range such + as [-180, 180] or [0, 360]. If you need your longitudes to be in a specific + range, recast the resulting longitude from this function using the function + :func:`clouddrift.sphere.recast_lon`. 
+ + Parameters + ---------- + u : np.ndarray + An array of eastward velocities. + v : np.ndarray + An array of northward velocities. + time : np.ndarray + An array of time values. + x_origin : float + Origin x-coordinate or origin longitude. + y_origin : float + Origin y-coordinate or origin latitude. + coord_system : str, optional + The coordinate system of the input. Can be "spherical" or "cartesian". + Default is "spherical". + integration_scheme : str, optional + The difference scheme to use for computing the position. Can be + "forward" or "backward". Default is "forward". + time_axis : int, optional + The axis of the time array. Default is -1, which corresponds to the + last axis. + + Returns + ------- + x : np.ndarray + An array of zonal displacements or longitudes. + y : np.ndarray + An array of meridional displacements or latitudes. + + Examples + -------- + + Simple integration on a plane, using the forward scheme by default: + + >>> import numpy as np + >>> from clouddrift.analysis import position_from_velocity + >>> u = np.array([1., 2., 3., 4.]) + >>> v = np.array([1., 1., 1., 1.]) + >>> time = np.array([0., 1., 2., 3.]) + >>> x, y = position_from_velocity(u, v, time, 0, 0, coord_system="cartesian") + >>> x + array([0., 1., 3., 6.]) + >>> y + array([0., 1., 2., 3.]) + + As above, but using centered scheme: + + >>> x, y = position_from_velocity(u, v, time, 0, 0, coord_system="cartesian", integration_scheme="centered") + >>> x + array([0., 1.5, 4., 7.5]) + >>> y + array([0., 1., 2., 3.]) + + Simple integration on a sphere (default): + + >>> u = np.array([1., 2., 3., 4.]) + >>> v = np.array([1., 1., 1., 1.]) + >>> time = np.array([0., 1., 2., 3.]) * 1e5 + >>> x, y = position_from_velocity(u, v, time, 0, 0) + >>> x + array([0. , 0.89839411, 2.69584476, 5.39367518]) + >>> y + array([0. 
, 0.89828369, 1.79601515, 2.69201609]) + + Integrating across the antimeridian (dateline) by default does not + recast the resulting longitude: + + >>> u = np.array([1., 1.]) + >>> v = np.array([0., 0.]) + >>> time = np.array([0, 1e5]) + >>> x, y = position_from_velocity(u, v, time, 179.5, 0) + >>> x + array([179.5 , 180.3983205]) + >>> y + array([0., 0.]) + + Use the ``clouddrift.sphere.recast_lon`` function to recast the longitudes + to the desired range: + + >>> from clouddrift.sphere import recast_lon + >>> recast_lon(x, -180) + array([ 179.5 , -179.6016795]) + + Raises + ------ + ValueError + If u and v do not have the same shape. + If the time axis is outside of the valid range ([-1, N-1]). + If lengths of x, y, and time along time_axis are not equal. + If the input coordinate system is not "spherical" or "cartesian". + If the input integration scheme is not "forward", "backward", or "centered" + + See Also + -------- + :func:`velocity_from_position` + """ + # Velocity arrays must have the same shape. + # Although the exception would be raised further down in the function, + # we do the check here for a clearer error message. + if not u.shape == v.shape: + raise ValueError("u and v must have the same shape.") + + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(u.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(x.shape) - 1}])." + ) + + # Input arrays must have the same length along the time axis. + if not u.shape[time_axis] == v.shape[time_axis] == time.shape[time_axis]: + raise ValueError( + f"u, v, and time must have the same length along the time axis " + f"({time_axis})." + ) + + # Swap axes so that we can differentiate along the last axis. + # This is a syntax convenience rather than memory access optimization: + # np.swapaxes returns a view of the array, not a copy, if the input is a + # NumPy array. Otherwise, it returns a copy. 
For readability, introduce new + # variable names so that we can more easily differentiate between the + # original arrays and those with swapped axes. + u_ = np.swapaxes(u, time_axis, -1) + v_ = np.swapaxes(v, time_axis, -1) + time_ = np.swapaxes(time, time_axis, -1) + + x = np.zeros(u_.shape, dtype=u.dtype) + y = np.zeros(v_.shape, dtype=v.dtype) + + dt = np.diff(time_) + + if integration_scheme.lower() == "forward": + x[..., 1:] = np.cumsum(u_[..., :-1] * dt, axis=-1) + y[..., 1:] = np.cumsum(v_[..., :-1] * dt, axis=-1) + elif integration_scheme.lower() == "backward": + x[..., 1:] = np.cumsum(u_[1:] * dt, axis=-1) + y[..., 1:] = np.cumsum(v_[1:] * dt, axis=-1) + elif integration_scheme.lower() == "centered": + x[..., 1:] = np.cumsum(0.5 * (u_[..., :-1] + u_[..., 1:]) * dt, axis=-1) + y[..., 1:] = np.cumsum(0.5 * (v_[..., :-1] + v_[..., 1:]) * dt, axis=-1) + else: + raise ValueError( + 'integration_scheme must be "forward", "backward", or "centered".' + ) + + if coord_system.lower() == "cartesian": + x += x_origin + y += y_origin + elif coord_system.lower() == "spherical": + dx = np.diff(x) + dy = np.diff(y) + distances = np.sqrt(dx**2 + dy**2) + bearings = np.arctan2(dy, dx) + x[..., 0], y[..., 0] = x_origin, y_origin + for n in range(distances.shape[-1]): + x[..., n + 1], y[..., n + 1] = position_from_distance_and_bearing( + x[..., n], y[..., n], distances[..., n], bearings[..., n] + ) + else: + raise ValueError('coord_system must be "spherical" or "cartesian".') + + return np.swapaxes(x, time_axis, -1), np.swapaxes(y, time_axis, -1)
+ + + +
+[docs] +def velocity_from_position( + x: np.ndarray, + y: np.ndarray, + time: np.ndarray, + coord_system: Optional[str] = "spherical", + difference_scheme: Optional[str] = "forward", + time_axis: Optional[int] = -1, +) -> Tuple[xr.DataArray, xr.DataArray]: + """Compute velocity from arrays of positions and time. + + x and y can be provided as longitude and latitude in degrees if + coord_system == "spherical" (default), or as easting and northing if + coord_system == "cartesian". + + The units of the result are meters per unit of time if + coord_system == "spherical". For example, if the time is provided in the + units of seconds, the resulting velocity is in the units of meters per + second. Otherwise, if coord_system == "cartesian", the units of the + resulting velocity correspond to the units of the input. For example, + if zonal and meridional displacements are in the units of kilometers and + time is in the units of hours, the resulting velocity is in the units of + kilometers per hour. + + x, y, and time can be multi-dimensional arrays. If the time axis, along + which the finite differencing is performed, is not the last one (i.e. + x.shape[-1]), use the time_axis optional argument to specify along which + axis should the differencing be done. x, y, and time must have the same + shape. + + Difference scheme can take one of three values: + + #. "forward" (default): finite difference is evaluated as ``dx[i] = dx[i+1] - dx[i]``; + #. "backward": finite difference is evaluated as ``dx[i] = dx[i] - dx[i-1]``; + #. "centered": finite difference is evaluated as ``dx[i] = (dx[i+1] - dx[i-1]) / 2``. + + Forward and backward schemes are effectively the same except that the + position at which the velocity is evaluated is shifted one element down in + the backward scheme relative to the forward scheme. In the case of a + forward or backward difference scheme, the last or first element of the + velocity, respectively, is extrapolated from its neighboring point. 
In the + case of a centered difference scheme, the start and end boundary points are + evaluated using the forward and backward difference scheme, respectively. + + Parameters + ---------- + x : array_like + An N-d array of x-positions (longitude in degrees or zonal displacement in any unit) + y : array_like + An N-d array of y-positions (latitude in degrees or meridional displacement in any unit) + time : array_like + An N-d array of times as floating point values (in any unit) + coord_system : str, optional + Coordinate system that x and y arrays are in; possible values are "spherical" (default) or "cartesian". + difference_scheme : str, optional + Difference scheme to use; possible values are "forward", "backward", and "centered". + time_axis : int, optional + Axis along which to differentiate (default is -1) + + Returns + ------- + u : np.ndarray + Zonal velocity + v : np.ndarray + Meridional velocity + + Raises + ------ + ValueError + If x and y do not have the same shape. + If time_axis is outside of the valid range. + If lengths of x, y, and time along time_axis are not equal. + If coord_system is not "spherical" or "cartesian". + If difference_scheme is not "forward", "backward", or "centered". 
+ + Examples + -------- + Simple integration on a sphere, using the forward scheme by default: + + >>> import numpy as np + >>> from clouddrift.kinematics import velocity_from_position + >>> lon = np.array([0., 1., 3., 6.]) + >>> lat = np.array([0., 1., 2., 3.]) + >>> time = np.array([0., 1., 2., 3.]) * 1e5 + >>> u, v = velocity_from_position(lon, lat, time) + >>> u + array([1.11307541, 2.22513331, 3.33515501, 3.33515501]) + >>> v + array([1.11324496, 1.11409224, 1.1167442 , 1.1167442 ]) + + Integration on a Cartesian plane, using the forward scheme by default: + + >>> x = np.array([0., 1., 3., 6.]) + >>> y = np.array([0., 1., 2., 3.]) + >>> time = np.array([0., 1., 2., 3.]) + >>> u, v = velocity_from_position(x, y, time, coord_system="cartesian") + >>> u + array([1., 2., 3., 3.]) + >>> v + array([1., 1., 1., 1.]) + + See Also + -------- + :func:`position_from_velocity` + """ + + # Position arrays must have the same shape. + # Although the exception would be raised further down in the function, + # we do the check here for a clearer error message. + if not x.shape == y.shape: + raise ValueError("x and y arrays must have the same shape.") + + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(x.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(x.shape) - 1}])." + ) + + # Input arrays must have the same length along the time axis. + if not x.shape[time_axis] == y.shape[time_axis] == time.shape[time_axis]: + raise ValueError( + f"x, y, and time must have the same length along the time axis " + f"({time_axis})." + ) + + # Swap axes so that we can differentiate along the last axis. + # This is a syntax convenience rather than memory access optimization: + # np.swapaxes returns a view of the array, not a copy, if the input is a + # NumPy array. Otherwise, it returns a copy. 
For readability, introduce new + # variable names so that we can more easily differentiate between the + # original arrays and those with swapped axes. + x_ = np.swapaxes(x, time_axis, -1) + y_ = np.swapaxes(y, time_axis, -1) + time_ = np.swapaxes(time, time_axis, -1) + + dx = np.empty(x_.shape) + dy = np.empty(y_.shape) + dt = np.empty(time_.shape) + + # Compute dx, dy, and dt + if difference_scheme == "forward": + # All values except the ending boundary value are computed using the + # 1st order forward differencing. The ending boundary value is + # computed using the 1st order backward difference. + + # Time + dt[..., :-1] = np.diff(time_) + dt[..., -1] = dt[..., -2] + + # Space + if coord_system == "cartesian": + dx[..., :-1] = np.diff(x_) + dx[..., -1] = dx[..., -2] + dy[..., :-1] = np.diff(y_) + dy[..., -1] = dy[..., -2] + + elif coord_system == "spherical": + distances = distance(x_[..., :-1], y_[..., :-1], x_[..., 1:], y_[..., 1:]) + bearings = bearing(x_[..., :-1], y_[..., :-1], x_[..., 1:], y_[..., 1:]) + dx[..., :-1] = distances * np.cos(bearings) + dx[..., -1] = dx[..., -2] + dy[..., :-1] = distances * np.sin(bearings) + dy[..., -1] = dy[..., -2] + + else: + raise ValueError('coord_system must be "spherical" or "cartesian".') + + elif difference_scheme == "backward": + # All values except the starting boundary value are computed using the + # 1st order backward differencing. The starting boundary value is + # computed using the 1st order forward difference. 
+ + # Time + dt[..., 1:] = np.diff(time_) + dt[..., 0] = dt[..., 1] + + # Space + if coord_system == "cartesian": + dx[..., 1:] = np.diff(x_) + dx[..., 0] = dx[..., 1] + dy[..., 1:] = np.diff(y_) + dy[..., 0] = dy[..., 1] + + elif coord_system == "spherical": + distances = distance(x_[..., :-1], y_[..., :-1], x_[..., 1:], y_[..., 1:]) + bearings = bearing(x_[..., :-1], y_[..., :-1], x_[..., 1:], y_[..., 1:]) + dx[..., 1:] = distances * np.cos(bearings) + dx[..., 0] = dx[..., 1] + dy[..., 1:] = distances * np.sin(bearings) + dy[..., 0] = dy[..., 1] + + else: + raise ValueError('coord_system must be "spherical" or "cartesian".') + + elif difference_scheme == "centered": + # Inner values are computed using the 2nd order centered differencing. + # The start and end boundary values are computed using the 1st order + # forward and backward differencing, respectively. + + # Time + dt[..., 1:-1] = (time_[..., 2:] - time_[..., :-2]) / 2 + dt[..., 0] = time_[..., 1] - time_[..., 0] + dt[..., -1] = time_[..., -1] - time_[..., -2] + + # Space + if coord_system == "cartesian": + dx[..., 1:-1] = (x_[..., 2:] - x_[..., :-2]) / 2 + dx[..., 0] = x_[..., 1] - x_[..., 0] + dx[..., -1] = x_[..., -1] - x_[..., -2] + dy[..., 1:-1] = (y_[..., 2:] - y_[..., :-2]) / 2 + dy[..., 0] = y_[..., 1] - y_[..., 0] + dy[..., -1] = y_[..., -1] - y_[..., -2] + + elif coord_system == "spherical": + # Inner values + y1 = (y_[..., :-2] + y_[..., 1:-1]) / 2 + x1 = (x_[..., :-2] + x_[..., 1:-1]) / 2 + y2 = (y_[..., 2:] + y_[..., 1:-1]) / 2 + x2 = (x_[..., 2:] + x_[..., 1:-1]) / 2 + distances = distance(x1, y1, x2, y2) + bearings = bearing(x1, y1, x2, y2) + dx[..., 1:-1] = distances * np.cos(bearings) + dy[..., 1:-1] = distances * np.sin(bearings) + + # Boundary values + distance1 = distance(x_[..., 0], y_[..., 0], x_[..., 1], y_[..., 1]) + bearing1 = bearing(x_[..., 0], y_[..., 0], x_[..., 1], y_[..., 1]) + dx[..., 0] = distance1 * np.cos(bearing1) + dy[..., 0] = distance1 * np.sin(bearing1) + distance2 = 
distance(x_[..., -2], y_[..., -2], x_[..., -1], y_[..., -1]) + bearing2 = bearing(x_[..., -2], y_[..., -2], x_[..., -1], y_[..., -1]) + dx[..., -1] = distance2 * np.cos(bearing2) + dy[..., -1] = distance2 * np.sin(bearing2) + + else: + raise ValueError('coord_system must be "spherical" or "cartesian".') + + else: + raise ValueError( + 'difference_scheme must be "forward", "backward", or "centered".' + ) + + # This should avoid an array copy when returning the result + dx /= dt + dy /= dt + + return np.swapaxes(dx, time_axis, -1), np.swapaxes(dy, time_axis, -1)
+ + + +
+[docs] +def spin( + u: np.ndarray, + v: np.ndarray, + time: np.ndarray, + difference_scheme: Optional[str] = "forward", + time_axis: Optional[int] = -1, +) -> Union[float, np.ndarray]: + """Compute spin continuously from velocities and times. + + Spin is traditionally (Sawford, 1999; Veneziani et al., 2005) defined as + (<u'dv' - v'du'>) / (2 dt EKE) where u' and v' are eddy-perturbations of the + velocity field, EKE is eddy kinetic energy, dt is the time step, and du' and + dv' are velocity component increments during dt, and < > denotes ensemble + average. + + To allow computing spin based on full velocity fields, this function does + not do any demeaning of the velocity fields. If you need the spin based on + velocity anomalies, ensure to demean the velocity fields before passing + them to this function. This function also returns instantaneous spin values, + so the rank of the result is not reduced relative to the input. + + ``u``, ``v``, and ``time`` can be multi-dimensional arrays. If the time + axis, along which the finite differencing is performed, is not the last one + (i.e. ``u.shape[-1]``), use the time_axis optional argument to specify along + which the spin should be calculated. u, v, and time must either have the + same shape, or time must be a 1-d array with the same length as + ``u.shape[time_axis]``. + + Difference scheme can be one of three values: + + 1. "forward" (default): finite difference is evaluated as ``dx[i] = dx[i+1] - dx[i]``; + 2. "backward": finite difference is evaluated as ``dx[i] = dx[i] - dx[i-1]``; + 3. "centered": finite difference is evaluated as ``dx[i] = (dx[i+1] - dx[i-1]) / 2``. + + Forward and backward schemes are effectively the same except that the + position at which the velocity is evaluated is shifted one element down in + the backward scheme relative to the forward scheme. 
In the case of a + forward or backward difference scheme, the last or first element of the + velocity, respectively, is extrapolated from its neighboring point. In the + case of a centered difference scheme, the start and end boundary points are + evaluated using the forward and backward difference scheme, respectively. + + Parameters + ---------- + u : np.ndarray + Zonal velocity + v : np.ndarray + Meridional velocity + time : array-like + Time + difference_scheme : str, optional + Difference scheme to use; possible values are "forward", "backward", and "centered". + time_axis : int, optional + Axis along which the time varies (default is -1) + + Returns + ------- + s : float or np.ndarray + Spin + + Raises + ------ + ValueError + If u and v do not have the same shape. + If the time axis is outside of the valid range ([-1, N-1]). + If lengths of u, v, and time along time_axis are not equal. + If difference_scheme is not "forward", "backward", or "centered". + + Examples + -------- + >>> from clouddrift.kinematics import spin + >>> import numpy as np + >>> u = np.array([1., 2., -1., 4.]) + >>> v = np.array([1., 3., -2., 1.]) + >>> time = np.array([0., 1., 2., 3.]) + >>> spin(u, v, time) + array([ 0.5 , -0.07692308, 1.4 , 0.41176471]) + + Use ``difference_scheme`` to specify an alternative finite difference + scheme for the velocity differences: + + >>> spin(u, v, time, difference_scheme="centered") + array([0.5 , 0. , 0.6 , 0.41176471]) + >>> spin(u, v, time, difference_scheme="backward") + array([ 0.5 , 0.07692308, -0.2 , 0.41176471]) + + References + ---------- + * Sawford, B.L., 1999. Rotation of trajectories in Lagrangian stochastic models of turbulent dispersion. Boundary-layer meteorology, 93, pp.411-424. https://doi.org/10.1023/A:1002114132715 + * Veneziani, M., Griffa, A., Garraffo, Z.D. and Chassignet, E.P., 2005. Lagrangian spin parameter and coherent structures from trajectories released in a high-resolution ocean model. 
Journal of Marine Research, 63(4), pp.753-788. https://elischolar.library.yale.edu/journal_of_marine_research/100/ + """ + if not u.shape == v.shape: + raise ValueError("u and v arrays must have the same shape.") + + if not time.shape == u.shape: + if not time.size == u.shape[time_axis]: + raise ValueError("time must have the same length as u along time_axis.") + + # axis must be in valid range + if time_axis < -1 or time_axis > len(u.shape) - 1: + raise ValueError( + f"axis ({time_axis}) is outside of the valid range ([-1," + f" {len(u.shape) - 1}])." + ) + + # Swap axes so that we can differentiate along the last axis. + # This is a syntax convenience rather than memory access optimization: + # np.swapaxes returns a view of the array, not a copy, if the input is a + # NumPy array. Otherwise, it returns a copy. + u = np.swapaxes(u, time_axis, -1) + v = np.swapaxes(v, time_axis, -1) + time = np.swapaxes(time, time_axis, -1) + + if not time.shape == u.shape: + # time is 1-d array; broadcast to u.shape. 
+ time = np.broadcast_to(time, u.shape) + + du = np.empty(u.shape) + dv = np.empty(v.shape) + dt = np.empty(time.shape) + + if difference_scheme == "forward": + du[..., :-1] = np.diff(u) + du[..., -1] = du[..., -2] + dv[..., :-1] = np.diff(v) + dv[..., -1] = dv[..., -2] + dt[..., :-1] = np.diff(time) + dt[..., -1] = dt[..., -2] + elif difference_scheme == "backward": + du[..., 1:] = np.diff(u) + du[..., 0] = du[..., 1] + dv[..., 1:] = np.diff(v) + dv[..., 0] = dv[..., 1] + dt[..., 1:] = np.diff(time) + dt[..., 0] = dt[..., 1] + elif difference_scheme == "centered": + du[..., 1:-1] = (u[..., 2:] - u[..., :-2]) / 2 + du[..., 0] = u[..., 1] - u[..., 0] + du[..., -1] = u[..., -1] - u[..., -2] + dv[..., 1:-1] = (v[..., 2:] - v[..., :-2]) / 2 + dv[..., 0] = v[..., 1] - v[..., 0] + dv[..., -1] = v[..., -1] - v[..., -2] + dt[..., 1:-1] = (time[..., 2:] - time[..., :-2]) / 2 + dt[..., 0] = time[..., 1] - time[..., 0] + dt[..., -1] = time[..., -1] - time[..., -2] + else: + raise ValueError( + 'difference_scheme must be "forward", "backward", or "centered".' + ) + + # Compute spin + s = (u * dv - v * du) / (2 * dt * kinetic_energy(u, v)) + + return np.swapaxes(s, time_axis, -1)
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/pairs.html b/_modules/clouddrift/pairs.html new file mode 100644 index 00000000..7126019f --- /dev/null +++ b/_modules/clouddrift/pairs.html @@ -0,0 +1,1007 @@ + + + + + + + + + + clouddrift.pairs — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.pairs

+"""
+Functions to analyze pairs of contiguous data segments.
+"""
+from clouddrift import ragged, sphere
+from concurrent.futures import as_completed, ThreadPoolExecutor
+import itertools
+import numpy as np
+import pandas as pd
+import xarray as xr
+from typing import List, Optional, Tuple, Union
+
+array_like = Union[list[float], np.ndarray[float], pd.Series, xr.DataArray]
+
+
+
+[docs] +def chance_pair( + lon1: array_like, + lat1: array_like, + lon2: array_like, + lat2: array_like, + time1: Optional[array_like] = None, + time2: Optional[array_like] = None, + space_distance: Optional[float] = 0, + time_distance: Optional[float] = 0, +): + """Given two sets of longitudes, latitudes, and times arrays, return in pairs + the indices of collocated data points that are within prescribed distances + in space and time. Also known as chance pairs. + + Parameters + ---------- + lon1 : array_like + First array of longitudes in degrees. + lat1 : array_like + First array of latitudes in degrees. + lon2 : array_like + Second array of longitudes in degrees. + lat2 : array_like + Second array of latitudes in degrees. + time1 : array_like, optional + First array of times. + time2 : array_like, optional + Second array of times. + space_distance : float, optional + Maximum allowable space distance in meters for a pair to qualify as chance pair. + If the separation is within this distance, the pair is considered to be + a chance pair. Default is 0, or no distance, i.e. the positions must be + exactly the same. + time_distance : float, optional + Maximum allowable time distance for a pair to qualify as chance pair. + If a separation is within this distance, and a space distance + condition is satisfied, the pair is considered a chance pair. Default is + 0, or no distance, i.e. the times must be exactly the same. + + Returns + ------- + indices1 : np.ndarray[int] + Indices within the first set of arrays that lead to chance pair. + indices2 : np.ndarray[int] + Indices within the second set of arrays that lead to chance pair. 
+ + Examples + -------- + In the following example, we load the GLAD dataset, extract the first + two trajectories, and find between these the array indices that satisfy + the chance pair criteria of 6 km separation distance and no time separation: + + >>> from clouddrift.datasets import glad + >>> from clouddrift.pairs import chance_pair + >>> from clouddrift.ragged import unpack + >>> ds = glad() + >>> lon1 = unpack(ds["longitude"], ds["rowsize"], rows=0).pop() + >>> lat1 = unpack(ds["latitude"], ds["rowsize"], rows=0).pop() + >>> time1 = unpack(ds["time"], ds["rowsize"], rows=0).pop() + >>> lon2 = unpack(ds["longitude"], ds["rowsize"], rows=1).pop() + >>> lat2 = unpack(ds["latitude"], ds["rowsize"], rows=1).pop() + >>> time2 = unpack(ds["time"], ds["rowsize"], rows=1).pop() + >>> i1, i2 = chance_pair(lon1, lat1, lon2, lat2, time1, time2, 6000, np.timedelta64(0)) + >>> i1, i2 + (array([177, 180, 183, 186, 189, 192]), array([166, 169, 172, 175, 178, 181])) + + Check to ensure our collocation in space worked by calculating the distance + between the identified pairs: + + >>> sphere.distance(lon1[i1], lat1[i1], lon2[i2], lat2[i2]) + array([5967.4844, 5403.253 , 5116.9136, 5185.715 , 5467.8555, 5958.4917], + dtype=float32) + + Check the collocation in time: + + >>> time1[i1] - time2[i2] + <xarray.DataArray 'time' (obs: 6)> + array([0, 0, 0, 0, 0, 0], dtype='timedelta64[ns]') + Coordinates: + time (obs) datetime64[ns] 2012-07-21T21:30:00.524160 ... 2012-07-22T0... + Dimensions without coordinates: obs + + Raises + ------ + ValueError + If ``time1`` and ``time2`` are not both provided or both omitted. + """ + if (time1 is None and time2 is not None) or (time1 is not None and time2 is None): + raise ValueError( + "Both time1 and time2 must be provided or both must be omitted." + ) + + time_present = time1 is not None and time2 is not None + + if time_present: + # If time is provided, subset the trajectories to the overlapping times. 
+ overlap1, overlap2 = pair_time_overlap(time1, time2, time_distance) + else: + # Otherwise, initialize the overlap indices to the full length of the + # trajectories. + overlap1 = np.arange(lon1.size) + overlap2 = np.arange(lon2.size) + + # Provided space distance is in meters, but here we convert it to degrees + # for the bounding box overlap check. + space_distance_degrees = np.degrees(space_distance / sphere.EARTH_RADIUS_METERS) + + # Compute the indices for each trajectory where the two trajectories' + # bounding boxes overlap. + bbox_overlap1, bbox_overlap2 = pair_bounding_box_overlap( + lon1[overlap1], + lat1[overlap1], + lon2[overlap2], + lat2[overlap2], + space_distance_degrees, + ) + + # bbox_overlap1 and bbox_overlap2 subset the overlap1 and overlap2 indices. + overlap1 = overlap1[bbox_overlap1] + overlap2 = overlap2[bbox_overlap2] + + # If time is present, first search for collocation in time. + if time_present: + time_separation = pair_time_distance(time1[overlap1], time2[overlap2]) + time_match2, time_match1 = np.where(time_separation <= time_distance) + overlap1 = overlap1[time_match1] + overlap2 = overlap2[time_match2] + + # Now search for collocation in space. + space_separation = pair_space_distance( + lon1[overlap1], lat1[overlap1], lon2[overlap2], lat2[overlap2] + ) + space_overlap = space_separation <= space_distance + if time_present: + time_separation = pair_time_distance(time1[overlap1], time2[overlap2]) + time_overlap = time_separation <= time_distance + match2, match1 = np.where(space_overlap & time_overlap) + else: + match2, match1 = np.where(space_overlap) + + overlap1 = overlap1[match1] + overlap2 = overlap2[match2] + + return overlap1, overlap2
+ + + +
+[docs] +def chance_pairs_from_ragged( + lon: array_like, + lat: array_like, + rowsize: array_like, + space_distance: Optional[float] = 0, + time: Optional[array_like] = None, + time_distance: Optional[float] = 0, +) -> List[Tuple[Tuple[int, int], Tuple[np.ndarray, np.ndarray]]]: + """Return all chance pairs of contiguous trajectories in a ragged array, + and their collocated points in space and (optionally) time, given input + ragged arrays of longitude, latitude, and (optionally) time, and chance + pair criteria as maximum allowable distances in space and time. + + If ``time`` and ``time_distance`` are omitted, the search will be done + only on the spatial criteria, and the result will not include the time + arrays. + + If ``time`` and ``time_distance`` are provided, the search will be done + on both the spatial and temporal criteria, and the result will include the + time arrays. + + Parameters + ---------- + lon : array_like + Array of longitudes in degrees. + lat : array_like + Array of latitudes in degrees. + rowsize : array_like + Array of rowsizes. + space_distance : float, optional + Maximum space distance in meters for the pair to qualify as chance pair. + If the separation is within this distance, the pair is considered to be + a chance pair. Default is 0, or no distance, i.e. the positions must be + exactly the same. + time : array_like, optional + Array of times. + time_distance : float, optional + Maximum time distance allowed for the pair to qualify as chance pair. + If the separation is within this distance, and the space distance + condition is satisfied, the pair is considered a chance pair. Default is + 0, or no distance, i.e. the times must be exactly the same. 
+ + Returns + ------- + pairs : List[Tuple[Tuple[int, int], Tuple[np.ndarray, np.ndarray]]] + List of tuples, each tuple containing a Tuple of integer indices that + corresponds to the trajectory rows in the ragged array, indicating the + pair of trajectories that satisfy the chance pair criteria, and a Tuple + of arrays containing the indices of the collocated points for each + trajectory in the chance pair. + + Examples + -------- + In the following example, we load GLAD dataset as a ragged array dataset, + subset the result to retain the first five trajectories, and finally find + all trajectories that satisfy the chance pair criteria of 12 km separation + distance and no time separation, as well as the indices of the collocated + points for each pair. + + >>> from clouddrift.datasets import glad + >>> from clouddrift.pairs import chance_pairs_from_ragged + >>> from clouddrift.ragged import subset + >>> ds = subset(glad(), {"id": ["CARTHE_001", "CARTHE_002", "CARTHE_003", "CARTHE_004", "CARTHE_005"]}, id_var_name="id") + >>> pairs = chance_pairs_from_ragged( + ds["longitude"].values, + ds["latitude"].values, + ds["rowsize"].values, + space_distance=12000, + time=ds["time"].values, + time_distance=np.timedelta64(0) + ) + [((0, 1), + (array([153, 156, 159, 162, 165, 168, 171, 174, 177, 180, 183, 186, 189, + 192, 195, 198, 201, 204, 207, 210, 213, 216]), + array([142, 145, 148, 151, 154, 157, 160, 163, 166, 169, 172, 175, 178, + 181, 184, 187, 190, 193, 196, 199, 202, 205]))), + ((3, 4), + (array([141, 144, 147, 150, 153, 156, 159, 162, 165, 168, 171, 174, 177, + 180, 183]), + array([136, 139, 142, 145, 148, 151, 154, 157, 160, 163, 166, 169, 172, + 175, 178])))] + + The result above shows that 2 chance pairs were found. + + Raises + ------ + ValueError + If ``rowsize`` has fewer than two elements. 
+ """ + if len(rowsize) < 2: + raise ValueError("rowsize must have at least two elements.") + pairs = list(itertools.combinations(np.arange(rowsize.size), 2)) + i = ragged.rowsize_to_index(rowsize) + results = [] + with ThreadPoolExecutor() as executor: + if time is None: + futures = [ + executor.submit( + chance_pair, + lon[i[j] : i[j + 1]], + lat[i[j] : i[j + 1]], + lon[i[k] : i[k + 1]], + lat[i[k] : i[k + 1]], + space_distance=space_distance, + ) + for j, k in pairs + ] + else: + futures = [ + executor.submit( + chance_pair, + lon[i[j] : i[j + 1]], + lat[i[j] : i[j + 1]], + lon[i[k] : i[k + 1]], + lat[i[k] : i[k + 1]], + time[i[j] : i[j + 1]], + time[i[k] : i[k + 1]], + space_distance, + time_distance, + ) + for j, k in pairs + ] + for future in as_completed(futures): + res = future.result() + # chance_pair function returns empty arrays if no chance criteria + # are satisfied. We only want to keep pairs that satisfy the + # criteria. chance_pair returns a tuple of arrays that are always + # the same size, so we only need to check the length of the first + # array. + if res[0].size > 0: + results.append((pairs[futures.index(future)], res)) + return results
+ + + +
+[docs] +def pair_bounding_box_overlap( + lon1: array_like, + lat1: array_like, + lon2: array_like, + lat2: array_like, + distance: Optional[float] = 0, +) -> Tuple[np.ndarray[bool], np.ndarray[bool]]: + """Given two arrays of longitudes and latitudes, return boolean masks for + their overlapping bounding boxes. + + Parameters + ---------- + lon1 : array_like + First array of longitudes in degrees. + lat1 : array_like + First array of latitudes in degrees. + lon2 : array_like + Second array of longitudes in degrees. + lat2 : array_like + Second array of latitudes in degrees. + distance : float, optional + Distance in degrees for the overlap. If the overlap is within this + distance, the bounding boxes are considered to overlap. Default is 0. + + Returns + ------- + overlap1 : np.ndarray[int] + Indices ``lon1`` and ``lat1`` where their bounding box overlaps with + that of ``lon2`` and ``lat2``. + overlap2 : np.ndarray[int] + Indices ``lon2`` and ``lat2`` where their bounding box overlaps with + that of ``lon1`` and ``lat1``. + + Examples + -------- + >>> lon1 = [0, 0, 1, 1] + >>> lat1 = [0, 0, 1, 1] + >>> lon2 = [1, 1, 2, 2] + >>> lat2 = [1, 1, 2, 2] + >>> pair_bounding_box_overlap(lon1, lat1, lon2, lat2, 0.5) + (array([2, 3]), array([0, 1])) + """ + # First get the bounding box of each trajectory. + # We unwrap the longitudes before computing min/max because we want to + # consider trajectories that cross the dateline. 
+ lon1_min, lon1_max = np.min(np.unwrap(lon1, period=360)), np.max( + np.unwrap(lon1, period=360) + ) + lat1_min, lat1_max = np.min(lat1), np.max(lat1) + lon2_min, lon2_max = np.min(np.unwrap(lon2, period=360)), np.max( + np.unwrap(lon2, period=360) + ) + lat2_min, lat2_max = np.min(lat2), np.max(lat2) + + bounding_boxes_overlap = ( + (lon1_min <= lon2_max + distance) + & (lon1_max >= lon2_min - distance) + & (lat1_min <= lat2_max + distance) + & (lat1_max >= lat2_min - distance) + ) + + # Now check if the trajectories overlap within the bounding box. + if bounding_boxes_overlap: + overlap_start = ( + max(lon1_min, lon2_min) - distance, # West + max(lat1_min, lat2_min) - distance, # South + ) + overlap_end = ( + min(lon1_max, lon2_max) + distance, # East + min(lat1_max, lat2_max) + distance, # North + ) + overlap1 = ( + (lon1 >= overlap_start[0]) + & (lon1 <= overlap_end[0]) + & (lat1 >= overlap_start[1]) + & (lat1 <= overlap_end[1]) + ) + overlap2 = ( + (lon2 >= overlap_start[0]) + & (lon2 <= overlap_end[0]) + & (lat2 >= overlap_start[1]) + & (lat2 <= overlap_end[1]) + ) + return np.where(overlap1)[0], np.where(overlap2)[0] + else: + return np.array([], dtype=int), np.array([], dtype=int)
+ + + +
+[docs] +def pair_space_distance( + lon1: array_like, + lat1: array_like, + lon2: array_like, + lat2: array_like, +) -> np.ndarray[float]: + """Given two arrays of longitudes and latitudes, return the distance + on a sphere between all pairs of points. + + Parameters + ---------- + lon1 : array_like + First array of longitudes in degrees. + lat1 : array_like + First array of latitudes in degrees. + lon2 : array_like + Second array of longitudes in degrees. + lat2 : array_like + Second array of latitudes in degrees. + + Returns + ------- + distance : np.ndarray[float] + Array of distances between all pairs of points. + + Examples + -------- + >>> lon1 = [0, 0, 1, 1] + >>> lat1 = [0, 0, 1, 1] + >>> lon2 = [1, 1, 2, 2] + >>> lat2 = [1, 1, 2, 2] + >>> pair_space_distance(lon1, lat1, lon2, lat2) + array([[157424.62387233, 157424.62387233, 0. , + 0. ], + [157424.62387233, 157424.62387233, 0. , + 0. ], + [314825.26360286, 314825.26360286, 157400.64794884, + 157400.64794884], + [314825.26360286, 314825.26360286, 157400.64794884, + 157400.64794884]]) + """ + # Create longitude and latitude matrices from arrays to compute distance + lon1_2d, lon2_2d = np.meshgrid(lon1, lon2, copy=False) + lat1_2d, lat2_2d = np.meshgrid(lat1, lat2, copy=False) + + # Compute distance between all pairs of points + distance = sphere.distance(lon1_2d, lat1_2d, lon2_2d, lat2_2d) + + return distance
+ + + +
+[docs] +def pair_time_distance( + time1: array_like, + time2: array_like, +) -> np.ndarray[float]: + """Given two arrays of times (or any other monotonically increasing + quantity), return the temporal distance between all pairs of times. + + Parameters + ---------- + time1 : array_like + First array of times. + time2 : array_like + Second array of times. + + Returns + ------- + distance : np.ndarray[float] + Array of distances between all pairs of times. + + Examples + -------- + >>> time1 = np.arange(4) + >>> time2 = np.arange(2, 6) + >>> pair_time_distance(time1, time2) + array([[2, 1, 0, 1], + [3, 2, 1, 0], + [4, 3, 2, 1], + [5, 4, 3, 2]]) + """ + # Create time matrices from arrays to compute distance + time1_2d, time2_2d = np.meshgrid(time1, time2, copy=False) + + # Compute distance between all pairs of times + distance = np.abs(time1_2d - time2_2d) + + return distance
+ + + +
+[docs] +def pair_time_overlap( + time1: array_like, + time2: array_like, + distance: Optional[float] = 0, +) -> Tuple[np.ndarray[int], np.ndarray[int]]: + """Given two arrays of times (or any other monotonically increasing + quantity), return indices where the times are within a prescribed distance. + + Although higher-level array containers like xarray and pandas are supported + for input arrays, this function is an order of magnitude faster when passing + in numpy arrays. + + Parameters + ---------- + time1 : array_like + First array of times. + time2 : array_like + Second array of times. + distance : float + Maximum distance within which the values of ``time1`` and ``time2`` are + considered to overlap. Default is 0, or, the values must be exactly the + same. + + Returns + ------- + overlap1 : np.ndarray[int] + Indices of ``time1`` where its time overlaps with ``time2``. + overlap2 : np.ndarray[int] + Indices of ``time2`` where its time overlaps with ``time1``. + + Examples + -------- + >>> time1 = np.arange(4) + >>> time2 = np.arange(2, 6) + >>> pair_time_overlap(time1, time2) + (array([2, 3]), array([0, 1])) + + >>> pair_time_overlap(time1, time2, 1) + (array([1, 2, 3]), array([0, 1, 2])) + """ + time1_min, time1_max = np.min(time1), np.max(time1) + time2_min, time2_max = np.min(time2), np.max(time2) + overlap_start = max(time1_min, time2_min) - distance + overlap_end = min(time1_max, time2_max) + distance + overlap1 = np.where((time1 >= overlap_start) & (time1 <= overlap_end))[0] + overlap2 = np.where((time2 >= overlap_start) & (time2 <= overlap_end))[0] + return overlap1, overlap2
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/plotting.html b/_modules/clouddrift/plotting.html new file mode 100644 index 00000000..b89314c8 --- /dev/null +++ b/_modules/clouddrift/plotting.html @@ -0,0 +1,693 @@ + + + + + + + + + + clouddrift.plotting — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.plotting

+"""
+This module provides a function to easily and efficiently plot trajectories stored in a ragged array.
+"""
+
+from clouddrift.ragged import segment, rowsize_to_index
+import numpy as np
+import pandas as pd
+from typing import Optional, Union
+import xarray as xr
+import pandas as pd
+from typing import Optional, Union
+from clouddrift.ragged import segment, rowsize_to_index
+
+
+
+[docs] +def plot_ragged( + ax, + longitude: Union[list, np.ndarray, pd.Series, xr.DataArray], + latitude: Union[list, np.ndarray, pd.Series, xr.DataArray], + rowsize: Union[list, np.ndarray, pd.Series, xr.DataArray], + *args, + colors: Optional[Union[list, np.ndarray, pd.Series, xr.DataArray]] = None, + tolerance: Optional[Union[float, int]] = 180, + **kwargs, +): + """Plot trajectories from a ragged array dataset on a Matplotlib Axes + or a Cartopy GeoAxes object ``ax``. + + This function wraps Matplotlib's ``plot`` function (``plt.plot``) and + ``LineCollection`` (``matplotlib.collections``) to efficiently plot + trajectories from a ragged array dataset. + + Parameters + ---------- + ax: matplotlib.axes.Axes or cartopy.mpl.geoaxes.GeoAxes + Axis to plot on. + longitude : array-like + Longitude sequence. Unidimensional array input. + latitude : array-like + Latitude sequence. Unidimensional array input. + rowsize : list + List of integers specifying the number of data points in each row. + *args : tuple + Additional arguments to pass to ``ax.plot``. + colors : array-like + Colors to use for plotting. If colors is the same shape as longitude and latitude, + the trajectories are splitted into segments and each segment is colored according + to the corresponding color value. If colors is the same shape as rowsize, the + trajectories are uniformly colored according to the corresponding color value. + tolerance : float + Longitude tolerance gap between data points (in degrees) for segmenting trajectories. + For periodic domains, the tolerance parameter should be set to the maximum allowed gap + between data points. Defaults to 180. + **kwargs : dict + Additional keyword arguments to pass to ``ax.plot``. + + Returns + ------- + list of matplotlib.lines.Line2D or matplotlib.collections.LineCollection + The plotted lines or line collection. Can be used to set a colorbar + after plotting or extract information from the lines. 
+ + Examples + -------- + + Plot the first 100 trajectories from the gdp1h dataset, assigning + a different color to each trajectory: + + >>> from clouddrift import datasets + >>> import matplotlib.pyplot as plt + >>> ds = datasets.gdp1h() + >>> ds = subset(ds, {"ID": ds.ID[:100].values}).load() + >>> fig = plt.figure() + >>> ax = fig.add_subplot(1, 1, 1) + + >>> plot_ragged( + >>> ax, + >>> ds.lon, + >>> ds.lat, + >>> ds.rowsize, + >>> colors=np.arange(len(ds.rowsize)) + >>> ) + + To plot the same trajectories, but assigning a different color to each + observation and specifying a colormap: + + >>> fig = plt.figure() + >>> ax = fig.add_subplot(1, 1, 1) + >>> time = [v.astype(np.int64) / 86400 / 1e9 for v in ds.time.values] + >>> lc = plot_ragged( + >>> ax, + >>> ds.lon, + >>> ds.lat, + >>> ds.rowsize, + >>> colors=np.floor(time), + >>> cmap="inferno" + >>> ) + >>> fig.colorbar(lc[0]) + >>> ax.set_xlim([-180, 180]) + >>> ax.set_ylim([-90, 90]) + + Finally, to plot the same trajectories, but using a cartopy + projection: + + >>> import cartopy.crs as ccrs + >>> fig = plt.figure() + >>> ax = fig.add_subplot(1, 1, 1, projection=ccrs.Mollweide()) + >>> time = [v.astype(np.int64) / 86400 / 1e9 for v in ds.time.values] + >>> lc = plot_ragged( + >>> ax, + >>> ds.lon, + >>> ds.lat, + >>> ds.rowsize, + >>> colors=np.arange(len(ds.rowsize)), + >>> transform=ccrs.PlateCarree(), + >>> cmap=cmocean.cm.ice, + >>> ) + + Raises + ------ + ValueError + If longitude and latitude arrays do not have the same shape. + If colors do not have the same shape as longitude and latitude arrays or rowsize. + If ax is not a matplotlib Axes or GeoAxes object. + If ax is a GeoAxes object and the transform keyword argument is not provided. + + ImportError + If matplotlib is not installed. + If the axis is a GeoAxes object and cartopy is not installed. 
+ """ + + # optional dependency + try: + import matplotlib.pyplot as plt + import matplotlib.colors as mcolors + from matplotlib.collections import LineCollection + from matplotlib import cm + except ImportError: + raise ImportError("missing optional dependency 'matplotlib'") + + if hasattr(ax, "coastlines"): # check if GeoAxes without cartopy + try: + from cartopy.mpl.geoaxes import GeoAxes + + if isinstance(ax, GeoAxes) and not kwargs.get("transform"): + raise ValueError( + "For GeoAxes, the transform keyword argument must be provided." + ) + except ImportError: + raise ImportError("missing optional dependency 'cartopy'") + elif not isinstance(ax, plt.Axes): + raise ValueError("ax must be either: plt.Axes or GeoAxes.") + + if np.sum(rowsize) != len(longitude): + raise ValueError("The sum of rowsize must equal the length of lon and lat.") + + if len(longitude) != len(latitude): + raise ValueError("lon and lat must have the same length.") + + if colors is None: + colors = np.arange(len(rowsize)) + elif colors is not None and (len(colors) not in [len(longitude), len(rowsize)]): + raise ValueError("shape colors must match the shape of lon/lat or rowsize.") + + # define a colormap + cmap = kwargs.pop("cmap", cm.viridis) + + # define a normalization obtain uniform colors + # for the sequence of lines or LineCollection + norm = kwargs.pop( + "norm", mcolors.Normalize(vmin=np.nanmin(colors), vmax=np.nanmax(colors)) + ) + + mpl_plot = True if colors is None or len(colors) == len(rowsize) else False + traj_idx = rowsize_to_index(rowsize) + + lines = [] + for i in range(len(rowsize)): + lon_i, lat_i = ( + longitude[traj_idx[i] : traj_idx[i + 1]], + latitude[traj_idx[i] : traj_idx[i + 1]], + ) + + start = 0 + for length in segment(lon_i, tolerance, rowsize=segment(lon_i, -tolerance)): + end = start + length + + if mpl_plot: + line = ax.plot( + lon_i[start:end], + lat_i[start:end], + c=cmap(norm(colors[i])) if colors is not None else None, + *args, + **kwargs, + ) + else: + 
colors_i = colors[traj_idx[i] : traj_idx[i + 1]] + segments = np.column_stack( + [ + lon_i[start : end - 1], + lat_i[start : end - 1], + lon_i[start + 1 : end], + lat_i[start + 1 : end], + ] + ).reshape(-1, 2, 2) + line = LineCollection(segments, cmap=cmap, norm=norm, *args, **kwargs) + line.set_array( + # color of a segment is the average of its two data points + np.convolve(colors_i[start:end], [0.5, 0.5], mode="valid") + ) + ax.add_collection(line) + + start = end + lines.append(line) + + # set axis limits + ax.set_xlim([np.min(longitude), np.max(longitude)]) + ax.set_ylim([np.min(latitude), np.max(latitude)]) + + return lines
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/ragged.html b/_modules/clouddrift/ragged.html new file mode 100644 index 00000000..5c2f36f4 --- /dev/null +++ b/_modules/clouddrift/ragged.html @@ -0,0 +1,1341 @@ + + + + + + + + + + clouddrift.ragged — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.ragged

+"""
+Transformational and inquiry functions for ragged arrays.
+"""
+
+import numpy as np
+from typing import Tuple, Union, Iterable, Callable
+import xarray as xr
+import pandas as pd
+from concurrent import futures
+from datetime import timedelta
+import warnings
+
+
+
+[docs] +def apply_ragged( + func: callable, + arrays: Union[list[Union[np.ndarray, xr.DataArray]], np.ndarray, xr.DataArray], + rowsize: Union[list[int], np.ndarray[int], xr.DataArray], + *args: tuple, + rows: Union[int, Iterable[int]] = None, + axis: int = 0, + executor: futures.Executor = futures.ThreadPoolExecutor(max_workers=None), + **kwargs: dict, +) -> Union[tuple[np.ndarray], np.ndarray]: + """Apply a function to a ragged array. + + The function ``func`` will be applied to each contiguous row of ``arrays`` as + indicated by row sizes ``rowsize``. The output of ``func`` will be + concatenated into a single ragged array. + + You can pass ``arrays`` as NumPy arrays or xarray DataArrays, however, + the result will always be a NumPy array. Passing ``rows`` as an integer or + a sequence of integers will make ``apply_ragged`` process and return only + those specific rows, and otherwise, all rows in the input ragged array will + be processed. Further, you can use the ``axis`` parameter to specify the + ragged axis of the input array(s) (default is 0). + + By default this function uses ``concurrent.futures.ThreadPoolExecutor`` to + run ``func`` in multiple threads. The number of threads can be controlled by + passing the ``max_workers`` argument to the executor instance passed to + ``apply_ragged``. Alternatively, you can pass the ``concurrent.futures.ProcessPoolExecutor`` + instance to use processes instead. Passing alternative (3rd party library) + concurrent executors may work if they follow the same executor interface as + that of ``concurrent.futures``, however this has not been tested yet. + + Parameters + ---------- + func : callable + Function to apply to each row of each ragged array in ``arrays``. + arrays : list[np.ndarray] or np.ndarray or xr.DataArray + An array or a list of arrays to apply ``func`` to. + rowsize : list[int] or np.ndarray[int] or xr.DataArray[int] + List of integers specifying the number of data points in each row. 
+ *args : tuple + Additional arguments to pass to ``func``. + rows : int or Iterable[int], optional + The row(s) of the ragged array to apply ``func`` to. If ``rows`` is + ``None`` (default), then ``func`` will be applied to all rows. + axis : int, optional + The ragged axis of the input arrays. Default is 0. + executor : concurrent.futures.Executor, optional + Executor to use for concurrent execution. Default is ``ThreadPoolExecutor`` + with the default number of ``max_workers``. + Another supported option is ``ProcessPoolExecutor``. + **kwargs : dict + Additional keyword arguments to pass to ``func``. + + Returns + ------- + out : tuple[np.ndarray] or np.ndarray + Output array(s) from ``func``. + + Examples + -------- + + Using ``velocity_from_position`` with ``apply_ragged``, calculate the velocities of + multiple particles, the coordinates of which are found in the ragged arrays x, y, and t + that share row sizes 2, 3, and 4: + + >>> rowsize = [2, 3, 4] + >>> x = np.array([1, 2, 10, 12, 14, 30, 33, 36, 39]) + >>> y = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8]) + >>> t = np.array([1, 2, 1, 2, 3, 1, 2, 3, 4]) + >>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, coord_system="cartesian") + array([1., 1., 2., 2., 2., 3., 3., 3., 3.]), + array([1., 1., 1., 1., 1., 1., 1., 1., 1.])) + + To apply ``func`` to only a subset of rows, use the ``rows`` argument: + + >>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, rows=0, coord_system="cartesian") + array([1., 1.]), + array([1., 1.])) + >>> u1, v1 = apply_ragged(velocity_from_position, [x, y, t], rowsize, rows=[0, 1], coord_system="cartesian") + array([1., 1., 2., 2., 2.]), + array([1., 1., 1., 1., 1.])) + + Raises + ------ + ValueError + If the sum of ``rowsize`` does not equal the length of ``arrays``. + IndexError + If empty ``arrays``. 
+ """ + # make sure the arrays is iterable + if type(arrays) not in [list, tuple]: + arrays = [arrays] + # validate rowsize + for arr in arrays: + if not np.sum(rowsize) == arr.shape[axis]: + raise ValueError("The sum of rowsize must equal the length of arr.") + + # split the array(s) into trajectories + arrays = [unpack(np.array(arr), rowsize, rows, axis) for arr in arrays] + iter = [[arrays[i][j] for i in range(len(arrays))] for j in range(len(arrays[0]))] + + # parallel execution + res = [executor.submit(func, *x, *args, **kwargs) for x in iter] + res = [r.result() for r in res] + + # Concatenate the outputs. + + # The following wraps items in a list if they are not already iterable. + res = [item if isinstance(item, Iterable) else [item] for item in res] + + # np.concatenate can concatenate along non-zero axis iff the length of + # arrays to be concatenated is > 1. If the length is 1, for example in the + # case of func that reduces over the non-ragged axis, we can only + # concatenate along axis 0. + if isinstance(res[0], tuple): # more than 1 parameter + outputs = [] + for i in range(len(res[0])): # iterate over each result variable + # If we have multiple outputs and func is a reduction function, + # we now here have a list of scalars. We need to wrap them in a + # list to concatenate them. + result = [r[i] if isinstance(r[i], Iterable) else [r[i]] for r in res] + if len(result[0]) > 1: + # Arrays to concatenate are longer than 1 element, so we can + # concatenate along the non-zero axis. + outputs.append(np.concatenate(result, axis=axis)) + else: + # Arrays to concatenate are 1 element long, so we can only + # concatenate along axis 0. + outputs.append(np.concatenate(result)) + return tuple(outputs) + else: + if len(res[0]) > 1: + # Arrays to concatenate are longer than 1 element, so we can + # concatenate along the non-zero axis. 
+ return np.concatenate(res, axis=axis) + else: + # Arrays to concatenate are 1 element long, so we can only + # concatenate along axis 0. + return np.concatenate(res)
+ + + +
+[docs] +def chunk( + x: Union[list, np.ndarray, xr.DataArray, pd.Series], + length: int, + overlap: int = 0, + align: str = "start", +) -> np.ndarray: + """Divide an array ``x`` into equal chunks of length ``length``. The result + is a 2-dimensional NumPy array of shape ``(num_chunks, length)``. The resulting + number of chunks is determined based on the length of ``x``, ``length``, + and ``overlap``. + + ``chunk`` can be combined with :func:`apply_ragged` to chunk a ragged array. + + Parameters + ---------- + x : list or array-like + Array to divide into chunks. + length : int + The length of each chunk. + overlap : int, optional + The number of overlapping array elements across chunks. The default is 0. + Must be smaller than ``length``. For example, if ``length`` is 4 and + ``overlap`` is 2, the chunks of ``[0, 1, 2, 3, 4, 5]`` will be + ``np.array([[0, 1, 2, 3], [2, 3, 4, 5]])``. Negative overlap can be used + to offset chunks by some number of elements. For example, if ``length`` + is 2 and ``overlap`` is -1, the chunks of ``[0, 1, 2, 3, 4, 5]`` will + be ``np.array([[0, 1], [3, 4]])``. + align : str, optional ["start", "middle", "end"] + If the remainder of the length of ``x`` divided by the chunk ``length`` is a number + N different from zero, this parameter controls which part of the array will be kept + into the chunks. If ``align="start"``, the elements at the beginning of the array + will be part of the chunks and N points are discarded at the end. If `align="middle"`, + floor(N/2) and ceil(N/2) elements will be discarded from the beginning and the end + of the array, respectively. If ``align="end"``, the elements at the end of the array + will be kept, and the `N` first elements are discarded. The default is "start". + + Returns + ------- + np.ndarray + 2-dimensional array of shape ``(num_chunks, length)``. 
+ + Examples + -------- + + Chunk a simple list; this discards the end elements that exceed the last chunk: + + >>> chunk([1, 2, 3, 4, 5], 2) + array([[1, 2], + [3, 4]]) + + To discard the starting elements of the array instead, use ``align="end"``: + + >>> chunk([1, 2, 3, 4, 5], 2, align="end") + array([[2, 3], + [4, 5]]) + + To center the chunks by discarding both ends of the array, use ``align="middle"``: + + >>> chunk([1, 2, 3, 4, 5, 6, 7, 8], 3, align="middle") + array([[2, 3, 4], + [5, 6, 7]]) + + Specify ``overlap`` to get overlapping chunks: + + >>> chunk([1, 2, 3, 4, 5], 2, overlap=1) + array([[1, 2], + [2, 3], + [3, 4], + [4, 5]]) + + Use ``apply_ragged`` to chunk a ragged array by providing the row sizes; + notice that you must pass the array to chunk as an array-like, not a list: + + >>> x = np.array([1, 2, 3, 4, 5]) + >>> rowsize = [2, 1, 2] + >>> apply_ragged(chunk, x, rowsize, 2) + array([[1, 2], + [4, 5]]) + + Raises + ------ + ValueError + If ``length < 0``. + ValueError + If ``align not in ["start", "middle", "end"]``. + ZeroDivisionError + if ``length == 0``. + """ + num_chunks = (len(x) - length) // (length - overlap) + 1 if len(x) >= length else 0 + remainder = len(x) - num_chunks * length + (num_chunks - 1) * overlap + res = np.empty((num_chunks, length), dtype=np.array(x).dtype) + + if align == "start": + start = 0 + elif align == "middle": + start = remainder // 2 + elif align == "end": + start = remainder + else: + raise ValueError("align must be one of 'start', 'middle', or 'end'.") + + for n in range(num_chunks): + end = start + length + res[n] = x[start:end] + start = end - overlap + + return res
+ + + +
+[docs] +def prune( + ragged: Union[list, np.ndarray, pd.Series, xr.DataArray], + rowsize: Union[list, np.ndarray, pd.Series, xr.DataArray], + min_rowsize: float, +) -> Tuple[np.ndarray, np.ndarray]: + """Within a ragged array, removes arrays less than a specified row size. + + Parameters + ---------- + ragged : np.ndarray or pd.Series or xr.DataArray + A ragged array. + rowsize : list or np.ndarray[int] or pd.Series or xr.DataArray[int] + The size of each row in the input ragged array. + min_rowsize : + The minimum row size that will be kept. + + Returns + ------- + tuple[np.ndarray, np.ndarray] + A tuple of ragged array and size of each row. + + Examples + -------- + >>> prune(np.array([1, 2, 3, 0, -1, -2]), np.array([3, 1, 2]),2) + (array([1, 2, 3, -1, -2]), array([3, 2])) + + Raises + ------ + ValueError + If the sum of ``rowsize`` does not equal the length of ``arrays``. + IndexError + If empty ``ragged``. + + See Also + -------- + :func:`segment`, `chunk` + """ + + ragged = apply_ragged( + lambda x, min_len: x if len(x) >= min_len else np.empty(0, dtype=x.dtype), + np.array(ragged), + rowsize, + min_len=min_rowsize, + ) + rowsize = apply_ragged( + lambda x, min_len: x if x >= min_len else np.empty(0, dtype=x.dtype), + np.array(rowsize), + np.ones_like(rowsize), + min_len=min_rowsize, + ) + + return ragged, rowsize
+ + + +
+[docs] +def ragged_to_regular( + ragged: Union[np.ndarray, pd.Series, xr.DataArray], + rowsize: Union[list, np.ndarray, pd.Series, xr.DataArray], + fill_value: float = np.nan, +) -> np.ndarray: + """Convert a ragged array to a two-dimensional array such that each contiguous segment + of a ragged array is a row in the two-dimensional array. Each row of the two-dimensional + array is padded with NaNs as needed. The length of the first dimension of the output + array is the length of ``rowsize``. The length of the second dimension is the maximum + element of ``rowsize``. + + Note: Although this function accepts parameters of type ``xarray.DataArray``, + passing NumPy arrays is recommended for performance reasons. + + Parameters + ---------- + ragged : np.ndarray or pd.Series or xr.DataArray + A ragged array. + rowsize : list or np.ndarray[int] or pd.Series or xr.DataArray[int] + The size of each row in the ragged array. + fill_value : float, optional + Fill value to use for the trailing elements of each row of the resulting + regular array. + + Returns + ------- + np.ndarray + A two-dimensional array. + + Examples + -------- + By default, the fill value used is NaN: + + >>> ragged_to_regular(np.array([1, 2, 3, 4, 5]), np.array([2, 1, 2])) + array([[ 1., 2.], + [ 3., nan], + [ 4., 5.]]) + + You can specify an alternative fill value: + + >>> ragged_to_regular(np.array([1, 2, 3, 4, 5]), np.array([2, 1, 2]), fill_value=999) + array([[ 1., 2.], + [ 3., -999.], + [ 4., 5.]]) + + See Also + -------- + :func:`regular_to_ragged` + """ + res = fill_value * np.ones((len(rowsize), int(max(rowsize))), dtype=ragged.dtype) + unpacked = unpack(ragged, rowsize) + for n in range(len(rowsize)): + res[n, : int(rowsize[n])] = unpacked[n] + return res
+ + + +
+[docs] +def regular_to_ragged( + array: np.ndarray, fill_value: float = np.nan +) -> tuple[np.ndarray, np.ndarray]: + """Convert a two-dimensional array to a ragged array. Fill values in the input array are + excluded from the output ragged array. + + Parameters + ---------- + array : np.ndarray + A two-dimensional array. + fill_value : float, optional + Fill value used to determine the bounds of contiguous segments. + + Returns + ------- + tuple[np.ndarray, np.ndarray] + A tuple of the ragged array and the size of each row. + + Examples + -------- + By default, NaN values found in the input regular array are excluded from + the output ragged array: + + >>> regular_to_ragged(np.array([[1, 2], [3, np.nan], [4, 5]])) + (array([1., 2., 3., 4., 5.]), array([2, 1, 2])) + + Alternatively, a different fill value can be specified: + + >>> regular_to_ragged(np.array([[1, 2], [3, -999], [4, 5]]), fill_value=-999) + (array([1., 2., 3., 4., 5.]), array([2, 1, 2])) + + See Also + -------- + :func:`ragged_to_regular` + """ + if np.isnan(fill_value): + valid = ~np.isnan(array) + else: + valid = array != fill_value + return array[valid], np.sum(valid, axis=1)
+ + + +
+[docs] +def rowsize_to_index(rowsize: Union[list, np.ndarray, xr.DataArray]) -> np.ndarray: + """Convert a list of row sizes to a list of indices. + + This function is typically used to obtain the indices of data rows organized + in a ragged array. + + Parameters + ---------- + rowsize : list or np.ndarray or xr.DataArray + A list of row sizes. + + Returns + ------- + np.ndarray + A list of indices. + + Examples + -------- + To obtain the indices within a ragged array of three consecutive rows of sizes 100, 202, and 53: + + >>> rowsize_to_index([100, 202, 53]) + array([0, 100, 302, 355]) + """ + return np.cumsum(np.insert(np.array(rowsize), 0, 0))
+ + + +
+[docs] +def segment( + x: np.ndarray, + tolerance: Union[float, np.timedelta64, timedelta, pd.Timedelta], + rowsize: np.ndarray[int] = None, +) -> np.ndarray[int]: + """Divide an array into segments based on a tolerance value. + + Parameters + ---------- + x : list, np.ndarray, or xr.DataArray + An array to divide into segment. + tolerance : float, np.timedelta64, timedelta, pd.Timedelta + The maximum signed difference between consecutive points in a segment. + The array x will be segmented wherever differences exceed the tolerance. + rowsize : np.ndarray[int], optional + The size of rows if x is originally a ragged array. If present, x will be + divided both by gaps that exceed the tolerance, and by the original rows + of the ragged array. + + Returns + ------- + np.ndarray[int] + An array of row sizes that divides the input array into segments. + + Examples + -------- + The simplest use of ``segment`` is to provide a tolerance value that is + used to divide an array into segments: + + >>> x = [0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 4] + >>> segment(x, 0.5) + array([1, 3, 2, 4, 1]) + + If the array is already previously segmented (e.g. multiple rows in + a ragged array), then the ``rowsize`` argument can be used to preserve + the original segments: + + >>> x = [0, 1, 1, 1, 2, 2, 3, 3, 3, 3, 4] + >>> rowsize = [3, 2, 6] + >>> segment(x, 0.5, rowsize) + array([1, 2, 1, 1, 1, 4, 1]) + + The tolerance can also be negative. In this case, the input array is + segmented where the negative difference exceeds the negative + value of the tolerance, i.e. where ``x[n+1] - x[n] < -tolerance``: + + >>> x = [0, 1, 2, 0, 1, 2] + >>> segment(x, -0.5) + array([3, 3]) + + To segment an array for both positive and negative gaps, invoke the function + twice, once for a positive tolerance and once for a negative tolerance. 
+ The result of the first invocation can be passed as the ``rowsize`` argument + to the first ``segment`` invocation: + + >>> x = [1, 1, 2, 2, 1, 1, 2, 2] + >>> segment(x, 0.5, rowsize=segment(x, -0.5)) + array([2, 2, 2, 2]) + + If the input array contains time objects, the tolerance must be a time interval: + + >>> x = np.array([np.datetime64("2023-01-01"), np.datetime64("2023-01-02"), + np.datetime64("2023-01-03"), np.datetime64("2023-02-01"), + np.datetime64("2023-02-02")]) + >>> segment(x, np.timedelta64(1, "D")) + np.array([3, 2]) + """ + + # for compatibility with datetime list or np.timedelta64 arrays + if type(tolerance) in [np.timedelta64, timedelta]: + tolerance = pd.Timedelta(tolerance) + + if type(tolerance) == pd.Timedelta: + positive_tol = tolerance >= pd.Timedelta("0 seconds") + else: + positive_tol = tolerance >= 0 + + if rowsize is None: + if positive_tol: + exceeds_tolerance = np.diff(x) > tolerance + else: + exceeds_tolerance = np.diff(x) < tolerance + segment_sizes = np.diff(np.insert(np.where(exceeds_tolerance)[0] + 1, 0, 0)) + segment_sizes = np.append(segment_sizes, len(x) - np.sum(segment_sizes)) + return segment_sizes + else: + if not np.sum(rowsize) == len(x): + raise ValueError("The sum of rowsize must equal the length of x.") + segment_sizes = [] + start = 0 + for r in rowsize: + end = start + int(r) + segment_sizes.append(segment(x[start:end], tolerance)) + start = end + return np.concatenate(segment_sizes)
+ + + +
+[docs] +def subset( + ds: xr.Dataset, + criteria: dict, + id_var_name: str = "id", + rowsize_var_name: str = "rowsize", + traj_dim_name: str = "traj", + obs_dim_name: str = "obs", + full_trajectories=False, +) -> xr.Dataset: + """Subset a ragged array dataset as a function of one or more criteria. + The criteria are passed with a dictionary, where a dictionary key + is a variable to subset and the associated dictionary value is either a range + (valuemin, valuemax), a list [value1, value2, valueN], a single value, or a + masking function applied to every row of the ragged array using ``apply_ragged``. + + This function needs to know the names of the dimensions of the ragged array dataset + (`traj_dim_name` and `obs_dim_name`), and the name of the rowsize variable (`rowsize_var_name`). + Default values are provided for these arguments (see below), but they can be changed if needed. + + Parameters + ---------- + ds : xr.Dataset + Dataset stored as ragged arrays + criteria : dict + dictionary containing the variables (as keys) and the ranges/values/functions (as values) to subset + id_var_name : str, optional + Name of the variable containing the ID of the trajectories (default is "id") + rowsize_var_name : str, optional + Name of the variable containing the number of observations per trajectory (default is "rowsize") + traj_dim_name : str, optional + Name of the trajectory dimension (default is "traj") + obs_dim_name : str, optional + Name of the observation dimension (default is "obs") + full_trajectories : bool, optional + If True, it returns the complete trajectories (rows) where at least one observation + matches the criteria, rather than just the segments where the criteria are satisfied. + Default is False. + + Returns + ------- + xr.Dataset + subset Dataset matching the criterion(a) + + Examples + -------- + Criteria are combined on any data or metadata variables part of the Dataset. 
+ The following examples are based on NOAA GDP datasets which can be accessed with the + ``clouddrift.datasets`` module. + + Retrieve a region, like the Gulf of Mexico, using ranges of latitude and longitude: + + >>> subset(ds, {"lat": (21, 31), "lon": (-98, -78)}) + + The parameter `full_trajectories` can be used to retrieve trajectories passing through a region, for example all trajectories passing through the Gulf of Mexico: + + >>> subset(ds, {"lat": (21, 31), "lon": (-98, -78)}, full_trajectories=True) + + Retrieve drogued trajectory segments: + + >>> subset(ds, {"drogue_status": True}) + + Retrieve trajectory segments with temperature higher than 25°C (303.15K): + + >>> subset(ds, {"sst": (303.15, np.inf)}) + + You can use the same approach to return only the trajectories that are + shorter than some number of observations (similar to :func:`prune` but for + the entire dataset): + + >>> subset(ds, {"rowsize": (0, 1000)}) + + Retrieve specific drifters from their IDs: + + >>> subset(ds, {"id": [2578, 2582, 2583]}) + + Sometimes, you may want to retrieve specific rows of a ragged array. + You can do that by filtering along the trajectory dimension directly, since + this one corresponds to row numbers: + + >>> rows = [5, 6, 7] + >>> subset(ds, {"traj": rows}) + + Retrieve a specific time period: + + >>> subset(ds, {"time": (np.datetime64("2000-01-01"), np.datetime64("2020-01-31"))}) + + Note that to subset time variable, the range has to be defined as a function + type of the variable. By default, ``xarray`` uses ``np.datetime64`` to + represent datetime data. If the datetime data is a ``datetime.datetime``, or + ``pd.Timestamp``, the range would have to be defined accordingly. + + Those criteria can also be combined: + + >>> subset(ds, {"lat": (21, 31), "lon": (-98, -78), "drogue_status": True, "sst": (303.15, np.inf), "time": (np.datetime64("2000-01-01"), np.datetime64("2020-01-31"))}) + + You can also use a function to filter the data. 
For example, retrieve every other observation + of each trajectory (row): + + >>> func = (lambda arr: ((arr - arr[0]) % 2) == 0) + >>> subset(ds, {"time": func}) + + Raises + ------ + ValueError + If one of the variable in a criterion is not found in the Dataset + """ + mask_traj = xr.DataArray( + data=np.ones(ds.sizes[traj_dim_name], dtype="bool"), dims=[traj_dim_name] + ) + mask_obs = xr.DataArray( + data=np.ones(ds.sizes[obs_dim_name], dtype="bool"), dims=[obs_dim_name] + ) + + for key in criteria.keys(): + if key in ds or key in ds.dims: + if ds[key].dims == (traj_dim_name,): + mask_traj = np.logical_and( + mask_traj, + _mask_var( + ds[key], criteria[key], ds[rowsize_var_name], traj_dim_name + ), + ) + elif ds[key].dims == (obs_dim_name,): + mask_obs = np.logical_and( + mask_obs, + _mask_var( + ds[key], criteria[key], ds[rowsize_var_name], obs_dim_name + ), + ) + else: + raise ValueError(f"Unknown variable '{key}'.") + + # remove data when trajectories are filtered + traj_idx = rowsize_to_index(ds[rowsize_var_name].values) + for i in np.where(~mask_traj)[0]: + mask_obs[slice(traj_idx[i], traj_idx[i + 1])] = False + + # remove trajectory completely filtered in mask_obs + ids_with_mask_obs = np.repeat(ds[id_var_name].values, ds[rowsize_var_name].values)[ + mask_obs + ] + mask_traj = np.logical_and( + mask_traj, np.in1d(ds[id_var_name], np.unique(ids_with_mask_obs)) + ) + + # reset mask_obs to True to keep complete trajectories + if full_trajectories: + for i in np.where(mask_traj)[0]: + mask_obs[slice(traj_idx[i], traj_idx[i + 1])] = True + ids_with_mask_obs = np.repeat( + ds[id_var_name].values, ds[rowsize_var_name].values + )[mask_obs] + + if not any(mask_traj): + warnings.warn("No data matches the criteria; returning an empty dataset.") + return xr.Dataset() + else: + # apply the filtering for both dimensions + ds_sub = ds.isel({traj_dim_name: mask_traj, obs_dim_name: mask_obs}) + _, unique_idx, sorted_rowsize = np.unique( + ids_with_mask_obs, 
return_index=True, return_counts=True + ) + ds_sub[rowsize_var_name].values = sorted_rowsize[np.argsort(unique_idx)] + return ds_sub
+ + + +
+[docs] +def unpack( + ragged_array: np.ndarray, + rowsize: np.ndarray[int], + rows: Union[int, Iterable[int]] = None, + axis: int = 0, +) -> list[np.ndarray]: + """Unpack a ragged array into a list of regular arrays. + + Unpacking a ``np.ndarray`` ragged array is about 2 orders of magnitude + faster than unpacking an ``xr.DataArray`` ragged array, so unless you need a + ``DataArray`` as the result, we recommend passing ``np.ndarray`` as input. + + Parameters + ---------- + ragged_array : array-like + A ragged_array to unpack + rowsize : array-like + An array of integers whose values is the size of each row in the ragged + array + rows : int or Iterable[int], optional + A row or list of rows to unpack. Default is None, which unpacks all rows. + axis : int, optional + The axis along which to unpack the ragged array. Default is 0. + + Returns + ------- + list + A list of array-likes with sizes that correspond to the values in + rowsize, and types that correspond to the type of ragged_array + + Examples + -------- + + Unpacking longitude arrays from a ragged Xarray Dataset: + + .. code-block:: python + + lon = unpack(ds.lon, ds["rowsize"]) # return a list[xr.DataArray] (slower) + lon = unpack(ds.lon.values, ds["rowsize"]) # return a list[np.ndarray] (faster) + first_lon = unpack(ds.lon.values, ds["rowsize"], rows=0) # return only the first row + first_two_lons = unpack(ds.lon.values, ds["rowsize"], rows=[0, 1]) # return first two rows + + Looping over trajectories in a ragged Xarray Dataset to compute velocities + for each: + + .. 
code-block:: python + + for lon, lat, time in list(zip( + unpack(ds.lon.values, ds["rowsize"]), + unpack(ds.lat.values, ds["rowsize"]), + unpack(ds.time.values, ds["rowsize"]) + )): + u, v = velocity_from_position(lon, lat, time) + """ + indices = rowsize_to_index(rowsize) + + if rows is None: + rows = range(indices.size - 1) + if isinstance(rows, (int, np.integer)): + rows = [rows] + + unpacked = np.split(ragged_array, indices[1:-1], axis=axis) + + return [unpacked[i] for i in rows]
+ + + +def _mask_var( + var: xr.DataArray, + criterion: Union[tuple, list, np.ndarray, xr.DataArray, bool, float, int, Callable], + rowsize: xr.DataArray = None, + dim_name: str = "dim_0", +) -> xr.DataArray: + """Return the mask of a subset of the data matching a test criterion. + + Parameters + ---------- + var : xr.DataArray + DataArray to be subset by the criterion + criterion : array-like or scalar or Callable + The criterion can take four forms: + - tuple: (min, max) defining a range + - list, np.ndarray, or xr.DataArray: An array-like defining multiples values + - scalar: value defining a single value + - function: a function applied against each trajectory using ``apply_ragged`` and returning a mask + rowsize : xr.DataArray, optional + List of integers specifying the number of data points in each row + dim_name : str, optional + Name of the masked dimension (default is "dim_0") + + Examples + -------- + >>> x = xr.DataArray(data=np.arange(0, 5)) + >>> _mask_var(x, (2, 4)) + <xarray.DataArray (dim_0: 5)> + array([False, False, True, True, True]) + Dimensions without coordinates: dim_0 + + >>> _mask_var(x, [0, 2, 4]) + <xarray.DataArray (dim_0: 5)> + array([ True, False, True, False, True]) + Dimensions without coordinates: dim_0 + + >>> _mask_var(x, 4) + <xarray.DataArray (dim_0: 5)> + array([False, False, False, True, False]) + Dimensions without coordinates: dim_0 + + >>> rowsize = xr.DataArray(data=[2, 3]) + >>> _mask_var(x, lambda arr: arr==arr[0]+1, rowsize, "dim_0") + <xarray.DataArray (dim_0: 5)> + array([False, True, False, True, False]) + Dimensions without coordinates: dim_0 + + Returns + ------- + mask : xr.DataArray + The mask of the subset of the data matching the criteria + """ + if isinstance(criterion, tuple): # min/max defining range + mask = np.logical_and(var >= criterion[0], var <= criterion[1]) + elif isinstance(criterion, (list, np.ndarray, xr.DataArray)): + # select multiple values + mask = np.isin(var, criterion) + elif 
callable(criterion): + # mask directly created by applying `criterion` function + if len(var) == len(rowsize): + mask = criterion(var) + else: + mask = apply_ragged(criterion, var, rowsize) + + mask = xr.DataArray(data=mask, dims=[dim_name]).astype(bool) + + if not len(var) == len(mask): + raise ValueError( + "The `Callable` function must return a masked array that matches the length of the variable to filter." + ) + else: # select one specific value + mask = var == criterion + return mask +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/raggedarray.html b/_modules/clouddrift/raggedarray.html new file mode 100644 index 00000000..121adb5d --- /dev/null +++ b/_modules/clouddrift/raggedarray.html @@ -0,0 +1,999 @@ + + + + + + + + + + clouddrift.raggedarray — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.raggedarray

+"""
+This module defines the RaggedArray class, which is the intermediate data
+structure used by CloudDrift to process custom Lagrangian datasets to Xarray
+Datasets and Awkward Arrays.
+"""
+import awkward as ak
+from clouddrift.ragged import rowsize_to_index
+import xarray as xr
+import numpy as np
+from collections.abc import Callable
+from typing import Tuple, Optional
+from tqdm import tqdm
+import warnings
+
+
+
+[docs] +class RaggedArray: +
+[docs] + def __init__( + self, + coords: dict, + metadata: dict, + data: dict, + attrs_global: Optional[dict] = {}, + attrs_variables: Optional[dict] = {}, + ): + self.coords = coords + self.metadata = metadata + self.data = data + self.attrs_global = attrs_global + self.attrs_variables = attrs_variables + self.validate_attributes()
+ + +
+[docs] + @classmethod + def from_awkward( + cls, + array: ak.Array, + name_coords: Optional[list] = ["time", "lon", "lat", "ids"], + ): + """Load a RaggedArray instance from an Awkward Array. + + Parameters + ---------- + array : ak.Array + Awkward Array instance to load the data from + name_coords : list, optional + Names of the coordinate variables in the ragged arrays + + Returns + ------- + RaggedArray + A RaggedArray instance + """ + coords = {} + metadata = {} + data = {} + attrs_variables = {} + + attrs_global = array.layout.parameters["attrs"] + + for var in name_coords: + coords[var] = ak.flatten(array.obs[var]).to_numpy() + attrs_variables[var] = array.obs[var].layout.parameters["attrs"] + + for var in [v for v in array.fields if v != "obs"]: + metadata[var] = array[var].to_numpy() + attrs_variables[var] = array[var].layout.parameters["attrs"] + + for var in [v for v in array.obs.fields if v not in name_coords]: + data[var] = ak.flatten(array.obs[var]).to_numpy() + attrs_variables[var] = array.obs[var].layout.parameters["attrs"] + + return cls(coords, metadata, data, attrs_global, attrs_variables)
+ + +
+[docs] + @classmethod + def from_files( + cls, + indices: list, + preprocess_func: Callable[[int], xr.Dataset], + name_coords: list, + name_meta: Optional[list] = [], + name_data: Optional[list] = [], + rowsize_func: Optional[Callable[[int], int]] = None, + **kwargs, + ): + """Generate a ragged array archive from a list of trajectory files + + Parameters + ---------- + indices : list + Identification numbers list to iterate + preprocess_func : Callable[[int], xr.Dataset] + Returns a processed xarray Dataset from an identification number + name_coords : list + Name of the coordinate variables to include in the archive + name_meta : list, optional + Name of metadata variables to include in the archive (Defaults to []) + name_data : list, optional + Name of the data variables to include in the archive (Defaults to []) + rowsize_func : Optional[Callable[[int], int]], optional + Returns the number of observations from an identification number (to speed up processing) (Defaults to None) + + Returns + ------- + RaggedArray + A RaggedArray instance + """ + # if no method is supplied, get the dimension from the preprocessing function + rowsize_func = ( + rowsize_func + if rowsize_func + else lambda i, **kwargs: preprocess_func(i, **kwargs).sizes["obs"] + ) + rowsize = cls.number_of_observations(rowsize_func, indices, **kwargs) + coords, metadata, data = cls.allocate( + preprocess_func, + indices, + rowsize, + name_coords, + name_meta, + name_data, + **kwargs, + ) + attrs_global, attrs_variables = cls.attributes( + preprocess_func(indices[0], **kwargs), + name_coords, + name_meta, + name_data, + ) + + return cls(coords, metadata, data, attrs_global, attrs_variables)
+ + +
+[docs] + @classmethod + def from_netcdf(cls, filename: str): + """Read a ragged arrays archive from a NetCDF file. + + This is a thin wrapper around ``from_xarray()``. + + Parameters + ---------- + filename : str + File name of the NetCDF archive to read. + + Returns + ------- + RaggedArray + A ragged array instance + """ + return cls.from_xarray(xr.open_dataset(filename))
+ + +
+[docs] + @classmethod + def from_parquet( + cls, filename: str, name_coords: Optional[list] = ["time", "lon", "lat", "ids"] + ): + """Read a ragged array from a parquet file. + + Parameters + ---------- + filename : str + File name of the parquet archive to read. + name_coords : list, optional + Names of the coordinate variables in the ragged arrays + + Returns + ------- + RaggedArray + A ragged array instance + """ + return cls.from_awkward(ak.from_parquet(filename), name_coords)
+ + +
+[docs] + @classmethod + def from_xarray(cls, ds: xr.Dataset, dim_traj: str = "traj", dim_obs: str = "obs"): + """Populate a RaggedArray instance from an xarray Dataset instance. + + Parameters + ---------- + ds : xr.Dataset + Xarray Dataset from which to load the RaggedArray + dim_traj : str, optional + Name of the trajectories dimension in the xarray Dataset + dim_obs : str, optional + Name of the observations dimension in the xarray Dataset + + Returns + ------- + RaggedArray + A RaggedArray instance + """ + coords = {} + metadata = {} + data = {} + attrs_global = {} + attrs_variables = {} + + attrs_global = ds.attrs + + for var in ds.coords.keys(): + coords[var] = ds[var].data + attrs_variables[var] = ds[var].attrs + + for var in ds.data_vars.keys(): + if len(ds[var]) == ds.sizes[dim_traj]: + metadata[var] = ds[var].data + elif len(ds[var]) == ds.sizes[dim_obs]: + data[var] = ds[var].data + else: + warnings.warn( + f""" + Variable '{var}' has unknown dimension size of + {len(ds[var])}, which is not traj={ds.sizes[dim_traj]} or + obs={ds.sizes[dim_obs]}; skipping. + """ + ) + attrs_variables[var] = ds[var].attrs + + return cls(coords, metadata, data, attrs_global, attrs_variables)
+ + +
+[docs] + @staticmethod + def number_of_observations( + rowsize_func: Callable[[int], int], indices: list, **kwargs + ) -> np.array: + """Iterate through the files and evaluate the number of observations. + + Parameters + ---------- + rowsize_func : Callable[[int], int]] + Function that returns the number observations of a trajectory from + its identification number + indices : list + Identification numbers list to iterate + + Returns + ------- + np.ndarray + Number of observations of each trajectory + """ + rowsize = np.zeros(len(indices), dtype="int") + + for i, index in tqdm( + enumerate(indices), + total=len(indices), + desc="Retrieving the number of obs", + ncols=80, + ): + rowsize[i] = rowsize_func(index, **kwargs) + return rowsize
+ + +
+[docs] + @staticmethod + def attributes( + ds: xr.Dataset, name_coords: list, name_meta: list, name_data: list + ) -> Tuple[dict, dict]: + """Return global attributes and the attributes of all variables + (name_coords, name_meta, and name_data) from an Xarray Dataset. + + Parameters + ---------- + ds : xr.Dataset + _description_ + name_coords : list + Name of the coordinate variables to include in the archive + name_meta : list, optional + Name of metadata variables to include in the archive (default is []) + name_data : list, optional + Name of the data variables to include in the archive (default is []) + + Returns + ------- + Tuple[dict, dict] + The global and variables attributes + """ + attrs_global = ds.attrs + + # coordinates, metadata, and data + attrs_variables = {} + for var in name_coords + name_meta + name_data: + if var in ds.keys(): + attrs_variables[var] = ds[var].attrs + else: + warnings.warn(f"Variable {var} requested but not found; skipping.") + + return attrs_global, attrs_variables
+ + +
+[docs] + @staticmethod + def allocate( + preprocess_func: Callable[[int], xr.Dataset], + indices: list, + rowsize: list, + name_coords: list, + name_meta: list, + name_data: list, + **kwargs, + ) -> Tuple[dict, dict, dict]: + """ + Iterate through the files and fill for the ragged array associated + with coordinates, and selected metadata and data variables. + + Parameters + ---------- + preprocess_func : Callable[[int], xr.Dataset] + Returns a processed xarray Dataset from an identification number. + indices : list + List of indices separating trajectory in the ragged arrays. + rowsize : list + List of the number of observations per trajectory. + name_coords : list + Name of the coordinate variables to include in the archive. + name_meta : list, optional + Name of metadata variables to include in the archive (Defaults to []). + name_data : list, optional + Name of the data variables to include in the archive (Defaults to []). + + Returns + ------- + Tuple[dict, dict, dict] + Dictionaries containing numerical data and attributes of coordinates, metadata and data variables. 
+ """ + + # open one file to get dtype of variables + ds = preprocess_func(indices[0], **kwargs) + nb_traj = len(rowsize) + nb_obs = np.sum(rowsize).astype("int") + index_traj = rowsize_to_index(rowsize) + + # allocate memory + coords = {} + for var in name_coords: + coords[var] = np.zeros(nb_obs, dtype=ds[var].dtype) + + metadata = {} + for var in name_meta: + try: + metadata[var] = np.zeros(nb_traj, dtype=ds[var].dtype) + except KeyError: + warnings.warn(f"Variable {var} requested but not found; skipping.") + + data = {} + for var in name_data: + if var in ds.keys(): + data[var] = np.zeros(nb_obs, dtype=ds[var].dtype) + else: + warnings.warn(f"Variable {var} requested but not found; skipping.") + ds.close() + + # loop and fill the ragged array + for i, index in tqdm( + enumerate(indices), + total=len(indices), + desc="Filling the Ragged Array", + ncols=80, + ): + with preprocess_func(index, **kwargs) as ds: + size = rowsize[i] + oid = index_traj[i] + + for var in name_coords: + coords[var][oid : oid + size] = ds[var].data + + for var in name_meta: + try: + metadata[var][i] = ds[var][0].data + except KeyError: + warnings.warn( + f"Variable {var} requested but not found; skipping." + ) + + for var in name_data: + if var in ds.keys(): + data[var][oid : oid + size] = ds[var].data + else: + warnings.warn( + f"Variable {var} requested but not found; skipping." + ) + + return coords, metadata, data
+ + +
+[docs] + def validate_attributes(self): + """Validate that each variable has an assigned attribute tag.""" + for key in ( + list(self.coords.keys()) + + list(self.metadata.keys()) + + list(self.data.keys()) + ): + if key not in self.attrs_variables: + self.attrs_variables[key] = {}
+ + +
+[docs] + def to_xarray(self, cast_to_float32: bool = True): + """Convert ragged array object to a xarray Dataset. + + Parameters + ---------- + cast_to_float32 : bool, optional + Cast all float64 variables to float32 (default is True). This option aims at + minimizing the size of the xarray dataset. + + Returns + ------- + xr.Dataset + Xarray Dataset containing the ragged arrays and their attributes + """ + + xr_coords = {} + for var in self.coords.keys(): + xr_coords[var] = (["obs"], self.coords[var], self.attrs_variables[var]) + + xr_data = {} + for var in self.metadata.keys(): + xr_data[var] = (["traj"], self.metadata[var], self.attrs_variables[var]) + + for var in self.data.keys(): + xr_data[var] = (["obs"], self.data[var], self.attrs_variables[var]) + + return xr.Dataset(coords=xr_coords, data_vars=xr_data, attrs=self.attrs_global)
+ + +
+[docs] + def to_awkward(self): + """Convert ragged array object to an Awkward Array. + + Returns + ------- + ak.Array + Awkward Array containing the ragged array and its attributes + """ + index_traj = rowsize_to_index(self.metadata["rowsize"]) + offset = ak.index.Index64(index_traj) + + data = [] + for var in self.coords.keys(): + data.append( + ak.contents.ListOffsetArray( + offset, + ak.contents.NumpyArray(self.coords[var]), + parameters={"attrs": self.attrs_variables[var]}, + ) + ) + for var in self.data.keys(): + data.append( + ak.contents.ListOffsetArray( + offset, + ak.contents.NumpyArray(self.data[var]), + parameters={"attrs": self.attrs_variables[var]}, + ) + ) + data_names = list(self.coords.keys()) + list(self.data.keys()) + + metadata = [] + for var in self.metadata.keys(): + metadata.append( + ak.with_parameter( + self.metadata[var], + "attrs", + self.attrs_variables[var], + highlevel=False, + ) + ) + metadata_names = list(self.metadata.keys()) + + # include the data inside the metadata list as a nested array + metadata_names.append("obs") + metadata.append(ak.Array(ak.contents.RecordArray(data, data_names)).layout) + + return ak.Array( + ak.contents.RecordArray( + metadata, metadata_names, parameters={"attrs": self.attrs_global} + ) + )
+ + +
+[docs] + def to_netcdf(self, filename: str): + """Export ragged array object to a NetCDF file. + + Parameters + ---------- + filename : str + Name of the NetCDF file to create. + """ + + self.to_xarray().to_netcdf(filename)
+ + +
+[docs] + def to_parquet(self, filename: str): + """Export ragged array object to a parquet file. + + Parameters + ---------- + filename : str + Name of the parquet file to create. + """ + ak.to_parquet(self.to_awkward(), filename)
+
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/signal.html b/_modules/clouddrift/signal.html new file mode 100644 index 00000000..7f8c9174 --- /dev/null +++ b/_modules/clouddrift/signal.html @@ -0,0 +1,951 @@ + + + + + + + + + + clouddrift.signal — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.signal

+"""
+This module provides signal processing functions.
+"""
+
+import numpy as np
+from typing import Optional, Tuple, Union
+import xarray as xr
+
+
+
+[docs] +def analytic_signal( + x: Union[np.ndarray, xr.DataArray], + boundary: Optional[str] = "mirror", + time_axis: Optional[int] = -1, +) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray]]: + """Return the analytic signal from a real-valued signal or the analytic and + conjugate analytic signals from a complex-valued signal. + + If the input is a real-valued signal, the analytic signal is calculated as + the inverse Fourier transform of the positive-frequency part of the Fourier + transform. If the input is a complex-valued signal, the conjugate analytic signal + is additionally calculated as the inverse Fourier transform of the positive-frequency + part of the Fourier transform of the complex conjugate of the input signal. + + For a complex-valued signal, the mean is evenly divided between the analytic and + conjugate analytic signal. + + The calculation is performed along the last axis of the input array by default. + Alternatively, the user can specify the time axis of the input. The user can also + specify the boundary conditions to be applied to the input array (default is "mirror"). + + Parameters + ---------- + x : array_like + Real- or complex-valued signal. + boundary : str, optional + The boundary condition to be imposed at the edges of the time series. + Allowed values are "mirror", "zeros", and "periodic". + Default is "mirror". + time_axis : int, optional + Axis on which the time is defined (default is -1). + + Returns + ------- + xa : np.ndarray + Analytic signal. It is a tuple if the input is a complex-valed signal + with the first element being the analytic signal and the second element + being the conjugate analytic signal. 
+ + Examples + -------- + + To obtain the analytic signal of a real-valued signal: + + >>> x = np.random.rand(99) + >>> xa = analytic_signal(x) + + To obtain the analytic and conjugate analytic signals of a complex-valued signal: + + >>> w = np.random.rand(99)+1j*np.random.rand(99) + >>> wp, wn = analytic_signal(w) + + To specify that a periodic boundary condition should be used: + + >>> x = np.random.rand(99) + >>> xa = analytic_signal(x, boundary="periodic") + + To specify that the time axis is along the first axis and apply + zero boundary conditions: + + >>> x = np.random.rand(100, 99) + >>> xa = analytic_signal(x, time_axis=0, boundary="zeros") + + Raises + ------ + ValueError + If the time axis is outside of the valid range ([-1, N-1]). + If ``boundary not in ["mirror", "zeros", "periodic"]``. + + References + ---------- + [1] Gabor D. 1946 Theory of communication. Proc. IEE 93, 429–457. (10.1049/ji-1.1947.0015). + + [2] Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. + IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729). + + See Also + -------- + :func:`rotary_to_cartesian`, :func:`cartesian_to_rotary` + """ + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(x.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(x.shape) - 1}])." + ) + + # Swap the axis to make the time axis last (fast-varying). + # np.swapaxes returns a view to the input array, so no copy is made. + if time_axis != -1 and time_axis != len(x.shape) - 1: + x_ = np.swapaxes(x, time_axis, -1) + else: + x_ = x + + # time dimension length + N = np.shape(x_)[-1] + + # Subtract mean along time axis (-1); convert to np.array for compatibility + # with xarray.DataArray. 
+ mx_ = np.array(np.mean(x_, axis=-1, keepdims=True)) + xa = x_ - mx_ + + # apply boundary conditions + if boundary == "mirror": + xa = np.concatenate((np.flip(xa, axis=-1), xa, np.flip(xa, axis=-1)), axis=-1) + elif boundary == "zeros": + xa = np.concatenate((np.zeros_like(xa), xa, np.zeros_like(xa)), axis=-1) + elif boundary == "periodic": + xa = np.concatenate((xa, xa, xa), axis=-1) + else: + raise ValueError("boundary must be one of 'mirror', 'align', or 'zeros'.") + + # analytic signal + xap = np.fft.fft(xa) + # conjugate analytic signal + xan = np.fft.fft(np.conj(xa)) + + # time dimension of extended time series + M = np.shape(xa)[-1] + + # zero negative frequencies + if M % 2 == 0: + xap[..., int(M / 2 + 2) - 1 : int(M + 1) + 1] = 0 + xan[..., int(M / 2 + 2) - 1 : int(M + 1) + 1] = 0 + # divide Nyquist component by 2 in even case + xap[..., int(M / 2 + 1) - 1] = xap[..., int(M / 2 + 1) - 1] / 2 + xan[..., int(M / 2 + 1) - 1] = xan[..., int(M / 2 + 1) - 1] / 2 + else: + xap[..., int((M + 3) / 2) - 1 : int(M + 1) + 1] = 0 + xan[..., int((M + 3) / 2) - 1 : int(M + 1) + 1] = 0 + + # inverse Fourier transform along last axis + xap = np.fft.ifft(xap) + xan = np.fft.ifft(xan) + + # return central part plus half the mean + xap = xap[..., int(N + 1) - 1 : int(2 * N + 1) - 1] + 0.5 * mx_ + xan = xan[..., int(N + 1) - 1 : int(2 * N + 1) - 1] + 0.5 * np.conj(mx_) + + if np.isrealobj(x): + xa = xap + xan + else: + xa = (xap, xan) + + # return after reorganizing the axes + if time_axis != -1 and time_axis != len(x.shape) - 1: + return np.swapaxes(xa, time_axis, -1) + else: + return xa
+ + + +
+[docs] +def cartesian_to_rotary( + ua: Union[np.ndarray, xr.DataArray], + va: Union[np.ndarray, xr.DataArray], + time_axis: Optional[int] = -1, +) -> Tuple[np.ndarray, np.ndarray]: + """Return rotary signals (wp,wn) from analytic Cartesian signals (ua,va). + + If ua is the analytic signal from real-valued signal u, and va the analytic signal + from real-valued signal v, then the positive (counterclockwise) and negative (clockwise) + signals are defined by wp = 0.5*(up+1j*vp), wp = 0.5*(up-1j*vp). + + This function is the inverse of :func:`rotary_to_cartesian`. + + Parameters + ---------- + ua : array_like + Complex-valued analytic signal for first Cartesian component (zonal, east-west) + va : array_like + Complex-valued analytic signal for second Cartesian component (meridional, north-south) + time_axis : int, optional + The axis of the time array. Default is -1, which corresponds to the + last axis. + + Returns + ------- + wp : np.ndarray + Complex-valued positive (counterclockwise) rotary signal. + wn : np.ndarray + Complex-valued negative (clockwise) rotary signal. + + Examples + -------- + To obtain the rotary signals from a pair of real-valued signal: + + >>> u = np.random.rand(99) + >>> v = np.random.rand(99) + >>> wp, wn = cartesian_to_rotary(analytic_signal(u), analytic_signal(v)) + + To specify that the time axis is along the first axis: + + >>> u = np.random.rand(100, 99) + >>> v = np.random.rand(100, 99) + >>> wp, wn = cartesian_to_rotary(analytic_signal(u), analytic_signal(v), time_axis=0) + + Raises + ------ + ValueError + If the input arrays do not have the same shape. + If the time axis is outside of the valid range ([-1, N-1]). + + References + ---------- + Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. + IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729) + + See Also + -------- + :func:`analytic_signal`, :func:`rotary_to_cartesian` + """ + # u and v arrays must have the same shape. 
+ if not ua.shape == va.shape: + raise ValueError("u and v must have the same shape.") + + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(ua.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(ua.shape) - 1}])." + ) + + wp = 0.5 * (ua + 1j * va) + wn = 0.5 * (ua - 1j * va) + + return wp, wn
+ + + +
+[docs] +def ellipse_parameters( + xa: Union[np.ndarray, xr.DataArray], + ya: Union[np.ndarray, xr.DataArray], +) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Return the instantaneous parameters of a modulated elliptical signal from its analytic Cartesian signals. + + Parameters + ---------- + xa : array_like + Complex-valued analytic signal for first Cartesian component (zonal, east-west). + ya : array_like + Complex-valued analytic signal for second Cartesian component (meridional, north-south). + + Returns + ------- + kappa : np.ndarray + Ellipse root-mean-square amplitude. + lambda : np.ndarray + Ellipse linearity between -1 and 1, or departure from circular motion (lambda=0). + theta : np.ndarray + Ellipse orientation in radian. + phi : np.ndarray + Ellipse phase in radian. + + Examples + -------- + + To obtain the ellipse parameters from a pair of real-valued signals (x, y): + + >>> kappa, lambda, theta, phi = ellipse_parameters(analytic_signal(x), analytic_signal(y)) + + Raises + ------ + ValueError + If the input arrays do not have the same shape. + + References + ---------- + Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. + IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729). + + See Also + -------- + :func:`modulated_ellipse_signal`, :func:`analytic_signal`, :func:`rotary_to_cartesian`, :func:`cartesian_to_rotary` + + """ + + # u and v arrays must have the same shape. 
+ if not xa.shape == ya.shape: + raise ValueError("xa and ya must have the same shape.") + + X = np.abs(xa) + Y = np.abs(ya) + phix = np.angle(xa) + phiy = np.angle(ya) + + phia = 0.5 * (phix + phiy + 0.5 * np.pi) + phid = 0.5 * (phix - phiy - 0.5 * np.pi) + + P = 0.5 * np.sqrt(X**2 + Y**2 + 2 * X * Y * np.cos(2 * phid)) + N = 0.5 * np.sqrt(X**2 + Y**2 - 2 * X * Y * np.cos(2 * phid)) + + phip = np.unwrap( + phia + + np.unwrap(np.imag(np.log(X * np.exp(1j * phid) + Y * np.exp(-1j * phid)))) + ) + phin = np.unwrap( + phia + + np.unwrap(np.imag(np.log(X * np.exp(1j * phid) - Y * np.exp(-1j * phid)))) + ) + + kappa = np.sqrt(P**2 + N**2) + lambda_ = (2 * P * N * np.sign(P - N)) / (P**2 + N**2) + + # For vanishing linearity, put in very small number to have sign information + lambda_[lambda_ == 0] = np.sign(P[lambda_ == 0] - N[lambda_ == 0]) * (1e-12) + + theta = np.unwrap(0.5 * (phip - phin)) + phi = np.unwrap(0.5 * (phip + phin)) + + lambda_ = np.real(lambda_) + + return kappa, lambda_, theta, phi
+ + + +
+[docs] +def modulated_ellipse_signal( + kappa: Union[np.ndarray, xr.DataArray], + lambda_: Union[np.ndarray, xr.DataArray], + theta: Union[np.ndarray, xr.DataArray], + phi: Union[np.ndarray, xr.DataArray], +) -> Tuple[np.ndarray, np.ndarray]: + """Return the analytic Cartesian signals (xa, ya) from the instantaneous parameters of a modulated elliptical signal. + + This function is the inverse of :func:`ellipse_parameters`. + + Parameters + ---------- + kappa : array_like + Ellipse root-mean-square amplitude. + lambda : array_like + Ellipse linearity between -1 and 1, or departure from circular motion (lambda=0). + theta : array_like + Ellipse orientation in radian. + phi : array_like + Ellipse phase in radian. + time_axis : int, optional + The axis of the time array. Default is -1, which corresponds to the + last axis. + + Returns + ------- + xa : np.ndarray + Complex-valued analytic signal for first Cartesian component (zonal, east-west). + ya : np.ndarray + Complex-valued analytic signal for second Cartesian component (meridional, north-south). + + Examples + -------- + + To obtain the analytic signals from the instantaneous parameters of a modulated elliptical signal: + + >>> xa, ya = modulated_ellipse_signal(kappa, lambda, theta, phi) + + Raises + ------ + ValueError + If the input arrays do not have the same shape. + + References + ---------- + Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. + IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729). 
+ + See Also + -------- + :func:`ellipse_parameters`, :func:`analytic_signal`, :func:`rotary_to_cartesian`, :func:`cartesian_to_rotary` + + """ + + # make sure all input arrays have the same shape + if not kappa.shape == lambda_.shape == theta.shape == phi.shape: + raise ValueError("All input arrays must have the same shape.") + + # calculate semi major and semi minor axes + a = kappa * np.sqrt(1 + np.abs(lambda_)) + b = np.sign(lambda_) * kappa * np.sqrt(1 - np.abs(lambda_)) + + # define b to be positive for lambda exactly zero + b[lambda_ == 0] = kappa[lambda_ == 0] + + xa = np.exp(1j * phi) * (a * np.cos(theta) + 1j * b * np.sin(theta)) + ya = np.exp(1j * phi) * (a * np.sin(theta) - 1j * b * np.cos(theta)) + + mask = np.isinf(kappa * lambda_ * theta * phi) + xa[mask] = np.inf + 1j * np.inf + ya[mask] = np.inf + 1j * np.inf + + return xa, ya
+ + + +
+[docs] +def rotary_to_cartesian( + wp: Union[np.ndarray, xr.DataArray], + wn: Union[np.ndarray, xr.DataArray], + time_axis: Optional[int] = -1, +) -> Tuple[np.ndarray, np.ndarray]: + """Return Cartesian analytic signals (ua, va) from rotary signals (wp, wn) + as ua = wp + wn and va = -1j * (wp - wn). + + This function is the inverse of :func:`cartesian_to_rotary`. + + Parameters + ---------- + wp : array_like + Complex-valued positive (counterclockwise) rotary signal. + wn : array_like + Complex-valued negative (clockwise) rotary signal. + time_axis : int, optional + The axis of the time array. Default is -1, which corresponds to the + last axis. + + Returns + ------- + ua : array_like + Complex-valued analytic signal, first Cartesian component (zonal, east-west) + va : array_like + Complex-valued analytic signal, second Cartesian component (meridional, north-south) + + Examples + -------- + + To obtain the Cartesian analytic signals from a pair of rotary signals (wp,wn): + + >>> ua, va = rotary_to_cartesian(wp, wn) + + To specify that the time axis is along the first axis: + + >>> ua, va = rotary_to_cartesian(wp, wn, time_axis=0) + + Raises + ------ + ValueError + If the input arrays do not have the same shape. + If the time axis is outside of the valid range ([-1, N-1]). + + References + ---------- + Lilly JM, Olhede SC. 2010 Bivariate instantaneous frequency and bandwidth. + IEEE T. Signal Proces. 58, 591–603. (10.1109/TSP.2009.2031729) + + See Also + -------- + :func:`analytic_signal`, :func:`cartesian_to_rotary` + """ + + if not wp.shape == wn.shape: + raise ValueError("u and v must have the same shape.") + + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(wp.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(wp.shape) - 1}])." + ) + + # I think this may return xarray dataarrays if that's the input + ua = wp + wn + va = -1j * (wp - wn) + + return ua, va
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/sphere.html b/_modules/clouddrift/sphere.html new file mode 100644 index 00000000..9cdae7dc --- /dev/null +++ b/_modules/clouddrift/sphere.html @@ -0,0 +1,1320 @@ + + + + + + + + + + clouddrift.sphere — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.sphere

+"""
+This module provides functions for spherical geometry calculations.
+"""
+
+import numpy as np
+from typing import Optional, Tuple, Union
+import xarray as xr
+import warnings
+
+EARTH_RADIUS_METERS = 6.3781e6
+EARTH_DAY_SECONDS = 86164.091
+EARTH_ROTATION_RATE = 2 * np.pi / EARTH_DAY_SECONDS
+
+
+
+[docs] +def cumulative_distance( + longitude: Union[list, np.ndarray, xr.DataArray], + latitude: Union[list, np.ndarray, xr.DataArray], +) -> np.ndarray: + """Return the cumulative great circle distance in meters along a sequence of geographical locations. + + Parameters + ---------- + latitude : array-like + Latitude sequence, in degrees. + longitude : array-like + Longitude sequence, in degrees. + + Returns + ------- + out : np.ndarray + Cumulative distance. + + See Also + -------- + :func:`distance` + + Examples + -------- + Calculate the cumulative distance in meters along a path of three points: + + >>> cumulative_distance(np.array([0, 1, 2]), np.array([0, 1, 2])) + array([ 0. , 157424.62387233, 314825.27182116]) + """ + return np.cumsum( + np.concatenate( + ( + [0], + distance(latitude[0:-1], longitude[0:-1], latitude[1:], longitude[1:]), + ) + ) + )
+ + + +
+[docs] +def distance( + lon1: Union[float, list, np.ndarray, xr.DataArray], + lat1: Union[float, list, np.ndarray, xr.DataArray], + lon2: Union[float, list, np.ndarray, xr.DataArray], + lat2: Union[float, list, np.ndarray, xr.DataArray], +) -> Union[float, np.ndarray]: + """Return elementwise great circle distance in meters between one or more + points from arrays of their latitudes and longitudes, using the Haversine + formula. + + d = 2⋅r⋅asin √[sin²(Δφ/2) + cos φ1 ⋅ cos φ2 ⋅ sin²(Δλ/2)] + + where (φ, λ) is (lat, lon) in radians and r is the radius of the sphere in + meters. + + Parameters + ---------- + lon1 : np.ndarray + Longitudes of the first set of points, in degrees + lat1 : np.ndarray + Latitudes of the first set of points, in degrees + lon2 : np.ndarray + Longitudes of the second set of points, in degrees + lat2 : np.ndarray + Latitudes of the second set of points, in degrees + + Returns + ------- + out : np.ndarray + Great circle distance + + Examples + -------- + Calculate the distance of one degree longitude on the equator: + + >>> distance(0, 0, 0, 1) + 111318.84502145034 + + Calculate the distance of one degree longitude at 45-degrees North latitude: + + >>> distance(0, 45, 1, 45) + 78713.81064540472 + + You can also pass array-like inputs to calculate an array of distances: + + >>> distance([0, 0], [0, 45], [0, 1], [1, 45]) + array([111318.84502145, 78713.8106454 ]) + """ + + # Input coordinates are in degrees; convert to radians. + # If any of the input arrays are xr.DataArray, extract the values first + # because Xarray enforces alignment between coordinates. 
+ if type(lat1) is xr.DataArray: + lat1_rad = np.deg2rad(lat1.values) + else: + lat1_rad = np.deg2rad(lat1) + if type(lon1) is xr.DataArray: + lon1_rad = np.deg2rad(lon1.values) + else: + lon1_rad = np.deg2rad(lon1) + if type(lat2) is xr.DataArray: + lat2_rad = np.deg2rad(lat2.values) + else: + lat2_rad = np.deg2rad(lat2) + if type(lon2) is xr.DataArray: + lon2_rad = np.deg2rad(lon2.values) + else: + lon2_rad = np.deg2rad(lon2) + + dlat = lat2_rad - lat1_rad + dlon = lon2_rad - lon1_rad + + h = ( + np.sin(0.5 * dlat) ** 2 + + np.cos(lat1_rad) * np.cos(lat2_rad) * np.sin(0.5 * dlon) ** 2 + ) + + return 2 * np.arcsin(np.sqrt(h)) * EARTH_RADIUS_METERS
+ + + +
+[docs] +def bearing( + lon1: Union[float, list, np.ndarray, xr.DataArray], + lat1: Union[float, list, np.ndarray, xr.DataArray], + lon2: Union[float, list, np.ndarray, xr.DataArray], + lat2: Union[float, list, np.ndarray, xr.DataArray], +) -> Union[float, np.ndarray]: + """Return elementwise initial (forward) bearing in radians from arrays of + latitude and longitude in degrees, based on the spherical law of cosines. + + The formula is: + + θ = atan2(cos φ1 ⋅ sin φ2 - sin φ1 ⋅ cos φ2 ⋅ cos Δλ, sin Δλ ⋅ cos φ2) + + where (φ, λ) is (lat, lon) and θ is bearing, all in radians. + Bearing is defined as zero toward East and positive counterclockwise. + + Parameters + ---------- + lon1 : float or array-like + Longitudes of the first set of points, in degrees + lat1 : float or array-like + Latitudes of the first set of points, in degrees + lon2 : float or array-like + Longitudes of the second set of points, in degrees + lat2 : float or array-like + Latitudes of the second set of points, in degrees + + Returns + ------- + theta : float or np.ndarray + Bearing angles in radians + + Examples + -------- + Calculate the bearing of one degree longitude on the equator: + + >>> bearing(0, 0, 1, 0) + 0.0 + + Calculate the bearing of 10 degrees longitude at 45-degrees North latitude: + + >>> bearing(0, 45, 10, 45) + 0.06178508761798218 + """ + # Input coordinates are in degrees; convert to radians. + # If any of the input arrays are xr.DataArray, extract the values first + # because Xarray enforces alignment between coordinates. 
+ if type(lat1) is xr.DataArray: + lat1_rad = np.deg2rad(lat1.values) + else: + lat1_rad = np.deg2rad(lat1) + if type(lon1) is xr.DataArray: + lon1_rad = np.deg2rad(lon1.values) + else: + lon1_rad = np.deg2rad(lon1) + if type(lat2) is xr.DataArray: + lat2_rad = np.deg2rad(lat2.values) + else: + lat2_rad = np.deg2rad(lat2) + if type(lon2) is xr.DataArray: + lon2_rad = np.deg2rad(lon2.values) + else: + lon2_rad = np.deg2rad(lon2) + + dlon = lon2_rad - lon1_rad + + theta = np.arctan2( + np.cos(lat1_rad) * np.sin(lat2_rad) + - np.sin(lat1_rad) * np.cos(lat2_rad) * np.cos(dlon), + np.sin(dlon) * np.cos(lat2_rad), + ) + + return theta
+ + + +
+[docs] +def position_from_distance_and_bearing( + lon: float, lat: float, distance: float, bearing: float +) -> Tuple[float, float]: + """Return elementwise new position in degrees from arrays of latitude and + longitude in degrees, distance in meters, and bearing in radians, based on + the spherical law of cosines. + + The formula is: + + φ2 = asin( sin φ1 ⋅ cos δ + cos φ1 ⋅ sin δ ⋅ cos θ ) + λ2 = λ1 + atan2( sin θ ⋅ sin δ ⋅ cos φ1, cos δ − sin φ1 ⋅ sin φ2 ) + + where (φ, λ) is (lat, lon) and θ is bearing, all in radians. + Bearing is defined as zero toward East and positive counterclockwise. + + Parameters + ---------- + lon : float + Longitude of the first set of points, in degrees + lat : float + Latitude of the first set of points, in degrees + distance : array_like + Distance in meters + bearing : array_like + Bearing angles in radians + + Returns + ------- + lon2 : array_like + Latitudes of the second set of points, in degrees, in the range [-90, 90] + lat2 : array_like + Longitudes of the second set of points, in degrees, in the range [-180, 180] + + Examples + -------- + Calculate the position of one degree longitude distance on the equator: + + >>> position_from_distance_and_bearing(0, 0, 111318.84502145034, 0) + (1.0, 0.0) + + Calculate the position of one degree latitude distance from 45 degrees North latitude: + + >>> position_from_distance_and_bearing(0, 45, 111318.84502145034, np.pi / 2) + (8.81429402840006e-17, 45.99999999999999) + """ + lat_rad = np.deg2rad(lat) + lon_rad = np.deg2rad(lon) + + distance_rad = distance / EARTH_RADIUS_METERS + + lat2_rad = np.arcsin( + np.sin(lat_rad) * np.cos(distance_rad) + + np.cos(lat_rad) * np.sin(distance_rad) * np.sin(bearing) + ) + lon2_rad = lon_rad + np.arctan2( + np.cos(bearing) * np.sin(distance_rad) * np.cos(lat_rad), + np.cos(distance_rad) - np.sin(lat_rad) * np.sin(lat2_rad), + ) + + return np.rad2deg(lon2_rad), np.rad2deg(lat2_rad)
+ + + +
+[docs] +def recast_lon(lon: np.ndarray, lon0: Optional[float] = -180) -> np.ndarray: + """Recast (convert) longitude values to a selected range of 360 degrees + starting from ``lon0``. + + Parameters + ---------- + lon : np.ndarray or float + An N-d array of longitudes in degrees + lon0 : float, optional + Starting longitude of the recasted range (default -180). + + Returns + ------- + np.ndarray or float + Converted longitudes in the range `[lon0, lon0+360[` + + Examples + -------- + By default, ``recast_lon`` converts longitude values to the range + `[-180, 180[`: + + >>> recast_lon(200) + -160 + + >>> recast_lon(180) + -180 + + The range of the output longitude is controlled by ``lon0``. + For example, with ``lon0 = 0``, the longitude values are converted to the + range `[0, 360[`. + + >>> recast_lon(200, -180) + -160 + + With ``lon0 = 20``, longitude values are converted to range `[20, 380]`, + which can be useful to avoid cutting the major ocean basins. + + >>> recast_lon(10, 20) + 370 + + See Also + -------- + :func:`recast_lon360`, :func:`recast_lon180` + """ + return np.mod(lon - lon0, 360) + lon0
+ + + +
+[docs] +def recast_lon360(lon: np.ndarray) -> np.ndarray: + """Recast (convert) longitude values to the range `[0, 360[`. + This is a convenience wrapper around :func:`recast_lon` with ``lon0 = 0``. + + Parameters + ---------- + lon : np.ndarray + An N-d array of longitudes in degrees + + Returns + ------- + np.ndarray + Converted longitudes in the range `[0, 360[` + + Examples + -------- + >>> recast_lon360(200) + 200 + + >>> recast_lon360(-200) + 160 + + See Also + -------- + :func:`recast_lon`, :func:`recast_lon180` + """ + return recast_lon(lon, 0)
+ + + +
+[docs] +def recast_lon180(lon: np.ndarray) -> np.ndarray: + """Recast (convert) longitude values to the range `[-180, 180[`. + This is a convenience wrapper around :func:`recast_lon` with ``lon0 = -180``. + + Parameters + ---------- + lon : np.ndarray + An N-d array of longitudes in degrees + + Returns + ------- + np.ndarray + Converted longitudes in the range `[-180, 180[` + + Examples + -------- + >>> recast_lon180(200) + -160 + + >>> recast_lon180(-200) + 160 + + See Also + -------- + :func:`recast_lon`, :func:`recast_lon360` + """ + return recast_lon(lon, -180)
+ + + +
+[docs] +def plane_to_sphere( + x: np.ndarray, y: np.ndarray, lon_origin: float = 0, lat_origin: float = 0 +) -> Tuple[np.ndarray, np.ndarray]: + """Convert Cartesian coordinates on a plane to spherical coordinates. + + The arrays of input zonal and meridional displacements ``x`` and ``y`` are + assumed to follow a contiguous trajectory. The spherical coordinate of each + successive point is determined by following a great circle path from the + previous point. The spherical coordinate of the first point is determined by + following a great circle path from the origin, by default (0, 0). + + The output arrays have the same floating-point output type as the input. + + If projecting multiple trajectories onto the same plane, use + :func:`apply_ragged` for highest accuracy. + + Parameters + ---------- + x : np.ndarray + An N-d array of zonal displacements in meters + y : np.ndarray + An N-d array of meridional displacements in meters + lon_origin : float, optional + Origin longitude of the tangent plane in degrees, default 0 + lat_origin : float, optional + Origin latitude of the tangent plane in degrees, default 0 + + Returns + ------- + lon : np.ndarray + Longitude in degrees + lat : np.ndarray + Latitude in degrees + + Examples + -------- + >>> plane_to_sphere(np.array([0., 0.]), np.array([0., 1000.])) + (array([0.00000000e+00, 5.50062664e-19]), array([0. , 0.0089832])) + + You can also specify an origin longitude and latitude: + + >>> plane_to_sphere(np.array([0., 0.]), np.array([0., 1000.]), lon_origin=1, lat_origin=0) + (array([1., 1.]), array([0. 
, 0.0089832])) + + Raises + ------ + AttributeError + If ``x`` and ``y`` are not NumPy arrays + + See Also + -------- + :func:`sphere_to_plane` + """ + lon = np.empty_like(x) + lat = np.empty_like(y) + + # Cartesian distances between each point + dx = np.diff(x, prepend=0) + dy = np.diff(y, prepend=0) + + distances = np.sqrt(dx**2 + dy**2) + bearings = np.arctan2(dy, dx) + + # Compute spherical coordinates following great circles between each + # successive point. + lon[..., 0], lat[..., 0] = position_from_distance_and_bearing( + lon_origin, lat_origin, distances[..., 0], bearings[..., 0] + ) + for n in range(1, lon.shape[-1]): + lon[..., n], lat[..., n] = position_from_distance_and_bearing( + lon[..., n - 1], lat[..., n - 1], distances[..., n], bearings[..., n] + ) + + return lon, lat
+ + + +
+[docs] +def sphere_to_plane( + lon: np.ndarray, lat: np.ndarray, lon_origin: float = 0, lat_origin: float = 0 +) -> Tuple[np.ndarray, np.ndarray]: + """Convert spherical coordinates to a tangent (Cartesian) plane. + + The arrays of input longitudes and latitudes are assumed to be following + a contiguous trajectory. The Cartesian coordinate of each successive point + is determined by following a great circle path from the previous point. + The Cartesian coordinate of the first point is determined by following a + great circle path from the origin, by default (0, 0). + + The output arrays have the same floating-point output type as the input. + + If projecting multiple trajectories onto the same plane, use + :func:`apply_ragged` for highest accuracy. + + Parameters + ---------- + lon : np.ndarray + An N-d array of longitudes in degrees + lat : np.ndarray + An N-d array of latitudes in degrees + lon_origin : float, optional + Origin longitude of the tangent plane in degrees, default 0 + lat_origin : float, optional + Origin latitude of the tangent plane in degrees, default 0 + + Returns + ------- + x : np.ndarray + x-coordinates on the tangent plane + y : np.ndarray + y-coordinates on the tangent plane + + Examples + -------- + >>> sphere_to_plane(np.array([0., 1.]), np.array([0., 0.])) + (array([ 0. , 111318.84502145]), array([0., 0.])) + + You can also specify an origin longitude and latitude: + + >>> sphere_to_plane(np.array([0., 1.]), np.array([0., 0.]), lon_origin=1, lat_origin=0) + (array([-111318.84502145, 0. 
]), + array([1.36326267e-11, 1.36326267e-11])) + + Raises + ------ + AttributeError + If ``lon`` and ``lat`` are not NumPy arrays + + See Also + -------- + :func:`plane_to_sphere` + """ + x = np.empty_like(lon) + y = np.empty_like(lat) + + distances = np.empty_like(x) + bearings = np.empty_like(x) + + # Distance and bearing of the starting point relative to the origin + distances[0] = distance(lon_origin, lat_origin, lon[..., 0], lat[..., 0]) + bearings[0] = bearing(lon_origin, lat_origin, lon[..., 0], lat[..., 0]) + + # Distance and bearing of the remaining points + distances[1:] = distance(lon[..., :-1], lat[..., :-1], lon[..., 1:], lat[..., 1:]) + bearings[1:] = bearing(lon[..., :-1], lat[..., :-1], lon[..., 1:], lat[..., 1:]) + + dx = distances * np.cos(bearings) + dy = distances * np.sin(bearings) + + x[..., :] = np.cumsum(dx, axis=-1) + y[..., :] = np.cumsum(dy, axis=-1) + + return x, y
+ + + +
+[docs] +def spherical_to_cartesian( + lon: Union[float, list, np.ndarray, xr.DataArray], + lat: Union[float, list, np.ndarray, xr.DataArray], + radius: Optional[float] = EARTH_RADIUS_METERS, +) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """Converts latitude and longitude on a spherical body to + three-dimensional Cartesian coordinates. + + The Cartesian coordinate system is a right-handed system whose + origin lies at the center of a sphere. It is oriented with the + Z-axis passing through the poles and the X-axis passing through + the point lon = 0, lat = 0. This function is inverted by + :func:`cartesian_to_spherical`. + + Parameters + ---------- + lon : array-like + An N-d array of longitudes in degrees. + lat : array-like + An N-d array of latitudes in degrees. + radius: float, optional + The radius of the spherical body in meters. The default assumes the Earth with + EARTH_RADIUS_METERS = 6.3781e6. + + Returns + ------- + x : float or array-like + x-coordinates in 3D in meters. + y : float or array-like + y-coordinates in 3D in meters. + z : float or array-like + z-coordinates in 3D in meters. + + Examples + -------- + >>> spherical_to_cartesian(np.array([0, 45]), np.array([0, 45])) + (array([6378100., 3189050.]), + array([ 0., 3189050.]), + array([ 0. , 4509997.76108592])) + + >>> spherical_to_cartesian(np.array([0, 45, 90]), np.array([0, 90, 180]), radius=1) + (array([ 1.00000000e+00, 4.32978028e-17, -6.12323400e-17]), + array([ 0.00000000e+00, 4.32978028e-17, -1.00000000e+00]), + array([0.0000000e+00, 1.0000000e+00, 1.2246468e-16])) + + >>> x, y, z = spherical_to_cartesian(np.array([0, 5]), np.array([0, 5])) + + Raises + ------ + AttributeError + If ``lon`` and ``lat`` are not NumPy arrays. + + See Also + -------- + :func:`cartesian_to_spherical` + """ + lonr, latr = np.deg2rad(lon), np.deg2rad(lat) + + x = radius * np.cos(latr) * np.cos(lonr) + y = radius * np.cos(latr) * np.sin(lonr) + z = radius * np.sin(latr) + + return x, y, z
+ + + +
+[docs] +def cartesian_to_spherical( + x: Union[float, np.ndarray, xr.DataArray], + y: Union[float, np.ndarray, xr.DataArray], + z: Union[float, np.ndarray, xr.DataArray], +) -> Tuple[np.ndarray, np.ndarray]: + """Converts Cartesian three-dimensional coordinates to latitude and longitude on a + spherical body. + + The Cartesian coordinate system is a right-handed system whose + origin lies at the center of the sphere. It is oriented with the + Z-axis passing through the poles and the X-axis passing through + the point lon = 0, lat = 0. This function is inverted by `spherical_to_cartesian`. + + Parameters + ---------- + x : float or array-like + x-coordinates in 3D. + y : float or array-like + y-coordinates in 3D. + z : float or array-like + z-coordinates in 3D. + + Returns + ------- + lon : float or array-like + An N-d array of longitudes in degrees in range [-180, 180]. + lat : float or array-like + An N-d array of latitudes in degrees. + + Examples + -------- + >>> x = EARTH_RADIUS_METERS * np.cos(np.deg2rad(45)) + >>> y = EARTH_RADIUS_METERS * np.cos(np.deg2rad(45)) + >>> z = 0 * x + >>> cartesian_to_spherical(x, y, z) + (44.99999999999985, 0.0) + + ``cartesian_to_spherical`` is inverted by ``spherical_to_cartesian``: + + >>> x, y, z = spherical_to_cartesian(np.array([45]),np.array(0)) + >>> cartesian_to_spherical(x, y, z) + (array([45.]), array([0.])) + + Raises + ------ + AttributeError + If ``x``, ``y``, and ``z`` are not NumPy arrays. + + See Also + -------- + :func:`spherical_to_cartesian` + """ + + R = np.sqrt(x**2 + y**2 + z**2) + x /= R + y /= R + z /= R + + with np.errstate(divide="ignore"): + lon = np.where( + np.logical_and(x == 0, y == 0), + 0, + recast_lon180(np.rad2deg(np.imag(np.log((x + 1j * y))))), + ) + lat = np.rad2deg(np.arcsin(z)) + + return lon, lat
+ + + +
+[docs] +def cartesian_to_tangentplane( + u: Union[float, np.ndarray], + v: Union[float, np.ndarray], + w: Union[float, np.ndarray], + longitude: Union[float, np.ndarray], + latitude: Union[float, np.ndarray], +) -> Union[Tuple[float], Tuple[np.ndarray]]: + """ + Project a three-dimensional Cartesian vector on a plane tangent to + a spherical Earth. + + The Cartesian coordinate system is a right-handed system whose + origin lies at the center of a sphere. It is oriented with the + Z-axis passing through the north pole at lat = 90, the X-axis passing through + the point lon = 0, lat = 0, and the Y-axis passing through the point lon = 90, + lat = 0. + + Parameters + ---------- + u : float or np.ndarray + First component of Cartesian vector. + v : float or np.ndarray + Second component of Cartesian vector. + w : float or np.ndarray + Third component of Cartesian vector. + longitude : float or np.ndarray + Longitude in degrees of tangent point of plane. + latitude : float or np.ndarray + Latitude in degrees of tangent point of plane. + + Returns + ------- + up: float or np.ndarray + First component of projected vector on tangent plane (positive eastward). + vp: float or np.ndarray + Second component of projected vector on tangent plane (positive northward). + + Raises + ------ + Warning + Raised if the input latitude is not in the expected range [-90, 90]. 
+ + Examples + -------- + >>> u, v = cartesian_to_tangentplane(1, 1, 1, 45, 90) + + See Also + -------- + :func:`tangentplane_to_cartesian` + """ + if np.any(latitude < -90) or np.any(latitude > 90): + warnings.warn("Input latitude outside of range [-90,90].") + + phi = np.radians(latitude) + theta = np.radians(longitude) + u_projected = v * np.cos(theta) - u * np.sin(theta) + v_projected = ( + w * np.cos(phi) + - u * np.cos(theta) * np.sin(phi) + - v * np.sin(theta) * np.sin(phi) + ) + # JML says vh = w.*cos(phi)-u.*cos(theta).*sin(phi)-v.*sin(theta).*sin(phi) but vh=w./cos(phi) is the same + return u_projected, v_projected
+ + + +
+[docs] +def tangentplane_to_cartesian( + up: Union[float, np.ndarray], + vp: Union[float, np.ndarray], + longitude: Union[float, np.ndarray], + latitude: Union[float, np.ndarray], +) -> Union[Tuple[float], Tuple[np.ndarray]]: + """ + Return the three-dimensional Cartesian components of a vector contained in + a plane tangent to a spherical Earth. + + The Cartesian coordinate system is a right-handed system whose + origin lies at the center of a sphere. It is oriented with the + Z-axis passing through the north pole at lat = 90, the X-axis passing through + the point lon = 0, lat = 0, and the Y-axis passing through the point lon = 90, + lat = 0. + + Parameters + ---------- + up: float or np.ndarray + First component of vector on tangent plane (positive eastward). + vp: float or np.ndarray + Second component of vector on tangent plane (positive northward). + longitude : float or np.ndarray + Longitude in degrees of tangent point of plane. + latitude : float or np.ndarray + Latitude in degrees of tangent point of plane. + + Returns + ------- + u : float or np.ndarray + First component of Cartesian vector. + v : float or np.ndarray + Second component of Cartesian vector. + w : float or np.ndarray + Third component of Cartesian vector. + + Examples + -------- + >>> u, v, w = tangentplane_to_cartesian(1, 1, 45, 90) + + Notes + ----- + This function is inverted by :func:`cartesian_to_tangetplane`. + + See Also + -------- + :func:`cartesian_to_tangentplane` + """ + phi = np.radians(latitude) + theta = np.radians(longitude) + u = -up * np.sin(theta) - vp * np.sin(phi) * np.cos(theta) + v = up * np.cos(theta) - vp * np.sin(phi) * np.sin(theta) + w = vp * np.cos(phi) + + return u, v, w
+ + + +
+[docs] +def coriolis_frequency( + latitude: Union[float, np.ndarray], +) -> Union[float, np.ndarray]: + """ + Return the Coriolis frequency or commonly known `f` parameter in geophysical fluid dynamics. + + Parameters + ---------- + latitude : float or np.ndarray + Latitude in degrees. + + Returns + ------- + f : float or np.ndarray + Signed Coriolis frequency in radian per seconds. + + Examples + -------- + >>> f = coriolis_frequency(np.array([0, 45, 90])) + """ + f = 2 * EARTH_ROTATION_RATE * np.sin(np.radians(latitude)) + + return f
+ +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/clouddrift/wavelet.html b/_modules/clouddrift/wavelet.html new file mode 100644 index 00000000..024a63a3 --- /dev/null +++ b/_modules/clouddrift/wavelet.html @@ -0,0 +1,1322 @@ + + + + + + + + + + clouddrift.wavelet — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + +
+ +

Source code for clouddrift.wavelet

+"""
+This module provides functions for computing wavelet transforms and time-frequency analyses,
+notably using generalized Morse wavelets.
+
+The Python code in this module was translated from the MATLAB implementation
+by J. M. Lilly in the jWavelet module of jLab (http://jmlilly.net/code.html).
+
+Lilly, J. M. (2021), jLab: A data analysis package for Matlab, v.1.7.1,
+doi:10.5281/zenodo.4547006, http://www.jmlilly.net/software.
+
+jLab is licensed under the Creative Commons Attribution-Noncommercial-ShareAlike
+License (https://creativecommons.org/licenses/by-nc-sa/4.0/). The code that is
+directly translated from jLab/jWavelet is licensed under the same license.
+Any other code that is added to this module and that is specific to Python and
+not the MATLAB implementation is licensed under CloudDrift's MIT license.
+"""
+
+import numpy as np
+from typing import Optional, Tuple, Union
+from scipy.special import gamma as _gamma, gammaln as _lgamma
+
+
+
+[docs] +def morse_wavelet_transform( + x: np.ndarray, + gamma: float, + beta: float, + radian_frequency: np.ndarray, + complex: Optional[bool] = False, + order: Optional[int] = 1, + normalization: Optional[str] = "bandpass", + boundary: Optional[str] = "mirror", + time_axis: Optional[int] = -1, +) -> Union[Tuple[np.ndarray], np.ndarray]: + """ + Apply a continuous wavelet transform to an input signal using the generalized Morse + wavelets of Olhede and Walden (2002). The wavelet transform is normalized differently + for complex-valued input than for real-valued input, and this in turns depends on whether the + optional argument ``normalization`` is set to ``"bandpass"`` or ``"energy"`` normalizations. + + Parameters + ---------- + x : np.ndarray + Real- or complex-valued signals. The time axis is assumed to be the last. If not, specify optional + argument `time_axis`. + gamma : float + Gamma parameter of the Morse wavelets. + beta : float + Beta parameter of the Morse wavelets. + radian_frequency : np.ndarray + An array of radian frequencies at which the Fourier transform of the wavelets + reach their maximum amplitudes. ``radian_frequency`` is typically between 0 and 2 * np.pi * 0.5, + the normalized Nyquist radian frequency. + complex : boolean, optional + Specify explicitely if the input signal ``x`` is a complex signal. Default is False which + means that the input is real but that is not explicitely tested by the function. + This choice affects the normalization of the outputs and their interpretation. + See examples below. + time_axis : int, optional + Axis on which the time is defined for input ``x`` (default is last, or -1). + normalization : str, optional + Normalization for the wavelet transforms. By default it is assumed to be + ``"bandpass"`` which uses a bandpass normalization, meaning that the FFT + of the wavelets have peak value of 2 for all central frequencies + ``radian_frequency``. 
However, if the optional argument ``complex=True`` + is specified, the wavelets will be divided by 2 so that the total + variance of the input complex signal is equal to the sum of the + variances of the returned analytic (positive) and conjugate analytic + (negative) parts. See examples below. The other option is ``"energy"`` + which uses the unit energy normalization. In this last case, the + time-domain wavelet energies ``np.sum(np.abs(wave)**2)`` are always + unity. + boundary : str, optional + The boundary condition to be imposed at the edges of the input signal ``x``. + Allowed values are ``"mirror"``, ``"zeros"``, and ``"periodic"``. Default is ``"mirror"``. + order : int, optional + Order of Morse wavelets, default is 1. + + Returns + ------- + If the input signal is real as specificied by ``complex=False``: + + wtx : np.ndarray + Time-domain wavelet transform of input ``x`` with shape ((x shape without time_axis), orders, frequencies, time_axis) + but with dimensions of length 1 removed (squeezed). + + If the input signal is complex as specificied by ``complex=True``, a tuple is returned: + + wtx_p : np.array + Time-domain positive wavelet transform of input ``x`` with shape ((x shape without time_axis), frequencies, orders), + but with dimensions of length 1 removed (squeezed). + wtx_n : np.array + Time-domain negative wavelet transform of input ``x`` with shape ((x shape without time_axis), frequencies, orders), + but with dimensions of length 1 removed (squeezed). + + Examples + -------- + Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta parameter 4, + at radian frequency 0.2 cycles per unit time: + + >>> x = np.random.random(1024) + >>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2])) + + Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta parameter 4, + for a complex input signal at radian frequency 0.2 cycles per unit time. 
This case returns the + analytic and conjugate analytic components: + + >>> z = np.random.random(1024) + 1j*np.random.random(1024) + >>> wtz_p, wtz_n = morse_wavelet_transform(z, 3, 4, np.array([2*np.pi*0.2]), complex=True) + + The same result as above can be otained by applying the Morse transform on the real and imaginary + component of z and recombining the results as follows for the "bandpass" normalization: + >>> wtz_real = morse_wavelet_transform(np.real(z)), 3, 4, np.array([2*np.pi*0.2])) + >>> wtz_imag = morse_wavelet_transform(np.imag(z)), 3, 4, np.array([2*np.pi*0.2])) + >>> wtz_p, wtz_n = (wtz_real + 1j*wtz_imag) / 2, (wtz_real - 1j*wtz_imag) / 2 + + For the "energy" normalization, the analytic and conjugate analytic components are obtained as follows + with this alternative method: + >>> wtz_real = morse_wavelet_transform(np.real(z)), 3, 4, np.array([2*np.pi*0.2])) + >>> wtz_imag = morse_wavelet_transform(np.imag(z)), 3, 4, np.array([2*np.pi*0.2])) + >>> wtz_p, wtz_n = (wtz_real + 1j*wtz_imag) / np.sqrt(2), (wtz_real - 1j*wtz_imag) / np.sqrt(2) + + The input signal can have an arbitrary number of dimensions but its ``time_axis`` must be + specified if it is not the last: + + >>> x = np.random.random((1024,10,15)) + >>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]), time_axis=0) + + The default way to handle the boundary conditions is to mirror the ends points + but this can be changed by specifying the chosen boundary method: + + >>> x = np.random.random((10,15,1024)) + >>> wtx = morse_wavelet_transform(x, 3, 4, np.array([2*np.pi*0.2]), boundary="periodic") + + This function can be used to conduct a time-frequency analysis of the input signal by specifying + a range of randian frequencies using the ``morse_logspace_freq`` function as an example: + + >>> x = np.random.random(1024) + >>> gamma = 3 + >>> beta = 4 + >>> radian_frequency = morse_logspace_freq(gamma, beta, np.shape(x)[0]) + >>> wtx = morse_wavelet_transform(x, gamma, beta, 
radian_frequency) + + Raises + ------ + ValueError + If the time axis is outside of the valid range ([-1, np.ndim(x)-1]). + If boundary optional argument is not in ["mirror", "zeros", "periodic"]``. + If normalization optional argument is not in ["bandpass", "energy"]``. + + See Also + -------- + :func:`morse_wavelet`, :func:`wavelet_transform`, :func:`morse_logspace_freq` + + """ + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(x.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(x.shape) - 1}])." + ) + # generate the wavelet + wavelet, _ = morse_wavelet( + np.shape(x)[time_axis], + gamma, + beta, + radian_frequency, + normalization=normalization, + order=order, + ) + + # apply the wavelet transform, distinguish complex and real cases + if complex: + # imaginary case, divide by 2 the wavelet and return analytic and conjugate analytic + if normalization == "bandpass": + wtx_p = wavelet_transform( + 0.5 * x, wavelet, boundary="mirror", time_axis=time_axis + ) + wtx_n = wavelet_transform( + np.conj(0.5 * x), wavelet, boundary="mirror", time_axis=time_axis + ) + elif normalization == "energy": + wtx_p = wavelet_transform( + x / np.sqrt(2), wavelet, boundary="mirror", time_axis=time_axis + ) + wtx_n = wavelet_transform( + np.conj(x / np.sqrt(2)), wavelet, boundary="mirror", time_axis=time_axis + ) + wtx = wtx_p, wtx_n + + elif not complex: + # real case + wtx = wavelet_transform(x, wavelet, boundary=boundary, time_axis=time_axis) + + else: + raise ValueError( + "`complex` optional argument must be boolean 'True' or 'False'" + ) + + return wtx
+ + + +
+[docs] +def wavelet_transform( + x: np.ndarray, + wavelet: np.ndarray, + boundary: Optional[str] = "mirror", + time_axis: Optional[int] = -1, + freq_axis: Optional[int] = -2, + order_axis: Optional[int] = -3, +) -> np.ndarray: + """ + Apply a continuous wavelet transform to an input signal using an input wavelet + function. Such wavelet can be provided by the function ``morse_wavelet``. + + Parameters + ---------- + x : np.ndarray + Real- or complex-valued signals. + wavelet : np.ndarray + A suite of time-domain wavelets, typically returned by the function ``morse_wavelet``. + The length of the time axis of the wavelets must be the last one and matches the + length of the time axis of x. The other dimensions (axes) of the wavelets (such as orders and frequencies) are + typically organized as orders, frequencies, and time, unless specified by optional arguments freq_axis and order_axis. + The normalization of the wavelets is assumed to be "bandpass", if not, use kwarg normalization="energy", see ``morse_wavelet``. + boundary : str, optional + The boundary condition to be imposed at the edges of the input signal ``x``. + Allowed values are ``"mirror"``, ``"zeros"``, and ``"periodic"``. Default is ``"mirror"``. + time_axis : int, optional + Axis on which the time is defined for input ``x`` (default is last, or -1). Note that the time axis of the + wavelets must be last. + freq_axis : int, optional + Axis of ``wavelet`` for the frequencies (default is second or 1) + order_axis : int, optional + Axis of ``wavelet`` for the orders (default is first or 0) + + Returns + ------- + wtx : np.ndarray + Time-domain wavelet transform of ``x`` with shape ((x shape without time_axis), orders, frequencies, time_axis) + but with dimensions of length 1 removed (squeezed). 
+ + Examples + -------- + Apply a wavelet transform with a Morse wavelet with gamma parameter 3, beta + parameter 4, at radian frequency 0.2 cycles per unit time: + + >>> x = np.random.random(1024) + >>> wavelet, _ = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2])) + >>> wtx = wavelet_transform(x, wavelet) + + The input signal can have an arbitrary number of dimensions but its + ``time_axis`` must be specified if it is not the last: + + >>> x = np.random.random((1024,10,15)) + >>> wavelet, _ = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2])) + >>> wtx = wavelet_transform(x, wavelet,time_axis=0) + + Raises + ------ + ValueError + If the time axis is outside of the valid range ([-1, N-1]). + If the shape of time axis is different for input signal and wavelet. + If boundary optional argument is not in ["mirror", "zeros", "periodic"]``. + + See Also + -------- + :func:`morse_wavelet`, :func:`morse_wavelet_transform`, :func:`morse_freq` + """ + # time_axis must be in valid range + if time_axis < -1 or time_axis > len(x.shape) - 1: + raise ValueError( + f"time_axis ({time_axis}) is outside of the valid range ([-1," + f" {len(x.shape) - 1}])." + ) + # Positions and time arrays must have the same shape. 
+ if x.shape[time_axis] != wavelet.shape[-1]: + raise ValueError("x and wavelet time axes must have the same length.") + + wavelet_ = np.moveaxis(wavelet, [freq_axis, order_axis], [-2, -3]) + + # if x is of dimension 1 we need to expand + # otherwise make sure time axis is last + if np.ndim(x) < 2: + x_ = np.expand_dims(x, axis=0) + else: + x_ = np.moveaxis(x, time_axis, -1) + + # add detrending option eventually + + # apply boundary conditions + if boundary == "mirror": + x_ = np.concatenate((np.flip(x_, axis=-1), x_, np.flip(x_, axis=-1)), axis=-1) + elif boundary == "zeros": + x_ = np.concatenate((np.zeros_like(x_), x_, np.zeros_like(x_)), axis=-1) + elif boundary == "periodic": + pass + else: + raise ValueError("boundary must be one of 'mirror', 'zeros', or 'periodic'.") + + time_length = np.shape(x)[time_axis] + time_length_ = np.shape(x_)[-1] + + # pad wavelet with zeros: JML ok + order_length, freq_length, _ = np.shape(wavelet) + _wavelet = np.zeros((order_length, freq_length, time_length_), dtype=np.cdouble) + + index = slice( + int(np.floor(time_length_ - time_length) / 2), + int(time_length + np.floor(time_length_ - time_length) / 2), + ) + _wavelet[:, :, index] = wavelet_ + + # take fft along axis = -1 + _wavelet_fft = np.fft.fft(_wavelet) + om = 2 * np.pi * np.linspace(0, 1 - 1 / time_length_, time_length_) + if time_length_ % 2 == 0: + _wavelet_fft = ( + _wavelet_fft + * np.exp(1j * -om * (time_length_ + 1) / 2) + * np.sign(np.pi - om) + ) + else: + _wavelet_fft = _wavelet_fft * np.exp(1j * -om * (time_length_ + 1) / 2) + + # here we should be able to automate the tiling without assuming extra dimensions of wave + X_ = np.tile( + np.expand_dims(np.fft.fft(x_), (-3, -2)), + (1, order_length, freq_length, 1), + ) + + # finally the transform; return precision of input `x``; central part only + complex_dtype = np.cdouble if x.dtype == np.single else np.csingle + wtx = np.fft.ifft(X_ * np.conj(_wavelet_fft)).astype(complex_dtype) + wtx = wtx[..., index] + + 
# reposition the time axis if needed from axis -1 + if time_axis != -1: + wtx = np.moveaxis(wtx, -1, time_axis) + + # remove extra dimensions if needed + wtx = np.squeeze(wtx) + + return wtx
+ + + +
+[docs] +def morse_wavelet( + length: int, + gamma: float, + beta: float, + radian_frequency: np.ndarray, + order: Optional[int] = 1, + normalization: Optional[str] = "bandpass", +) -> Tuple[np.ndarray, np.ndarray]: + """ + Compute the generalized Morse wavelets of Olhede and Walden (2002), doi: 10.1109/TSP.2002.804066. + + Parameters + ---------- + length : int + Length of the wavelets. + gamma : float + Gamma parameter of the wavelets. + beta : float + Beta parameter of the wavelets. + radian_frequency : np.ndarray + The radian frequencies at which the Fourier transform of the wavelets + reach their maximum amplitudes. radian_frequency is between 0 and 2 * np.pi * 0.5, + the normalized Nyquist radian frequency. + order : int, optional + Order of wavelets, default is 1. + normalization : str, optional + Normalization for the ``wavelet`` output. By default it is assumed to be ``"bandpass"`` + which uses a bandpass normalization, meaning that the FFT of the wavelets + have peak value of 2 for all central frequencies ``radian_frequency``. The other option is + ``"energy"``which uses the unit energy normalization. In this last case, the time-domain wavelet + energies ``np.sum(np.abs(wave)**2)`` are always unity. + + Returns + ------- + wavelet : np.ndarray + Time-domain wavelets with shape (order, radian_frequency, length). + wavelet_fft: np.ndarray + Frequency-domain wavelets with shape (order, radian_frequency, length). 
+ + Examples + -------- + Compute a Morse wavelet with gamma parameter 3, beta parameter 4, at radian + frequency 0.2 cycles per unit time: + + >>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2])) + >>> np.shape(wavelet) + (1, 1, 1024) + + Compute a suite of Morse wavelets with gamma parameter 3, beta parameter 4, up to order 3, + at radian frequencies 0.2 and 0.3 cycles per unit time: + + >>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2, 2*np.pi*0.3]), order=3) + >>> np.shape(wavelet) + (3, 2, 1024) + + Compute a Morse wavelet specifying an energy normalization : + >>> wavelet, wavelet_fft = morse_wavelet(1024, 3, 4, np.array([2*np.pi*0.2]), normalization="energy") + + Raises + ------ + ValueError + If normalization optional argument is not in ["bandpass", "energy"]``. + + See Also + -------- + :func:`wavelet_transform`, :func:`morse_wavelet_transform`, :func:`morse_freq`, :func:`morse_logspace_freq`, :func:`morse_amplitude`, :func:`morse_properties` + """ + # ad test for radian_frequency being a numpy array + # initialization + wavelet = np.zeros((length, order, len(radian_frequency)), dtype=np.cdouble) + waveletfft = np.zeros((length, order, len(radian_frequency)), dtype=np.cdouble) + + # call to morse_wavelet take only gamma and be as float, no array + fo, _, _ = morse_freq(gamma, beta) + for i in range(len(radian_frequency)): + wavelet_tmp = np.zeros((length, order), dtype=np.cdouble) + waveletfft_tmp = np.zeros((length, order), dtype=np.cdouble) + + # wavelet frequencies + fact = np.abs(radian_frequency[i]) / fo + # norm_radian_frequency first dim is n points + norm_radian_frequency = ( + 2 * np.pi * np.linspace(0, 1 - 1 / length, length) / fact + ) + if normalization == "energy": + with np.errstate(divide="ignore"): + waveletzero = np.exp( + beta * np.log(norm_radian_frequency) + - norm_radian_frequency**gamma + ) + elif normalization == "bandpass": + if beta == 0: + waveletzero = 2 * 
np.exp(-(norm_radian_frequency**gamma)) + else: + with np.errstate(divide="ignore"): + waveletzero = 2 * np.exp( + -beta * np.log(fo) + + fo**gamma + + beta * np.log(norm_radian_frequency) + - norm_radian_frequency**gamma + ) + else: + raise ValueError( + "Normalization option (norm) must be one of 'energy' or 'bandpass'." + ) + waveletzero[0] = 0.5 * waveletzero[0] + # Replace NaN with zeros in waveletzero + waveletzero = np.nan_to_num(waveletzero, copy=False, nan=0.0) + # second family is never used + waveletfft_tmp = _morse_wavelet_first_family( + fact, + gamma, + beta, + norm_radian_frequency, + waveletzero, + order=order, + normalization=normalization, + ) + waveletfft_tmp = np.nan_to_num(waveletfft_tmp, posinf=0, neginf=0) + # shape of waveletfft_tmp is points, order + # center wavelet + norm_radian_frequency_mat = np.tile( + np.expand_dims(norm_radian_frequency, -1), (order) + ) + waveletfft_tmp = waveletfft_tmp * np.exp( + 1j * norm_radian_frequency_mat * (length + 1) / 2 * fact + ) + # time domain waveletlet + wavelet_tmp = np.fft.ifft(waveletfft_tmp, axis=0) + if radian_frequency[i] < 0: + wavelet[:, :, i] = np.conj(wavelet_tmp) + waveletfft_tmp[1:-1, :] = np.flip(waveletfft_tmp[1:-1, :], axis=0) + waveletfft[:, :, i] = waveletfft_tmp + else: + waveletfft[:, :, i] = waveletfft_tmp + wavelet[:, :, i] = wavelet_tmp + + # reorder dimension to be (order, frequency, time steps) + # enforce length 1 for first axis if order=1 (no squeezing) + wavelet = np.moveaxis(wavelet, [0, 1, 2], [2, 0, 1]) + waveletfft = np.moveaxis(waveletfft, [0, 1, 2], [2, 0, 1]) + + return wavelet, waveletfft
+ + + +def _morse_wavelet_first_family( + fact: float, + gamma: float, + beta: float, + norm_radian_frequency: np.ndarray, + wavezero: np.ndarray, + order: Optional[int] = 1, + normalization: Optional[str] = "bandpass", +) -> np.ndarray: + """ + Derive first family of Morse wavelets. Internal use only. + """ + r = (2 * beta + 1) / gamma + c = r - 1 + L = np.zeros_like(norm_radian_frequency, dtype=np.float64) + wavefft1 = np.zeros((np.shape(wavezero)[0], order)) + + for i in np.arange(0, order): + if normalization == "energy": + A = morse_amplitude(gamma, beta, order=i + 1, normalization=normalization) + coeff = np.sqrt(1 / fact) * A + elif normalization == "bandpass": + if beta != 0: + coeff = np.sqrt(np.exp(_lgamma(r) + _lgamma(i + 1) - _lgamma(i + r))) + else: + coeff = 1 + + index = slice( + 0, int(np.round(np.shape(wavezero)[0] / 2)) + ) # how to define indices? + L[index] = _laguerre(2 * norm_radian_frequency[index] ** gamma, i, c) + wavefft1[:, i] = coeff * wavezero * L + + return wavefft1 + + +
+[docs] +def morse_freq( + gamma: Union[np.ndarray, float], + beta: Union[np.ndarray, float], +) -> Union[Tuple[np.ndarray], Tuple[float]]: + """ + Frequency measures for generalized Morse wavelets. This functions calculates + three different measures fm, fe, and fi of the frequency of the lowest-order generalized Morse + wavelet specified by parameters ``gamma`` and ``beta``. + + Note that all frequency quantities here are in *radian* as in cos(f t) and not + cyclic as in np.cos(2 np.pi f t). + + For ``beta=0``, the corresponding wavelet becomes an analytic lowpass filter, and fm + is not defined in the usual way but as the point at which the filter has decayed + to one-half of its peak power. + + For details see Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607. + + Parameters + ---------- + gamma : np.ndarray or float + Gamma parameter of the wavelets. + beta : np.ndarray or float + Beta parameter of the wavelets. + + Returns + ------- + fm : np.ndarray + The modal or peak frequency. + fe : np.ndarray + The energy frequency. + fi : np.ndarray + The instantaneous frequency at the wavelets' centers. 
+ + Examples + -------- + >>> fm, fe, fi = morse_freq(3, 4) + + >>> morse_freq(3, 4) + (array(1.10064242), 1.1025129235952809, 1.1077321674324723) + + >>> morse_freq(3, np.array([10, 20, 30])) + (array([1.49380158, 1.88207206, 2.15443469]), + array([1.49421505, 1.88220264, 2.15450116]), + array([1.49543843, 1.88259299, 2.15470024])) + + >>> morse_freq(np.array([3, 4, 5]), np.array([10, 20, 30])) + (array([1.49380158, 1.49534878, 1.43096908]), + array([1.49421505, 1.49080278, 1.4262489 ]), + array([1.49543843, 1.48652036, 1.42163583])) + + >>> morse_freq(np.array([3, 4, 5]), 10) + (array([1.49380158, 1.25743343, 1.14869835]), + array([1.49421505, 1.25000964, 1.13759731]), + array([1.49543843, 1.24350315, 1.12739747])) + + See Also + -------- + :func:`morse_wavelet`, :func:`morse_amplitude` + """ + with np.errstate(divide="ignore"): # ignore warning when beta=0 + fm = np.where( + beta == 0, + np.log(2) ** (1 / gamma), + np.exp((1 / gamma) * (np.log(beta) - np.log(gamma))), + ) + + fe = ( + 1 + / (2 ** (1 / gamma)) + * _gamma((2 * beta + 2) / gamma) + / _gamma((2 * beta + 1) / gamma) + ) + + fi = _gamma((beta + 2) / gamma) / _gamma((beta + 1) / gamma) + + return fm, fe, fi
+ + + +
+[docs] +def morse_logspace_freq( + gamma: float, + beta: float, + length: int, + highset: Optional[Tuple[float]] = (0.1, np.pi), + lowset: Optional[Tuple[float]] = (5, 0), + density: Optional[int] = 4, +) -> np.ndarray: + """ + Compute logarithmically-spaced frequencies for generalized Morse wavelets + with parameters gamma and beta. This is a useful function to obtain the frequencies + needed for time-frequency analyses using wavelets. If ``radian_frequencies`` is the + output, ``np.log(radian_frequencies)`` is uniformly spaced, following convention + for wavelet analysis. See Lilly (2017), doi: 10.1098/rspa.2016.0776. + + Default settings to compute the frequencies can be changed by passing optional + arguments ``lowset``, ``highset``, and ``density``. See below. + + Parameters + ---------- + gamma : float + Gamma parameter of the Morse wavelets. + beta : float + Beta parameter of the Morse wavelets. + length : int + Length of the Morse wavelets and input signals. + highset : tuple of floats, optional. + Tuple of values (eta, high) used for high-frequency cutoff calculation. The highest + frequency is set to be the minimum of a specified value and a cutoff frequency + based on a Nyquist overlap condition: the highest frequency is the minimum of + the specified value high, and the largest frequency for which the wavelet will + satisfy the threshold level eta. Here eta be a number between zero and one + specifying the ratio of a frequency-domain wavelet at the Nyquist frequency + to its peak value. Default is (eta, high) = (0.1, np.pi). + lowset : tuple of floats, optional. + Tupe of values (P, low) set used for low-frequency cutoff calculation based on an + endpoint overlap condition. The lowest frequency is set such that the lowest-frequency + wavelet will reach some number P, called the packing number, times its central window + width at the ends of the time series. 
A choice of P=1 corresponds to roughly 95% of + the time-domain wavelet energy being contained within the time series endpoints for + a wavelet at the center of the domain. The second value of the tuple is the absolute + lowest frequency. Default is (P, low) = (5, 0). + density : int, optional + This optional argument controls the number of points in the returned frequency + array. Higher values of ``density`` mean more overlap in the frequency + domain between transforms. When ``density=1``, the peak of one wavelet is located at the + half-power points of the adjacent wavelet. The default ``density=4`` means + that four other wavelets will occur between the peak of one wavelet and + its half-power point. + + Returns + ------- + radian_frequency : np.ndarray + Logarithmically-spaced frequencies in radians cycles per unit time, + sorted in descending order. + + Examples + -------- + Generate a frequency array for the generalized Morse wavelet + with parameters gamma=3 and beta=5 for a time series of length n=1024: + + >>> radian_frequency = morse_logspace_freq(3, 5, 1024) + >>> radian_frequency = morse_logspace_freq(3, 5, 1024, highset=(0.2, np.pi), lowset=(5, 0)) + >>> radian_frequency = morse_logspace_freq(3, 5, 1024, highset=(0.2, np.pi), lowset=(5, 0), density=10) + + See Also + -------- + :func:`morse_wavelet`, :func:`morse_freq`, :func:`morse_properties` + """ + gamma_ = np.array([gamma]) + beta_ = np.array([beta]) + width, _, _ = morse_properties(gamma_, beta_) + + _high = _morsehigh(gamma_, beta_, highset[0]) + high_ = np.min(np.append(_high, highset[1])) + + low = 2 * np.sqrt(2) * width * lowset[0] / length + low_ = np.max(np.append(low, lowset[1])) + + r = 1 + 1 / (density * width) + m = np.floor(np.log10(high_ / low_) / np.log10(r)).astype(int)[0] + radian_frequency = high_ * np.ones(int(m + 1)) / r ** np.arange(0, m + 1) + + return radian_frequency
+ + + +def _morsehigh( + gamma: np.ndarray, + beta: np.ndarray, + eta: float, +) -> Union[np.ndarray, float]: + """High-frequency cutoff of the generalized Morse wavelets. + gamma and be should be arrays of the same length. Internal use only. + """ + m = 10000 + omhigh = np.linspace(0, np.pi, m) + f = np.zeros_like(gamma, dtype="float") + + for i in range(0, len(gamma)): + fm, _, _ = morse_freq(gamma[i], beta[i]) + with np.errstate(all="ignore"): + om = fm * np.pi / omhigh + lnwave1 = beta[i] / gamma[i] * np.log(np.exp(1) * gamma[i] / beta[i]) + lnwave2 = beta[i] * np.log(om) - om ** gamma[i] + lnwave = lnwave1 + lnwave2 + index = np.nonzero(np.log(eta) - lnwave < 0)[0][0] + f[i] = omhigh[index] + + return f + + +
+[docs] +def morse_properties( + gamma: Union[np.ndarray, float], + beta: Union[np.ndarray, float], +) -> Union[Tuple[np.ndarray], Tuple[float]]: + """ + Calculate the properties of the demodulated generalized Morse wavelets. + See Lilly and Olhede (2009), doi: 10.1109/TSP.2008.2007607. + + Parameters + ---------- + gamma : np.ndarray or float + Gamma parameter of the wavelets. + beta : np.ndarray or float + Beta parameter of the wavelets. + + Returns + ------- + width : np.ndarray or float + Dimensionless time-domain window width of the wavelets. + skew : np.ndarray or float + Imaginary part of normalized third moment of the time-domain demodulate, + or 'demodulate skewness'. + kurt : np.ndarray or float + Normalized fourth moment of the time-domain demodulate, + or 'demodulate kurtosis'. + + Examples + -------- + TODO + + See Also + -------- + :func:`morse_wavelet`, :func:`morse_freq`, :func:`morse_amplitude`, :func:`morse_logspace_freq`. + """ + # test common size? or could be broadcasted + width = np.sqrt(gamma * beta) + skew = (gamma - 3) / width + kurt = 3 - skew**2 - 2 / width**2 + + return width, skew, kurt
+ + + +
+[docs] +def morse_amplitude( + gamma: Union[np.ndarray, float], + beta: Union[np.ndarray, float], + order: Optional[np.int64] = 1, + normalization: Optional[str] = "bandpass", +) -> float: + """ + Calculate the amplitude coefficient of the generalized Morse wavelets. + By default, the amplitude is calculated such that the maximum of the + frequency-domain wavelet is equal to 2, which is the bandpass normalization. + Optionally, specify ``normalization="energy"`` in order to return the coefficient + giving the wavelets unit energies. See Lilly and Olhede (2009), doi doi: 10.1109/TSP.2008.2007607. + + Parameters + ---------- + gamma : np.ndarray or float + Gamma parameter of the wavelets. + beta : np.ndarray or float + Beta parameter of the wavelets. + order : int, optional + Order of wavelets, default is 1. + normalization : str, optional + Normalization for the wavelets. By default it is assumed to be ``"bandpass"`` + which uses a bandpass normalization, meaning that the FFT of the wavelets + have peak value of 2 for all central frequencies ``radian_frequency``. The other option is ``"energy"`` + which uses the unit energy normalization. In this last case the time-domain wavelet + energies ``np.sum(np.abs(wave)**2)`` are always unity. + + Returns + ------- + amp : np.ndarray or float + The amplitude coefficient of the wavelets. + + Examples + -------- + TODO + + See Also + -------- + :func:`morse_wavelet`, :func:`morse_freq`, :func:`morse_properties`, :func:`morse_logspace_freq`. + """ + # add test for type and shape in case of ndarray + if normalization == "energy": + r = (2 * beta + 1) / gamma + amp = ( + 2 + * np.pi + * gamma + * (2**r) + * np.exp(_lgamma(order) - _lgamma(order + r - 1)) + ) ** 0.5 + elif normalization == "bandpass": + fm, _, _ = morse_freq(gamma, beta) + amp = np.where(beta == 0, 2, 2 / (np.exp(beta * np.log(fm) - fm**gamma))) + else: + raise ValueError( + "Normalization option (normalization) must be one of 'energy' or 'bandpass'." 
+ ) + + return amp
+ + + +def _laguerre( + x: Union[np.ndarray, float], + k: float, + c: float, +) -> np.ndarray: + """Generalized Laguerre polynomials""" + y = np.zeros_like(x, dtype="float") + for i in np.arange(0, k + 1): + fact = np.exp(_lgamma(k + c + 1) - _lgamma(c + i + 1) - _lgamma(k - i + 1)) + y = y + (-1) ** i * fact * x**i / _gamma(i + 1) + return y +
+ +
+ + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_modules/index.html b/_modules/index.html new file mode 100644 index 00000000..bf527bb0 --- /dev/null +++ b/_modules/index.html @@ -0,0 +1,486 @@ + + + + + + + + + + Overview: module code — CloudDrift documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
+ + + + +
+
+ + + + + +
+ + + + + + + + + + + + + +
+ +
+ + + +
+ +
+
+ +
+
+ +
+ +
+ +
+ + +
+ +
+ +
+ + + + + + + + + + + + +
+ +
+ +
+
+ + + +
+

+ +
+
+ +
+
+
+ + + + + + + + + + + +
+ +
+
+
+ +
+ + + +
+ + +
+ + + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.cast_float64_variables_to_float32.rst b/_sources/_autosummary/clouddrift.adapters.gdp.cast_float64_variables_to_float32.rst new file mode 100644 index 00000000..dbea8a8e --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.cast_float64_variables_to_float32.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.cast\_float64\_variables\_to\_float32 +============================================================= + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: cast_float64_variables_to_float32 \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.cut_str.rst b/_sources/_autosummary/clouddrift.adapters.gdp.cut_str.rst new file mode 100644 index 00000000..bb1786e9 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.cut_str.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.cut\_str +================================ + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: cut_str \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.decode_date.rst b/_sources/_autosummary/clouddrift.adapters.gdp.decode_date.rst new file mode 100644 index 00000000..62449b7a --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.decode_date.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.decode\_date +==================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: decode_date \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.drogue_presence.rst b/_sources/_autosummary/clouddrift.adapters.gdp.drogue_presence.rst new file mode 100644 index 00000000..0a7a649e --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.drogue_presence.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.drogue\_presence +======================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. 
autofunction:: drogue_presence \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.fetch_netcdf.rst b/_sources/_autosummary/clouddrift.adapters.gdp.fetch_netcdf.rst new file mode 100644 index 00000000..4d08cedd --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.fetch_netcdf.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.fetch\_netcdf +===================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: fetch_netcdf \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.fill_values.rst b/_sources/_autosummary/clouddrift.adapters.gdp.fill_values.rst new file mode 100644 index 00000000..fdaf3967 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.fill_values.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.fill\_values +==================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: fill_values \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.get_gdp_metadata.rst b/_sources/_autosummary/clouddrift.adapters.gdp.get_gdp_metadata.rst new file mode 100644 index 00000000..bfae0290 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.get_gdp_metadata.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.get\_gdp\_metadata +========================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: get_gdp_metadata \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.order_by_date.rst b/_sources/_autosummary/clouddrift.adapters.gdp.order_by_date.rst new file mode 100644 index 00000000..f0e80cb5 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.order_by_date.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.order\_by\_date +======================================= + +.. currentmodule:: clouddrift.adapters.gdp + +.. 
autofunction:: order_by_date \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.parse_directory_file.rst b/_sources/_autosummary/clouddrift.adapters.gdp.parse_directory_file.rst new file mode 100644 index 00000000..fbf192b9 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.parse_directory_file.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.parse\_directory\_file +============================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: parse_directory_file \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.rowsize.rst b/_sources/_autosummary/clouddrift.adapters.gdp.rowsize.rst new file mode 100644 index 00000000..71432532 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.rowsize.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.rowsize +=============================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: rowsize \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.rst b/_sources/_autosummary/clouddrift.adapters.gdp.rst new file mode 100644 index 00000000..2088d1b8 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.rst @@ -0,0 +1,41 @@ +clouddrift.adapters.gdp +======================= + +.. automodule:: clouddrift.adapters.gdp + :members: + + + + + + + + .. rubric:: Functions + + .. 
autosummary:: + :toctree: + + cast_float64_variables_to_float32 + cut_str + decode_date + drogue_presence + fetch_netcdf + fill_values + get_gdp_metadata + order_by_date + parse_directory_file + rowsize + str_to_float + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.adapters.gdp.str_to_float.rst b/_sources/_autosummary/clouddrift.adapters.gdp.str_to_float.rst new file mode 100644 index 00000000..eea69cd5 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp.str_to_float.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp.str\_to\_float +====================================== + +.. currentmodule:: clouddrift.adapters.gdp + +.. autofunction:: str_to_float \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp1h.download.rst b/_sources/_autosummary/clouddrift.adapters.gdp1h.download.rst new file mode 100644 index 00000000..73404cf7 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp1h.download.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp1h.download +================================== + +.. currentmodule:: clouddrift.adapters.gdp1h + +.. autofunction:: download \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp1h.preprocess.rst b/_sources/_autosummary/clouddrift.adapters.gdp1h.preprocess.rst new file mode 100644 index 00000000..4eefda7a --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp1h.preprocess.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp1h.preprocess +==================================== + +.. currentmodule:: clouddrift.adapters.gdp1h + +.. autofunction:: preprocess \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp1h.rst b/_sources/_autosummary/clouddrift.adapters.gdp1h.rst new file mode 100644 index 00000000..908c49e0 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp1h.rst @@ -0,0 +1,33 @@ +clouddrift.adapters.gdp1h +========================= + +.. 
automodule:: clouddrift.adapters.gdp1h + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + download + preprocess + to_raggedarray + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.adapters.gdp1h.to_raggedarray.rst b/_sources/_autosummary/clouddrift.adapters.gdp1h.to_raggedarray.rst new file mode 100644 index 00000000..046b40dc --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp1h.to_raggedarray.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp1h.to\_raggedarray +========================================= + +.. currentmodule:: clouddrift.adapters.gdp1h + +.. autofunction:: to_raggedarray \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp6h.download.rst b/_sources/_autosummary/clouddrift.adapters.gdp6h.download.rst new file mode 100644 index 00000000..30fa0927 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp6h.download.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp6h.download +================================== + +.. currentmodule:: clouddrift.adapters.gdp6h + +.. autofunction:: download \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp6h.preprocess.rst b/_sources/_autosummary/clouddrift.adapters.gdp6h.preprocess.rst new file mode 100644 index 00000000..a9653f9e --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp6h.preprocess.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp6h.preprocess +==================================== + +.. currentmodule:: clouddrift.adapters.gdp6h + +.. autofunction:: preprocess \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.gdp6h.rst b/_sources/_autosummary/clouddrift.adapters.gdp6h.rst new file mode 100644 index 00000000..2d038ddf --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp6h.rst @@ -0,0 +1,33 @@ +clouddrift.adapters.gdp6h +========================= + +.. automodule:: clouddrift.adapters.gdp6h + :members: + + + + + + + + .. 
rubric:: Functions + + .. autosummary:: + :toctree: + + download + preprocess + to_raggedarray + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.adapters.gdp6h.to_raggedarray.rst b/_sources/_autosummary/clouddrift.adapters.gdp6h.to_raggedarray.rst new file mode 100644 index 00000000..d455ad5d --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.gdp6h.to_raggedarray.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.gdp6h.to\_raggedarray +========================================= + +.. currentmodule:: clouddrift.adapters.gdp6h + +.. autofunction:: to_raggedarray \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.glad.get_dataframe.rst b/_sources/_autosummary/clouddrift.adapters.glad.get_dataframe.rst new file mode 100644 index 00000000..3bc0c352 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.glad.get_dataframe.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.glad.get\_dataframe +======================================= + +.. currentmodule:: clouddrift.adapters.glad + +.. autofunction:: get_dataframe \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.glad.rst b/_sources/_autosummary/clouddrift.adapters.glad.rst new file mode 100644 index 00000000..4c3608fa --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.glad.rst @@ -0,0 +1,32 @@ +clouddrift.adapters.glad +======================== + +.. automodule:: clouddrift.adapters.glad + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + get_dataframe + to_xarray + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.adapters.glad.to_xarray.rst b/_sources/_autosummary/clouddrift.adapters.glad.to_xarray.rst new file mode 100644 index 00000000..c4709c86 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.glad.to_xarray.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.glad.to\_xarray +=================================== + +.. currentmodule:: clouddrift.adapters.glad + +.. 
autofunction:: to_xarray \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.mosaic.get_dataframes.rst b/_sources/_autosummary/clouddrift.adapters.mosaic.get_dataframes.rst new file mode 100644 index 00000000..3cab1328 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.mosaic.get_dataframes.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.mosaic.get\_dataframes +========================================== + +.. currentmodule:: clouddrift.adapters.mosaic + +.. autofunction:: get_dataframes \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.mosaic.get_file_urls.rst b/_sources/_autosummary/clouddrift.adapters.mosaic.get_file_urls.rst new file mode 100644 index 00000000..9bcb3c25 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.mosaic.get_file_urls.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.mosaic.get\_file\_urls +========================================== + +.. currentmodule:: clouddrift.adapters.mosaic + +.. autofunction:: get_file_urls \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.mosaic.get_repository_metadata.rst b/_sources/_autosummary/clouddrift.adapters.mosaic.get_repository_metadata.rst new file mode 100644 index 00000000..b349a8cd --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.mosaic.get_repository_metadata.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.mosaic.get\_repository\_metadata +==================================================== + +.. currentmodule:: clouddrift.adapters.mosaic + +.. autofunction:: get_repository_metadata \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.mosaic.rst b/_sources/_autosummary/clouddrift.adapters.mosaic.rst new file mode 100644 index 00000000..556f64be --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.mosaic.rst @@ -0,0 +1,34 @@ +clouddrift.adapters.mosaic +========================== + +.. 
automodule:: clouddrift.adapters.mosaic + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + get_dataframes + get_file_urls + get_repository_metadata + to_xarray + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.adapters.mosaic.to_xarray.rst b/_sources/_autosummary/clouddrift.adapters.mosaic.to_xarray.rst new file mode 100644 index 00000000..8041998e --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.mosaic.to_xarray.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.mosaic.to\_xarray +===================================== + +.. currentmodule:: clouddrift.adapters.mosaic + +.. autofunction:: to_xarray \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.subsurface_floats.download.rst b/_sources/_autosummary/clouddrift.adapters.subsurface_floats.download.rst new file mode 100644 index 00000000..9d08b36e --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.subsurface_floats.download.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.subsurface\_floats.download +=============================================== + +.. currentmodule:: clouddrift.adapters.subsurface_floats + +.. autofunction:: download \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.adapters.subsurface_floats.rst b/_sources/_autosummary/clouddrift.adapters.subsurface_floats.rst new file mode 100644 index 00000000..d6701ce1 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.subsurface_floats.rst @@ -0,0 +1,32 @@ +clouddrift.adapters.subsurface\_floats +====================================== + +.. automodule:: clouddrift.adapters.subsurface_floats + :members: + + + + + + + + .. rubric:: Functions + + .. 
autosummary:: + :toctree: + + download + to_xarray + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.adapters.subsurface_floats.to_xarray.rst b/_sources/_autosummary/clouddrift.adapters.subsurface_floats.to_xarray.rst new file mode 100644 index 00000000..d609c841 --- /dev/null +++ b/_sources/_autosummary/clouddrift.adapters.subsurface_floats.to_xarray.rst @@ -0,0 +1,6 @@ +clouddrift.adapters.subsurface\_floats.to\_xarray +================================================= + +.. currentmodule:: clouddrift.adapters.subsurface_floats + +.. autofunction:: to_xarray \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.andro.rst b/_sources/_autosummary/clouddrift.datasets.andro.rst new file mode 100644 index 00000000..4c098fea --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.andro.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.andro +========================= + +.. currentmodule:: clouddrift.datasets + +.. autofunction:: andro \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.gdp1h.rst b/_sources/_autosummary/clouddrift.datasets.gdp1h.rst new file mode 100644 index 00000000..81957f3c --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.gdp1h.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.gdp1h +========================= + +.. currentmodule:: clouddrift.datasets + +.. autofunction:: gdp1h \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.gdp6h.rst b/_sources/_autosummary/clouddrift.datasets.gdp6h.rst new file mode 100644 index 00000000..bf7159b0 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.gdp6h.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.gdp6h +========================= + +.. currentmodule:: clouddrift.datasets + +.. 
autofunction:: gdp6h \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.glad.rst b/_sources/_autosummary/clouddrift.datasets.glad.rst new file mode 100644 index 00000000..5fba87d8 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.glad.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.glad +======================== + +.. currentmodule:: clouddrift.datasets + +.. autofunction:: glad \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.mosaic.rst b/_sources/_autosummary/clouddrift.datasets.mosaic.rst new file mode 100644 index 00000000..00d04959 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.mosaic.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.mosaic +========================== + +.. currentmodule:: clouddrift.datasets + +.. autofunction:: mosaic \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.rst b/_sources/_autosummary/clouddrift.datasets.rst new file mode 100644 index 00000000..87f043a1 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.rst @@ -0,0 +1,38 @@ +clouddrift.datasets +=================== + +.. automodule:: clouddrift.datasets + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + andro + gdp1h + gdp6h + glad + mosaic + spotters + subsurface_floats + yomaha + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.datasets.spotters.rst b/_sources/_autosummary/clouddrift.datasets.spotters.rst new file mode 100644 index 00000000..e36c57d3 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.spotters.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.spotters +============================ + +.. currentmodule:: clouddrift.datasets + +.. 
autofunction:: spotters \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.subsurface_floats.rst b/_sources/_autosummary/clouddrift.datasets.subsurface_floats.rst new file mode 100644 index 00000000..92168de2 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.subsurface_floats.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.subsurface\_floats +====================================== + +.. currentmodule:: clouddrift.datasets + +.. autofunction:: subsurface_floats \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.datasets.yomaha.rst b/_sources/_autosummary/clouddrift.datasets.yomaha.rst new file mode 100644 index 00000000..eb39b026 --- /dev/null +++ b/_sources/_autosummary/clouddrift.datasets.yomaha.rst @@ -0,0 +1,6 @@ +clouddrift.datasets.yomaha +========================== + +.. currentmodule:: clouddrift.datasets + +.. autofunction:: yomaha \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.kinematics.inertial_oscillation_from_position.rst b/_sources/_autosummary/clouddrift.kinematics.inertial_oscillation_from_position.rst new file mode 100644 index 00000000..05ef021c --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.inertial_oscillation_from_position.rst @@ -0,0 +1,6 @@ +clouddrift.kinematics.inertial\_oscillation\_from\_position +=========================================================== + +.. currentmodule:: clouddrift.kinematics + +.. autofunction:: inertial_oscillation_from_position \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.kinematics.kinetic_energy.rst b/_sources/_autosummary/clouddrift.kinematics.kinetic_energy.rst new file mode 100644 index 00000000..67455f5b --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.kinetic_energy.rst @@ -0,0 +1,6 @@ +clouddrift.kinematics.kinetic\_energy +===================================== + +.. currentmodule:: clouddrift.kinematics + +.. 
autofunction:: kinetic_energy \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.kinematics.position_from_velocity.rst b/_sources/_autosummary/clouddrift.kinematics.position_from_velocity.rst new file mode 100644 index 00000000..e7f6e94e --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.position_from_velocity.rst @@ -0,0 +1,6 @@ +clouddrift.kinematics.position\_from\_velocity +============================================== + +.. currentmodule:: clouddrift.kinematics + +.. autofunction:: position_from_velocity \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.kinematics.residual_position_from_displacement.rst b/_sources/_autosummary/clouddrift.kinematics.residual_position_from_displacement.rst new file mode 100644 index 00000000..a3662a4b --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.residual_position_from_displacement.rst @@ -0,0 +1,6 @@ +clouddrift.kinematics.residual\_position\_from\_displacement +============================================================ + +.. currentmodule:: clouddrift.kinematics + +.. autofunction:: residual_position_from_displacement \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.kinematics.rst b/_sources/_autosummary/clouddrift.kinematics.rst new file mode 100644 index 00000000..f2f7e742 --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.rst @@ -0,0 +1,36 @@ +clouddrift.kinematics +===================== + +.. automodule:: clouddrift.kinematics + :members: + + + + + + + + .. rubric:: Functions + + .. 
autosummary:: + :toctree: + + inertial_oscillation_from_position + kinetic_energy + position_from_velocity + residual_position_from_displacement + spin + velocity_from_position + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.kinematics.spin.rst b/_sources/_autosummary/clouddrift.kinematics.spin.rst new file mode 100644 index 00000000..1e635bda --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.spin.rst @@ -0,0 +1,6 @@ +clouddrift.kinematics.spin +========================== + +.. currentmodule:: clouddrift.kinematics + +.. autofunction:: spin \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.kinematics.velocity_from_position.rst b/_sources/_autosummary/clouddrift.kinematics.velocity_from_position.rst new file mode 100644 index 00000000..c4981723 --- /dev/null +++ b/_sources/_autosummary/clouddrift.kinematics.velocity_from_position.rst @@ -0,0 +1,6 @@ +clouddrift.kinematics.velocity\_from\_position +============================================== + +.. currentmodule:: clouddrift.kinematics + +.. autofunction:: velocity_from_position \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.chance_pair.rst b/_sources/_autosummary/clouddrift.pairs.chance_pair.rst new file mode 100644 index 00000000..1ec1fa3d --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.chance_pair.rst @@ -0,0 +1,6 @@ +clouddrift.pairs.chance\_pair +============================= + +.. currentmodule:: clouddrift.pairs + +.. autofunction:: chance_pair \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.chance_pairs_from_ragged.rst b/_sources/_autosummary/clouddrift.pairs.chance_pairs_from_ragged.rst new file mode 100644 index 00000000..685fb995 --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.chance_pairs_from_ragged.rst @@ -0,0 +1,6 @@ +clouddrift.pairs.chance\_pairs\_from\_ragged +============================================ + +.. currentmodule:: clouddrift.pairs + +.. 
autofunction:: chance_pairs_from_ragged \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.pair_bounding_box_overlap.rst b/_sources/_autosummary/clouddrift.pairs.pair_bounding_box_overlap.rst new file mode 100644 index 00000000..b3ea30b2 --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.pair_bounding_box_overlap.rst @@ -0,0 +1,6 @@ +clouddrift.pairs.pair\_bounding\_box\_overlap +============================================= + +.. currentmodule:: clouddrift.pairs + +.. autofunction:: pair_bounding_box_overlap \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.pair_space_distance.rst b/_sources/_autosummary/clouddrift.pairs.pair_space_distance.rst new file mode 100644 index 00000000..876afc5a --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.pair_space_distance.rst @@ -0,0 +1,6 @@ +clouddrift.pairs.pair\_space\_distance +====================================== + +.. currentmodule:: clouddrift.pairs + +.. autofunction:: pair_space_distance \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.pair_time_distance.rst b/_sources/_autosummary/clouddrift.pairs.pair_time_distance.rst new file mode 100644 index 00000000..62e06df9 --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.pair_time_distance.rst @@ -0,0 +1,6 @@ +clouddrift.pairs.pair\_time\_distance +===================================== + +.. currentmodule:: clouddrift.pairs + +.. autofunction:: pair_time_distance \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.pair_time_overlap.rst b/_sources/_autosummary/clouddrift.pairs.pair_time_overlap.rst new file mode 100644 index 00000000..692f5d5b --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.pair_time_overlap.rst @@ -0,0 +1,6 @@ +clouddrift.pairs.pair\_time\_overlap +==================================== + +.. currentmodule:: clouddrift.pairs + +.. 
autofunction:: pair_time_overlap \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.pairs.rst b/_sources/_autosummary/clouddrift.pairs.rst new file mode 100644 index 00000000..0c6ac09d --- /dev/null +++ b/_sources/_autosummary/clouddrift.pairs.rst @@ -0,0 +1,36 @@ +clouddrift.pairs +================ + +.. automodule:: clouddrift.pairs + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + chance_pair + chance_pairs_from_ragged + pair_bounding_box_overlap + pair_space_distance + pair_time_distance + pair_time_overlap + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.plotting.plot_ragged.rst b/_sources/_autosummary/clouddrift.plotting.plot_ragged.rst new file mode 100644 index 00000000..922dad62 --- /dev/null +++ b/_sources/_autosummary/clouddrift.plotting.plot_ragged.rst @@ -0,0 +1,6 @@ +clouddrift.plotting.plot\_ragged +================================ + +.. currentmodule:: clouddrift.plotting + +.. autofunction:: plot_ragged \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.plotting.rst b/_sources/_autosummary/clouddrift.plotting.rst new file mode 100644 index 00000000..f5010642 --- /dev/null +++ b/_sources/_autosummary/clouddrift.plotting.rst @@ -0,0 +1,31 @@ +clouddrift.plotting +=================== + +.. automodule:: clouddrift.plotting + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + plot_ragged + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.ragged.apply_ragged.rst b/_sources/_autosummary/clouddrift.ragged.apply_ragged.rst new file mode 100644 index 00000000..7f8ebef0 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.apply_ragged.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.apply\_ragged +=============================== + +.. currentmodule:: clouddrift.ragged + +.. 
autofunction:: apply_ragged \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.chunk.rst b/_sources/_autosummary/clouddrift.ragged.chunk.rst new file mode 100644 index 00000000..fd5a8e75 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.chunk.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.chunk +======================= + +.. currentmodule:: clouddrift.ragged + +.. autofunction:: chunk \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.prune.rst b/_sources/_autosummary/clouddrift.ragged.prune.rst new file mode 100644 index 00000000..77ef6d8c --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.prune.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.prune +======================= + +.. currentmodule:: clouddrift.ragged + +.. autofunction:: prune \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.ragged_to_regular.rst b/_sources/_autosummary/clouddrift.ragged.ragged_to_regular.rst new file mode 100644 index 00000000..50c0a58a --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.ragged_to_regular.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.ragged\_to\_regular +===================================== + +.. currentmodule:: clouddrift.ragged + +.. autofunction:: ragged_to_regular \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.regular_to_ragged.rst b/_sources/_autosummary/clouddrift.ragged.regular_to_ragged.rst new file mode 100644 index 00000000..fac8b976 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.regular_to_ragged.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.regular\_to\_ragged +===================================== + +.. currentmodule:: clouddrift.ragged + +.. 
autofunction:: regular_to_ragged \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.rowsize_to_index.rst b/_sources/_autosummary/clouddrift.ragged.rowsize_to_index.rst new file mode 100644 index 00000000..49758c65 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.rowsize_to_index.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.rowsize\_to\_index +==================================== + +.. currentmodule:: clouddrift.ragged + +.. autofunction:: rowsize_to_index \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.rst b/_sources/_autosummary/clouddrift.ragged.rst new file mode 100644 index 00000000..57219478 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.rst @@ -0,0 +1,39 @@ +clouddrift.ragged +================= + +.. automodule:: clouddrift.ragged + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + apply_ragged + chunk + prune + ragged_to_regular + regular_to_ragged + rowsize_to_index + segment + subset + unpack + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.ragged.segment.rst b/_sources/_autosummary/clouddrift.ragged.segment.rst new file mode 100644 index 00000000..9291fd1d --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.segment.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.segment +========================= + +.. currentmodule:: clouddrift.ragged + +.. autofunction:: segment \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.subset.rst b/_sources/_autosummary/clouddrift.ragged.subset.rst new file mode 100644 index 00000000..5c3953d6 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.subset.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.subset +======================== + +.. currentmodule:: clouddrift.ragged + +.. 
autofunction:: subset \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.ragged.unpack.rst b/_sources/_autosummary/clouddrift.ragged.unpack.rst new file mode 100644 index 00000000..e9b6faf2 --- /dev/null +++ b/_sources/_autosummary/clouddrift.ragged.unpack.rst @@ -0,0 +1,6 @@ +clouddrift.ragged.unpack +======================== + +.. currentmodule:: clouddrift.ragged + +.. autofunction:: unpack \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.raggedarray.RaggedArray.rst b/_sources/_autosummary/clouddrift.raggedarray.RaggedArray.rst new file mode 100644 index 00000000..b68a671d --- /dev/null +++ b/_sources/_autosummary/clouddrift.raggedarray.RaggedArray.rst @@ -0,0 +1,38 @@ +clouddrift.raggedarray.RaggedArray +================================== + +.. currentmodule:: clouddrift.raggedarray + +.. autoclass:: RaggedArray + :members: + :show-inheritance: + :inherited-members: + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~RaggedArray.__init__ + ~RaggedArray.allocate + ~RaggedArray.attributes + ~RaggedArray.from_awkward + ~RaggedArray.from_files + ~RaggedArray.from_netcdf + ~RaggedArray.from_parquet + ~RaggedArray.from_xarray + ~RaggedArray.number_of_observations + ~RaggedArray.to_awkward + ~RaggedArray.to_netcdf + ~RaggedArray.to_parquet + ~RaggedArray.to_xarray + ~RaggedArray.validate_attributes + + + + + + \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.raggedarray.rst b/_sources/_autosummary/clouddrift.raggedarray.rst new file mode 100644 index 00000000..867a9006 --- /dev/null +++ b/_sources/_autosummary/clouddrift.raggedarray.rst @@ -0,0 +1,32 @@ +clouddrift.raggedarray +====================== + +.. automodule:: clouddrift.raggedarray + :members: + + + + + + + + + + + + .. rubric:: Classes + + .. 
autosummary:: + :toctree: + :template: class.rst + + RaggedArray + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.signal.analytic_signal.rst b/_sources/_autosummary/clouddrift.signal.analytic_signal.rst new file mode 100644 index 00000000..29c06519 --- /dev/null +++ b/_sources/_autosummary/clouddrift.signal.analytic_signal.rst @@ -0,0 +1,6 @@ +clouddrift.signal.analytic\_signal +================================== + +.. currentmodule:: clouddrift.signal + +.. autofunction:: analytic_signal \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.signal.cartesian_to_rotary.rst b/_sources/_autosummary/clouddrift.signal.cartesian_to_rotary.rst new file mode 100644 index 00000000..12e02aaf --- /dev/null +++ b/_sources/_autosummary/clouddrift.signal.cartesian_to_rotary.rst @@ -0,0 +1,6 @@ +clouddrift.signal.cartesian\_to\_rotary +======================================= + +.. currentmodule:: clouddrift.signal + +.. autofunction:: cartesian_to_rotary \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.signal.ellipse_parameters.rst b/_sources/_autosummary/clouddrift.signal.ellipse_parameters.rst new file mode 100644 index 00000000..5b167071 --- /dev/null +++ b/_sources/_autosummary/clouddrift.signal.ellipse_parameters.rst @@ -0,0 +1,6 @@ +clouddrift.signal.ellipse\_parameters +===================================== + +.. currentmodule:: clouddrift.signal + +.. autofunction:: ellipse_parameters \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.signal.modulated_ellipse_signal.rst b/_sources/_autosummary/clouddrift.signal.modulated_ellipse_signal.rst new file mode 100644 index 00000000..e83afcfb --- /dev/null +++ b/_sources/_autosummary/clouddrift.signal.modulated_ellipse_signal.rst @@ -0,0 +1,6 @@ +clouddrift.signal.modulated\_ellipse\_signal +============================================ + +.. currentmodule:: clouddrift.signal + +.. 
autofunction:: modulated_ellipse_signal \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.signal.rotary_to_cartesian.rst b/_sources/_autosummary/clouddrift.signal.rotary_to_cartesian.rst new file mode 100644 index 00000000..1ee02b9f --- /dev/null +++ b/_sources/_autosummary/clouddrift.signal.rotary_to_cartesian.rst @@ -0,0 +1,6 @@ +clouddrift.signal.rotary\_to\_cartesian +======================================= + +.. currentmodule:: clouddrift.signal + +.. autofunction:: rotary_to_cartesian \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.signal.rst b/_sources/_autosummary/clouddrift.signal.rst new file mode 100644 index 00000000..c8716d8a --- /dev/null +++ b/_sources/_autosummary/clouddrift.signal.rst @@ -0,0 +1,35 @@ +clouddrift.signal +================= + +.. automodule:: clouddrift.signal + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + analytic_signal + cartesian_to_rotary + ellipse_parameters + modulated_ellipse_signal + rotary_to_cartesian + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.sphere.bearing.rst b/_sources/_autosummary/clouddrift.sphere.bearing.rst new file mode 100644 index 00000000..d28ddcf8 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.bearing.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.bearing +========================= + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: bearing \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.cartesian_to_spherical.rst b/_sources/_autosummary/clouddrift.sphere.cartesian_to_spherical.rst new file mode 100644 index 00000000..9287d08b --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.cartesian_to_spherical.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.cartesian\_to\_spherical +========================================== + +.. currentmodule:: clouddrift.sphere + +.. 
autofunction:: cartesian_to_spherical \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.cartesian_to_tangentplane.rst b/_sources/_autosummary/clouddrift.sphere.cartesian_to_tangentplane.rst new file mode 100644 index 00000000..9c7ec442 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.cartesian_to_tangentplane.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.cartesian\_to\_tangentplane +============================================= + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: cartesian_to_tangentplane \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.coriolis_frequency.rst b/_sources/_autosummary/clouddrift.sphere.coriolis_frequency.rst new file mode 100644 index 00000000..5a125065 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.coriolis_frequency.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.coriolis\_frequency +===================================== + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: coriolis_frequency \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.cumulative_distance.rst b/_sources/_autosummary/clouddrift.sphere.cumulative_distance.rst new file mode 100644 index 00000000..b28978eb --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.cumulative_distance.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.cumulative\_distance +====================================== + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: cumulative_distance \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.distance.rst b/_sources/_autosummary/clouddrift.sphere.distance.rst new file mode 100644 index 00000000..8e60bd83 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.distance.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.distance +========================== + +.. currentmodule:: clouddrift.sphere + +.. 
autofunction:: distance \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.plane_to_sphere.rst b/_sources/_autosummary/clouddrift.sphere.plane_to_sphere.rst new file mode 100644 index 00000000..1d8e161e --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.plane_to_sphere.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.plane\_to\_sphere +=================================== + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: plane_to_sphere \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.position_from_distance_and_bearing.rst b/_sources/_autosummary/clouddrift.sphere.position_from_distance_and_bearing.rst new file mode 100644 index 00000000..e13860d3 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.position_from_distance_and_bearing.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.position\_from\_distance\_and\_bearing +======================================================== + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: position_from_distance_and_bearing \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.recast_lon.rst b/_sources/_autosummary/clouddrift.sphere.recast_lon.rst new file mode 100644 index 00000000..324878dd --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.recast_lon.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.recast\_lon +============================= + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: recast_lon \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.recast_lon180.rst b/_sources/_autosummary/clouddrift.sphere.recast_lon180.rst new file mode 100644 index 00000000..a569e3e6 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.recast_lon180.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.recast\_lon180 +================================ + +.. currentmodule:: clouddrift.sphere + +.. 
autofunction:: recast_lon180 \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.recast_lon360.rst b/_sources/_autosummary/clouddrift.sphere.recast_lon360.rst new file mode 100644 index 00000000..da3b5567 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.recast_lon360.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.recast\_lon360 +================================ + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: recast_lon360 \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.rst b/_sources/_autosummary/clouddrift.sphere.rst new file mode 100644 index 00000000..e102c875 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.rst @@ -0,0 +1,44 @@ +clouddrift.sphere +================= + +.. automodule:: clouddrift.sphere + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + bearing + cartesian_to_spherical + cartesian_to_tangentplane + coriolis_frequency + cumulative_distance + distance + plane_to_sphere + position_from_distance_and_bearing + recast_lon + recast_lon180 + recast_lon360 + sphere_to_plane + spherical_to_cartesian + tangentplane_to_cartesian + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.sphere.sphere_to_plane.rst b/_sources/_autosummary/clouddrift.sphere.sphere_to_plane.rst new file mode 100644 index 00000000..a3d411fe --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.sphere_to_plane.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.sphere\_to\_plane +=================================== + +.. currentmodule:: clouddrift.sphere + +.. 
autofunction:: sphere_to_plane \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.spherical_to_cartesian.rst b/_sources/_autosummary/clouddrift.sphere.spherical_to_cartesian.rst new file mode 100644 index 00000000..656b2ab1 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.spherical_to_cartesian.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.spherical\_to\_cartesian +========================================== + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: spherical_to_cartesian \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.sphere.tangentplane_to_cartesian.rst b/_sources/_autosummary/clouddrift.sphere.tangentplane_to_cartesian.rst new file mode 100644 index 00000000..c60f14a9 --- /dev/null +++ b/_sources/_autosummary/clouddrift.sphere.tangentplane_to_cartesian.rst @@ -0,0 +1,6 @@ +clouddrift.sphere.tangentplane\_to\_cartesian +============================================= + +.. currentmodule:: clouddrift.sphere + +.. autofunction:: tangentplane_to_cartesian \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.morse_amplitude.rst b/_sources/_autosummary/clouddrift.wavelet.morse_amplitude.rst new file mode 100644 index 00000000..a76397b5 --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.morse_amplitude.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.morse\_amplitude +=================================== + +.. currentmodule:: clouddrift.wavelet + +.. autofunction:: morse_amplitude \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.morse_freq.rst b/_sources/_autosummary/clouddrift.wavelet.morse_freq.rst new file mode 100644 index 00000000..f2d7b9ac --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.morse_freq.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.morse\_freq +============================== + +.. currentmodule:: clouddrift.wavelet + +.. 
autofunction:: morse_freq \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.morse_logspace_freq.rst b/_sources/_autosummary/clouddrift.wavelet.morse_logspace_freq.rst new file mode 100644 index 00000000..8f13e282 --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.morse_logspace_freq.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.morse\_logspace\_freq +======================================== + +.. currentmodule:: clouddrift.wavelet + +.. autofunction:: morse_logspace_freq \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.morse_properties.rst b/_sources/_autosummary/clouddrift.wavelet.morse_properties.rst new file mode 100644 index 00000000..5135106c --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.morse_properties.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.morse\_properties +==================================== + +.. currentmodule:: clouddrift.wavelet + +.. autofunction:: morse_properties \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.morse_wavelet.rst b/_sources/_autosummary/clouddrift.wavelet.morse_wavelet.rst new file mode 100644 index 00000000..2358902d --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.morse_wavelet.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.morse\_wavelet +================================= + +.. currentmodule:: clouddrift.wavelet + +.. autofunction:: morse_wavelet \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.morse_wavelet_transform.rst b/_sources/_autosummary/clouddrift.wavelet.morse_wavelet_transform.rst new file mode 100644 index 00000000..7549d08b --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.morse_wavelet_transform.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.morse\_wavelet\_transform +============================================ + +.. currentmodule:: clouddrift.wavelet + +.. 
autofunction:: morse_wavelet_transform \ No newline at end of file diff --git a/_sources/_autosummary/clouddrift.wavelet.rst b/_sources/_autosummary/clouddrift.wavelet.rst new file mode 100644 index 00000000..17e01f58 --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.rst @@ -0,0 +1,37 @@ +clouddrift.wavelet +================== + +.. automodule:: clouddrift.wavelet + :members: + + + + + + + + .. rubric:: Functions + + .. autosummary:: + :toctree: + + morse_amplitude + morse_freq + morse_logspace_freq + morse_properties + morse_wavelet + morse_wavelet_transform + wavelet_transform + + + + + + + + + + + + + diff --git a/_sources/_autosummary/clouddrift.wavelet.wavelet_transform.rst b/_sources/_autosummary/clouddrift.wavelet.wavelet_transform.rst new file mode 100644 index 00000000..9458246c --- /dev/null +++ b/_sources/_autosummary/clouddrift.wavelet.wavelet_transform.rst @@ -0,0 +1,6 @@ +clouddrift.wavelet.wavelet\_transform +===================================== + +.. currentmodule:: clouddrift.wavelet + +.. autofunction:: wavelet_transform \ No newline at end of file diff --git a/_sources/api.rst b/_sources/api.rst new file mode 100644 index 00000000..7517f546 --- /dev/null +++ b/_sources/api.rst @@ -0,0 +1,27 @@ +API +=== + +Auto-generated summary of CloudDrift's API. For more details and examples, refer to the different Jupyter Notebooks. + +.. currentmodule:: clouddrift + +.. autosummary:: + :toctree: _autosummary + :template: module.rst + :recursive: + + adapters.gdp + adapters.gdp1h + adapters.gdp6h + adapters.glad + adapters.mosaic + adapters.subsurface_floats + datasets + kinematics + pairs + plotting + ragged + raggedarray + signal + sphere + wavelet diff --git a/_sources/contributing.rst b/_sources/contributing.rst new file mode 100644 index 00000000..a18b0302 --- /dev/null +++ b/_sources/contributing.rst @@ -0,0 +1,37 @@ +.. 
_contributing: + +Contributing +============ + +This project follows `NumFOCUS `_ code of conduct, the short version is: + +- Be kind to others. Do not insult or put down others. Behave professionally. Remember that harassment and sexist, racist, or exclusionary jokes are not appropriate. +- All communication should be appropriate for a professional audience including people of many different backgrounds. Sexual language and imagery is not appropriate. +- We are dedicated to providing a harassment-free community for everyone, regardless of gender, sexual orientation, gender identity and expression, disability, physical appearance, body size, race, or religion. +- We do not tolerate harassment of community members in any form. + +Thank you for helping make this a welcoming, friendly community for all. + +Bug reports and requests +------------------------ + +We encourage users to participate in the development of CloudDrift by filling out bug reports, participating in discussions, and requesting features on `https://github.com/Cloud-Drift/clouddrift `_. + +Contributing to the documentation +--------------------------------- + +We also welcome contributions to improving the documentation of the project. To create a local static documentation website, the following packages are required: + +.. code-block:: console + + conda install sphinx + pip install sphinx_book_theme + pip install sphinx-copybutton + +Then, from the ``clouddrift/docs`` directory, run + +.. code-block:: console + + make html + +to compile and output the documentation website to ``clouddrift/docs/_build/html/``. The index page ``index.html`` can be visualized in a web browser. Note that after modifying the documentation, it might be necessary to run ``make clean`` before rebuilding. diff --git a/_sources/datasets.rst b/_sources/datasets.rst new file mode 100644 index 00000000..e12ec970 --- /dev/null +++ b/_sources/datasets.rst @@ -0,0 +1,78 @@ +.. 
_datasets: + +Datasets +======== + +CloudDrift provides convenience functions to access real-world ragged-array +datasets. + +>>> from clouddrift.datasets import gdp1h +>>> ds = gdp1h() + + Dimensions: (traj: 17324, obs: 165754333) + Coordinates: + ids (obs) int64 ... + lat (obs) float32 ... + lon (obs) float32 ... + time (obs) datetime64[ns] ... + Dimensions without coordinates: traj, obs + Data variables: (12/55) + BuoyTypeManufacturer (traj) |S20 ... + BuoyTypeSensorArray (traj) |S20 ... + CurrentProgram (traj) float64 ... + DeployingCountry (traj) |S20 ... + DeployingShip (traj) |S20 ... + DeploymentComments (traj) |S20 ... + ... ... + sst1 (obs) float64 ... + sst2 (obs) float64 ... + typebuoy (traj) |S10 ... + typedeath (traj) int8 ... + ve (obs) float32 ... + vn (obs) float32 ... + Attributes: (12/16) + Conventions: CF-1.6 + acknowledgement: Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurio... + contributor_name: NOAA Global Drifter Program + contributor_role: Data Acquisition Center + date_created: 2022-12-09T06:02:29.684949 + doi: 10.25921/x46c-3620 + ... ... + processing_level: Level 2 QC by GDP drifter DAC + publisher_email: aoml.dftr@noaa.gov + publisher_name: GDP Drifter DAC + publisher_url: https://www.aoml.noaa.gov/phod/gdp + summary: Global Drifter Program hourly data + title: Global Drifter Program hourly drifting buoy collection + +Currently available datasets are: + +- :func:`clouddrift.datasets.andro`: The ANDRO dataset as a ragged array + processed from the upstream dataset hosted at the `SEANOE repository + `_. +- :func:`clouddrift.datasets.gdp1h`: 1-hourly Global Drifter Program (GDP) data + from a `cloud-optimized Zarr dataset on AWS `_. +- :func:`clouddrift.datasets.gdp6h`: 6-hourly GDP data from a ragged-array + NetCDF file hosted by the public HTTPS server at + `NOAA's Atlantic Oceanographic and Meteorological Laboratory (AOML) `_. 
+- :func:`clouddrift.datasets.glad`: 15-minute Grand LAgrangian Deployment (GLAD) + data produced by the Consortium for Advanced Research on Transport of + Hydrocarbon in the Environment (CARTHE) and hosted upstream at the `Gulf of + Mexico Research Initiative Information and Data Cooperative (GRIIDC) + `_. +- :func:`clouddrift.datasets.mosaic`: MOSAiC sea-ice drift dataset as a ragged + array processed from the upstream dataset hosted at the + `NSF's Arctic Data Center `_. +- :func:`clouddrift.datasets.subsurface_floats`: The subsurface float trajectories dataset as + hosted by NOAA AOML at + `NOAA's Atlantic Oceanographic and Meteorological Laboratory (AOML) _` + and maintained by Andree Ramsey and Heather Furey from the Woods Hole Oceanographic Institution. +- :func:`clouddrift.datasets.spotters`: The Sofar Ocean Spotters archive dataset as hosted at the public `AWS S3 bucket `_. +- :func:`clouddrift.datasets.yomaha`: The YoMaHa'07 dataset as a ragged array + processed from the upstream dataset hosted at the `Asia-Pacific Data-Research + Center (APDRC) `_. + +The GDP and the Spotters datasets are accessed lazily, so the data is only downloaded when +specific array values are referenced. The ANDRO, GLAD, MOSAiC, Subsurface Floats, and YoMaHa'07 +datasets are downloaded in their entirety when the function is called for the first +time and stored locally for later use. 
\ No newline at end of file diff --git a/_sources/index.rst b/_sources/index.rst new file mode 100644 index 00000000..1ec0e625 --- /dev/null +++ b/_sources/index.rst @@ -0,0 +1,62 @@ +CloudDrift, a platform for accelerating research with Lagrangian climate data +============================================================================= + +Lagrangian data typically refers to oceanic and atmosphere information acquired by observing platforms drifting with the flow they are embedded within, but also refers more broadly to the data originating from uncrewed platforms, vehicles, and animals that gather data along their unrestricted and often complex paths. Because such paths traverse both spatial and temporal dimensions, Lagrangian data can convolve spatial and temporal information that cannot always readily be organized in common data structures and stored in standard file formats with the help of common libraries and standards. + +As such, for both originators and users, Lagrangian data present challenges that the CloudDrift project aims to overcome. This project is funded by the `NSF EarthCube program `_ through `EarthCube Capabilities Grant No. 2126413 `_. + +Motivations +----------- + +The `Global Drifter Program (GDP) `_ of the US National Oceanic and Atmospheric Administration has released to date nearly 25,000 drifting buoys, or drifters, with the goal of obtaining observations of oceanic velocity, sea surface temperature, and sea level pressure. From these drifter observations, the GDP generates two data products: one of oceanic variables estimated along drifter trajectories at `hourly `_ time steps, and one at `six-hourly `_ steps. + +There are a few ways to retrieve the data, but all typically require time-consuming preprocessing steps in order to prepare the data for analysis. As an example, the datasets can be retrieved through an `ERDDAP server `_, but requests are limited in size. 
The latest `6-hourly dataset `_ is distributed as a collection of thousands of individual NetCDF files or as a series of `ASCII files `_. Until recently, the `hourly dataset `_ was distributed as a collection of individual NetCDF files (17,324 for version 1.04c) but is now distributed by NOAA NCEI as a `single NetCDF file `_ containing a series of ragged arrays, thanks to the work of CloudDrift. A single file simplifies data distribution, decreases metadata redundancies, and efficiently stores a Lagrangian data collection of uneven lengths. + +CloudDrift's analysis functions are centered around the ragged-array data +structure: + +.. image:: img/ragged_array.png + :width: 800 + :align: center + :alt: Ragged array schematic + +CloudDrift's goals are to simplify the necessary steps to get started with +Lagrangian datasets and to provide a cloud-ready library to accelerate +Lagrangian analysis. + +Getting started +--------------- + +* :doc:`install` +* :doc:`usage` +* :doc:`datasets` + +.. toctree:: + :hidden: + :maxdepth: 2 + :caption: Getting started + + install + usage + datasets + +Reference +--------- + +* :doc:`contributing` +* :doc:`api` + +.. toctree:: + :maxdepth: 2 + :hidden: + :caption: Reference + + contributing + api + +.. Indices and tables +.. ================== + +.. * :ref:`genindex` +.. * :ref:`modindex` +.. * :ref:`search` diff --git a/_sources/install.rst b/_sources/install.rst new file mode 100644 index 00000000..efcc947c --- /dev/null +++ b/_sources/install.rst @@ -0,0 +1,78 @@ +.. _install: + +Installation +============ + +You can install the latest release of CloudDrift using pip or Conda. +You can also install the latest development (unreleased) version from GitHub. + +pip +--- + +In your virtual environment, type: + +.. code-block:: text + + pip install clouddrift + +To install optional dependencies needed by the ``clouddrift.plotting`` module, +type: + +.. 
code-block:: text + + pip install matplotlib-base cartopy + +Conda +----- + +First add ``conda-forge`` to your channels in your Conda environment: + +.. code-block:: text + + conda config --add channels conda-forge + conda config --set channel_priority strict + +then install CloudDrift: + +.. code-block:: text + + conda install clouddrift + +To install optional dependencies needed by the ``clouddrift.plotting`` module, +type: + +.. code-block:: text + + conda install matplotlib-base cartopy + +Developers +---------- + +If you need the latest development version, get it from GitHub using pip: + +.. code-block:: text + + pip install git+https://github.com/Cloud-Drift/clouddrift + +Running tests +============= + +To run the tests, you need to first download the CloudDrift source code from +GitHub and install it in your virtual environment: + + +.. code-block:: text + + git clone https://github.com/cloud-drift/clouddrift + cd clouddrift + python3 -m venv venv + source venv/bin/activate + pip install . + +Then, run the tests like this: + +.. code-block:: text + + python -m unittest tests/*.py + +A quick how-to guide is provided on the `Usage `_ page. diff --git a/_sources/usage.rst b/_sources/usage.rst new file mode 100644 index 00000000..e9bae403 --- /dev/null +++ b/_sources/usage.rst @@ -0,0 +1,265 @@ +.. _usage: + +Usage +===== + +The CloudDrift library provides functions for: + +* Easy access to cloud-ready Lagrangian ragged-array datasets; +* Common Lagrangian analysis tasks on ragged arrays; +* Adapting custom Lagrangian datasets into ragged arrays. + +Let's start by importing the library and accessing a ready-to-use ragged-array +dataset. + +Accessing ragged-array Lagrangian datasets +------------------------------------------ + +We recommend to import the ``clouddrift`` using the ``cd`` shorthand, for convenience: + +>>> import clouddrift as cd + +CloudDrift provides a set of Lagrangian datasets that are ready to use. 
+They can be accessed via the ``datasets`` submodule. +In this example, we will load the NOAA's Global Drifter Program (GDP) hourly +dataset, which is hosted in a public AWS bucket as a cloud-optimized Zarr +dataset: + +>>> ds = cd.datasets.gdp1h() +>>> ds + +Dimensions: (traj: 17324, obs: 165754333) +Coordinates: + ids (obs) int64 ... + lat (obs) float32 ... + lon (obs) float32 ... + time (obs) datetime64[ns] ... +Dimensions without coordinates: traj, obs +Data variables: (12/55) + BuoyTypeManufacturer (traj) |S20 ... + BuoyTypeSensorArray (traj) |S20 ... + CurrentProgram (traj) float64 ... + DeployingCountry (traj) |S20 ... + DeployingShip (traj) |S20 ... + DeploymentComments (traj) |S20 ... + ... ... + sst1 (obs) float64 ... + sst2 (obs) float64 ... + typebuoy (traj) |S10 ... + typedeath (traj) int8 ... + ve (obs) float32 ... + vn (obs) float32 ... +Attributes: (12/16) + Conventions: CF-1.6 + acknowledgement: Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurio... + contributor_name: NOAA Global Drifter Program + contributor_role: Data Acquisition Center + date_created: 2022-12-09T06:02:29.684949 + doi: 10.25921/x46c-3620 + ... ... + processing_level: Level 2 QC by GDP drifter DAC + publisher_email: aoml.dftr@noaa.gov + publisher_name: GDP Drifter DAC + publisher_url: https://www.aoml.noaa.gov/phod/gdp + summary: Global Drifter Program hourly data + title: Global Drifter Program hourly drifting buoy collection + +The ``gdp1h`` function returns an Xarray ``Dataset`` instance of the ragged-array dataset. +While the dataset is quite large, around a dozen GB, it is not downloaded to your +local machine. Instead, the dataset is accessed directly from the cloud, and only +the data that is needed for the analysis is downloaded. This is possible thanks to +the cloud-optimized Zarr format, which allows for efficient access to the data +stored in the cloud. 
+ +Let's look at some variables in this dataset: + +>>> ds.lon + +[165754333 values with dtype=float32] +Coordinates: + ids (obs) int64 ... + lat (obs) float32 ... + lon (obs) float32 ... + time (obs) datetime64[ns] ... +Dimensions without coordinates: obs +Attributes: + long_name: Longitude + units: degrees_east + +You see that this array is very long--it has 165754333 elements. +This is because in a ragged array, many varying-length arrays are laid out as a +contiguous 1-dimensional array in memory. + +Let's look at the dataset dimensions: + +>>> ds.sizes +Frozen({'traj': 17324, 'obs': 165754333}) + +The ``traj`` dimension has 17324 elements, which is the number of individual +trajectories in the dataset. +The sum of their lengths equals the length of the ``obs`` dimension. +Internally, these dimensions, their lengths, and the ``rowsize`` +variable are used internally to make CloudDrift's analysis functions aware of +the bounds of each contiguous array within the ragged-array data structure. + +Doing common analysis tasks on ragged arrays +-------------------------------------------- + +Now that we have a ragged-array dataset loaded as an Xarray ``Dataset`` instance, +let's do some common analysis tasks on it. +Our dataset is on a remote server and fairly large (a dozen GB or so), so let's +first subset it to several trajectories so that we can more easily work with it. +The variable ``ID`` is the unique identifier for each trajectory: + +>>> ds.ID[:10].values +array([2578, 2582, 2583, 2592, 2612, 2613, 2622, 2623, 2931, 2932]) + +>>> from clouddrift.ragged import subset + +``subset`` allows you to subset a ragged array by some criterion. +In this case, we will subset it by the ``ID`` variable: + +>>> ds_sub = subset(ds, {"ID": list(ds.ID[:5])}) +>>> ds_sub + +Dimensions: (traj: 5, obs: 13612) +Coordinates: + ids (obs) int64 2578 2578 2578 2578 ... 2612 2612 2612 + lat (obs) float32 ... + lon (obs) float32 ... + time (obs) datetime64[ns] ... 
+Dimensions without coordinates: traj, obs +Data variables: (12/55) + BuoyTypeManufacturer (traj) |S20 ... + BuoyTypeSensorArray (traj) |S20 ... + CurrentProgram (traj) float64 ... + DeployingCountry (traj) |S20 ... + DeployingShip (traj) |S20 ... + DeploymentComments (traj) |S20 ... + ... ... + sst1 (obs) float64 ... + sst2 (obs) float64 ... + typebuoy (traj) |S10 ... + typedeath (traj) int8 ... + ve (obs) float32 ... + vn (obs) float32 ... +Attributes: (12/16) + Conventions: CF-1.6 + acknowledgement: Elipot, Shane; Sykulski, Adam; Lumpkin, Rick; Centurio... + contributor_name: NOAA Global Drifter Program + contributor_role: Data Acquisition Center + date_created: 2022-12-09T06:02:29.684949 + doi: 10.25921/x46c-3620 + ... ... + processing_level: Level 2 QC by GDP drifter DAC + publisher_email: aoml.dftr@noaa.gov + publisher_name: GDP Drifter DAC + publisher_url: https://www.aoml.noaa.gov/phod/gdp + summary: Global Drifter Program hourly data + title: Global Drifter Program hourly drifting buoy collection + +You see that we now have a subset of the original dataset, with 5 trajectories +and a total of 13612 observations. +This subset is small enough to quickly and easily work with for demonstration +purposes. +Let's see how we can compute the mean and maximum velocities of each trajectory. +To start, we'll need to obtain the velocities over all trajectory times. +Although the GDP dataset already comes with velocity variables, we won't use +them here so that we can learn how to compute them ourselves from positions. +``clouddrift``'s ``kinematics`` module provides the ``velocity_from_position`` +function that allows you to do just that. 
+ +>>> from clouddrift.kinematics import velocity_from_position + +At a minimum ``velocity_from_position`` requires three input parameters: +consecutive x- and y-coordinates and time, so we could do: + +>>> u, v = velocity_from_position(ds_sub.lon, ds_sub.lat, ds_sub.time) + +``velocity_from_position`` returns two arrays, ``u`` and ``v``, which are the +zonal and meridional velocities, respectively. +By default, it assumes that the coordinates are in degrees, and it handles the +great circle path calculation and longitude wraparound under the hood. +However, recall that ``ds_sub.lon``, ``ds_sub.lat``, and ``ds_sub.time`` are +ragged arrays, so we need a different approach to calculate velocities while +respecting the trajectory boundaries. +For this, we can use the ``ragged_apply`` function, which applies a function +to each trajectory in a ragged array, and returns the concatenated result. + +>>> from clouddrift.ragged import apply_ragged +>>> u, v = apply_ragged(velocity_from_position, [ds_sub.lon, ds_sub.lat, ds_sub.time], ds_sub.rowsize) + +``u`` and ``v`` here are still ragged arrays, which means that the five +contiguous trajectories are concatenated into 1-dimensional arrays. + +Now, let's compute the velocity magnitude in meters per second. +The time in this dataset is loaded in nanoseconds by default: + +>>> ds_sub.time.values +array(['2005-04-15T20:00:00.000000000', '2005-04-15T21:00:00.000000000', + '2005-04-15T22:00:00.000000000', ..., + '2005-10-02T03:00:00.000000000', '2005-10-02T04:00:00.000000000', + '2005-10-02T05:00:00.000000000'], dtype='datetime64[ns]') + +So, to obtain the velocity magnitude in meters per second, we'll need to +multiply our velocities by ``1e9``. 
+ +>>> velocity_magnitude = np.sqrt(u**2 + v**2) * 1e9 +>>> velocity_magnitude +array([0.28053388, 0.6164632 , 0.89032112, ..., 0.2790803 , 0.20095603, + 0.20095603]) + +>>> velocity_magnitude.mean(), velocity_magnitude.max() +(0.22115242718877506, 1.6958275672626286) + +However, these aren't the results we are looking for! Recall that we have the +velocity magnitude of five different trajectories concatenated into one array. +This means that we need to use ``apply_ragged`` again to compute the mean and +maximum values: + +>>> apply_ragged(np.mean, [velocity_magnitude], ds_sub.rowsize) +array([0.32865148, 0.17752435, 0.1220523 , 0.13281067, 0.14041268]) +>>> apply_ragged(np.max, [velocity_magnitude], ds_sub.rowsize) +array([1.69582757, 1.36804354, 0.97343434, 0.60353528, 1.05044213]) + +And there you go! We used ``clouddrift`` to: + +#. Load a real-world Lagrangian dataset from the cloud; +#. Subset the dataset by trajectory IDs; +#. Compute the velocity vectors and their magnitudes for each trajectory; +#. Compute the mean and maximum velocity magnitudes for each trajectory. + +``clouddrift`` offers many more functions for common Lagrangian analysis tasks. +Please explore the `API `_ +to learn about other functions and how to use them. + +Adapting custom Lagrangian datasets into ragged arrays +------------------------------------------------------ + +CloudDrift provides an easy way to convert custom Lagrangian datasets into +`contiguous ragged arrays `_. + +.. 
code-block:: python + + # Import a GDP-hourly adapter function + from clouddrift.adapters.gdp import to_raggedarray + + # Download 100 random GDP-hourly trajectories as a ragged array + ra = to_raggedarray(n_random_id=100) + + # Store to NetCDF and Parquet files + ra.to_netcdf("gdp.nc") + ra.to_parquet("gdp.parquet") + + # Convert to Xarray Dataset for analysis + ds = ra.to_xarray() + + # Alternatively, convert to Awkward Array for analysis + ds = ra.to_awkward() + +This snippet is specific to the hourly GDP dataset, however, you can use the +``RaggedArray`` class directly to convert other custom datasets into a ragged +array structure that is analysis ready via Xarray or Awkward Array packages. +The functions to do that are defined in the ``clouddrift.adapters`` submodule. +You can use these examples as a reference to ingest your own or other custom +Lagrangian datasets into ``RaggedArray``. \ No newline at end of file diff --git a/_static/basic.css b/_static/basic.css index 01192852..e760386b 100644 --- a/_static/basic.css +++ b/_static/basic.css @@ -4,7 +4,7 @@ * * Sphinx stylesheet -- basic theme. * - * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. 
* */ @@ -15,6 +15,12 @@ div.clearer { clear: both; } +div.section::after { + display: block; + content: ''; + clear: left; +} + /* -- relbar ---------------------------------------------------------------- */ div.related { @@ -49,7 +55,7 @@ div.sphinxsidebarwrapper { div.sphinxsidebar { float: left; - width: 230px; + width: 270px; margin-left: -100%; font-size: 90%; word-wrap: break-word; @@ -124,7 +130,7 @@ ul.search li a { font-weight: bold; } -ul.search li div.context { +ul.search li p.context { color: #888; margin: 2px 0 0 30px; text-align: left; @@ -216,7 +222,7 @@ table.modindextable td { /* -- general body styles --------------------------------------------------- */ div.body { - min-width: 450px; + min-width: 360px; max-width: 800px; } @@ -231,14 +237,8 @@ a.headerlink { visibility: hidden; } -a.brackets:before, -span.brackets > a:before{ - content: "["; -} - -a.brackets:after, -span.brackets > a:after { - content: "]"; +a:visited { + color: #551A8B; } h1:hover > a.headerlink, @@ -271,25 +271,25 @@ p.rubric { font-weight: bold; } -img.align-left, .figure.align-left, object.align-left { +img.align-left, figure.align-left, .figure.align-left, object.align-left { clear: left; float: left; margin-right: 1em; } -img.align-right, .figure.align-right, object.align-right { +img.align-right, figure.align-right, .figure.align-right, object.align-right { clear: right; float: right; margin-left: 1em; } -img.align-center, .figure.align-center, object.align-center { +img.align-center, figure.align-center, .figure.align-center, object.align-center { display: block; margin-left: auto; margin-right: auto; } -img.align-default, .figure.align-default { +img.align-default, figure.align-default, .figure.align-default { display: block; margin-left: auto; margin-right: auto; @@ -313,24 +313,35 @@ img.align-default, .figure.align-default { /* -- sidebars -------------------------------------------------------------- */ -div.sidebar { +div.sidebar, +aside.sidebar { margin: 0 0 
0.5em 1em; border: 1px solid #ddb; - padding: 7px 7px 0 7px; + padding: 7px; background-color: #ffe; width: 40%; float: right; + clear: right; + overflow-x: auto; } p.sidebar-title { font-weight: bold; } +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + /* -- topics ---------------------------------------------------------------- */ +nav.contents, +aside.topic, div.topic { border: 1px solid #ccc; - padding: 7px 7px 0 7px; + padding: 7px; margin: 10px 0 10px 0; } @@ -352,10 +363,6 @@ div.admonition dt { font-weight: bold; } -div.admonition dl { - margin-bottom: 0; -} - p.admonition-title { margin: 0px 10px 5px 0px; font-weight: bold; @@ -366,9 +373,34 @@ div.body p.centered { margin-top: 25px; } +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + /* -- tables ---------------------------------------------------------------- */ table.docutils { + margin-top: 10px; + margin-bottom: 10px; border: 0; border-collapse: collapse; } @@ -398,10 +430,6 @@ table.docutils td, table.docutils th { border-bottom: 1px solid #aaa; } -table.footnote td, table.footnote th { - border: 0 !important; -} - th { text-align: left; padding-right: 5px; @@ -416,32 +444,34 @@ table.citation td { border-bottom: none; } -th > p:first-child, -td > p:first-child { +th > :first-child, +td > :first-child { margin-top: 0px; } -th > p:last-child, -td > p:last-child { +th > :last-child, +td > :last-child { margin-bottom: 0px; } /* -- figures --------------------------------------------------------------- */ -div.figure { +div.figure, 
figure { margin: 0.5em; padding: 0.5em; } -div.figure p.caption { +div.figure p.caption, figcaption { padding: 0.3em; } -div.figure p.caption span.caption-number { +div.figure p.caption span.caption-number, +figcaption span.caption-number { font-style: italic; } -div.figure p.caption span.caption-text { +div.figure p.caption span.caption-text, +figcaption span.caption-text { } /* -- field list styles ----------------------------------------------------- */ @@ -468,10 +498,71 @@ table.field-list td, table.field-list th { /* -- hlist styles ---------------------------------------------------------- */ +table.hlist { + margin: 1em 0; +} + table.hlist td { vertical-align: top; } +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + /* -- other body styles ----------------------------------------------------- */ @@ -495,26 +586,53 @@ ol.upperroman { list-style: upper-roman; } -li > p:first-child { +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { margin-top: 0px; } -li > p:last-child { +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { margin-bottom: 0px; } -dl.footnote > dt, -dl.citation > 
dt { - float: left; +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; } -dl.footnote > dd, -dl.citation > dd { - margin-bottom: 0em; +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; } -dl.footnote > dd:after, -dl.citation > dd:after { +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { content: ""; clear: both; } @@ -531,10 +649,6 @@ dl.field-list > dt { padding-right: 5px; } -dl.field-list > dt:after { - content: ":"; -} - dl.field-list > dd { padding-left: 0.5em; margin-top: 0em; @@ -546,7 +660,7 @@ dl { margin-bottom: 15px; } -dd > p:first-child { +dd > :first-child { margin-top: 0px; } @@ -560,6 +674,21 @@ dd { margin-left: 30px; } +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + dt:target, span.highlighted { background-color: #fbe54e; } @@ -573,14 +702,6 @@ dl.glossary dt { font-size: 1.1em; } -.optional { - font-size: 1.3em; -} - -.sig-paren { - font-size: larger; -} - .versionmodified { font-style: italic; } @@ -621,8 +742,9 @@ dl.glossary dt { .classifier:before { font-style: normal; - margin: 0.5em; + margin: 0 0.5em; content: ":"; + display: inline-block; } abbr, acronym { @@ -630,6 +752,14 @@ abbr, acronym { cursor: help; } +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + /* -- code displays 
--------------------------------------------------------- */ pre { @@ -637,29 +767,69 @@ pre { overflow-y: hidden; /* fixes display issues on Chrome browsers */ } +pre, div[class*="highlight-"] { + clear: both; +} + span.pre { -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; } td.linenos pre { - padding: 5px 0px; border: 0; background-color: transparent; color: #aaa; } table.highlighttable { - margin-left: 0.5em; + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; } table.highlighttable td { - padding: 0 0.5em 0 0.5em; + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; } div.code-block-caption { + margin-top: 1em; padding: 2px 5px; font-size: small; } @@ -668,12 +838,14 @@ div.code-block-caption code { background-color: transparent; } -div.code-block-caption + div > div.highlight > pre { - margin-top: 0; -} - -div.doctest > div.highlight span.gp { /* gp: Generic.Prompt */ - user-select: none; +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ } div.code-block-caption span.caption-number { @@ -685,21 +857,7 @@ div.code-block-caption span.caption-text { } div.literal-block-wrapper { - padding: 1em 1em 0; -} - -div.literal-block-wrapper div.highlight { - margin: 0; -} - -code.descname { - background-color: transparent; - font-weight: bold; - font-size: 1.2em; -} - -code.descclassname { - 
background-color: transparent; + margin: 1em 0; } code.xref, a code { @@ -740,8 +898,7 @@ span.eqno { } span.eqno a.headerlink { - position: relative; - left: 0px; + position: absolute; z-index: 1; } diff --git a/_static/check-solid.svg b/_static/check-solid.svg new file mode 100644 index 00000000..92fad4b5 --- /dev/null +++ b/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/_static/clipboard.min.js b/_static/clipboard.min.js new file mode 100644 index 00000000..54b3c463 --- /dev/null +++ b/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/_static/copybutton.css b/_static/copybutton.css new file mode 100644 index 00000000..f1916ec7 --- /dev/null +++ b/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ 
+.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *

Short

+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/_static/copybutton.js b/_static/copybutton.js new file mode 100644 index 00000000..b3987037 --- /dev/null +++ b/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + 
}, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. 
+var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos, .gp'; + + let text = 
filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/_static/copybutton_funcs.js b/_static/copybutton_funcs.js new file mode 100644 index 00000000..dbe1aaad --- /dev/null +++ b/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/_static/doctools.js b/_static/doctools.js index daccd209..d06a71d7 100644 --- a/_static/doctools.js +++ b/_static/doctools.js @@ -2,314 +2,155 @@ * doctools.js * ~~~~~~~~~~~ * - * Sphinx JavaScript utilities for all documentation. + * Base JavaScript utilities for all Sphinx HTML documentation. * - * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. 
* */ - -/** - * select a different prefix for underscore - */ -$u = _.noConflict(); - -/** - * make the code below compatible with browsers without - * an installed firebug like debugger -if (!window.console || !console.firebug) { - var names = ["log", "debug", "info", "warn", "error", "assert", "dir", - "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", - "profile", "profileEnd"]; - window.console = {}; - for (var i = 0; i < names.length; ++i) - window.console[names[i]] = function() {}; -} - */ - -/** - * small helper function to urldecode strings - */ -jQuery.urldecode = function(x) { - return decodeURIComponent(x).replace(/\+/g, ' '); -}; - -/** - * small helper function to urlencode strings - */ -jQuery.urlencode = encodeURIComponent; - -/** - * This function returns the parsed url parameters of the - * current request. Multiple values per key are supported, - * it will always return arrays of strings for the value parts. - */ -jQuery.getQueryParameters = function(s) { - if (typeof s === 'undefined') - s = document.location.search; - var parts = s.substr(s.indexOf('?') + 1).split('&'); - var result = {}; - for (var i = 0; i < parts.length; i++) { - var tmp = parts[i].split('=', 2); - var key = jQuery.urldecode(tmp[0]); - var value = jQuery.urldecode(tmp[1]); - if (key in result) - result[key].push(value); - else - result[key] = [value]; +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); } - return result; }; -/** - * highlight a given string on a jquery object by wrapping it in - * span elements with the given class name. 
- */ -jQuery.fn.highlightText = function(text, className) { - function highlight(node, addItems) { - if (node.nodeType === 3) { - var val = node.nodeValue; - var pos = val.toLowerCase().indexOf(text); - if (pos >= 0 && - !jQuery(node.parentNode).hasClass(className) && - !jQuery(node.parentNode).hasClass("nohighlight")) { - var span; - var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); - if (isInSVG) { - span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); - } else { - span = document.createElement("span"); - span.className = className; - } - span.appendChild(document.createTextNode(val.substr(pos, text.length))); - node.parentNode.insertBefore(span, node.parentNode.insertBefore( - document.createTextNode(val.substr(pos + text.length)), - node.nextSibling)); - node.nodeValue = val.substr(0, pos); - if (isInSVG) { - var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); - var bbox = node.parentElement.getBBox(); - rect.x.baseVal.value = bbox.x; - rect.y.baseVal.value = bbox.y; - rect.width.baseVal.value = bbox.width; - rect.height.baseVal.value = bbox.height; - rect.setAttribute('class', className); - addItems.push({ - "parent": node.parentNode, - "target": rect}); - } - } - } - else if (!jQuery(node).is("button, select, textarea")) { - jQuery.each(node.childNodes, function() { - highlight(this, addItems); - }); - } - } - var addItems = []; - var result = this.each(function() { - highlight(this, addItems); - }); - for (var i = 0; i < addItems.length; ++i) { - jQuery(addItems[i].parent).before(addItems[i].target); - } - return result; -}; - -/* - * backward compatibility for jQuery.browser - * This will be supported until firefox bug is fixed. 
- */ -if (!jQuery.browser) { - jQuery.uaMatch = function(ua) { - ua = ua.toLowerCase(); - - var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || - /(webkit)[ \/]([\w.]+)/.exec(ua) || - /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || - /(msie) ([\w.]+)/.exec(ua) || - ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || - []; - - return { - browser: match[ 1 ] || "", - version: match[ 2 ] || "0" - }; - }; - jQuery.browser = {}; - jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; -} - /** * Small JavaScript module for the documentation. */ -var Documentation = { - - init : function() { - this.fixFirefoxAnchorBug(); - this.highlightSearchWords(); - this.initIndexTable(); - if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { - this.initOnKeyListeners(); - } +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); }, /** * i18n support */ - TRANSLATIONS : {}, - PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, - LOCALE : 'unknown', + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", // gettext and ngettext don't access this so that the functions // can safely bound to a different name (_ = Documentation.gettext) - gettext : function(string) { - var translated = Documentation.TRANSLATIONS[string]; - if (typeof translated === 'undefined') - return string; - return (typeof translated === 'string') ? translated : translated[0]; - }, - - ngettext : function(singular, plural, n) { - var translated = Documentation.TRANSLATIONS[singular]; - if (typeof translated === 'undefined') - return (n == 1) ? 
singular : plural; - return translated[Documentation.PLURALEXPR(n)]; + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } }, - addTranslations : function(catalog) { - for (var key in catalog.messages) - this.TRANSLATIONS[key] = catalog.messages[key]; - this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); - this.LOCALE = catalog.locale; + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; }, - /** - * add context elements like header anchor links - */ - addContextElements : function() { - $('div[id] > :header:first').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this headline')). - appendTo(this); - }); - $('dt[id]').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this definition')). 
- appendTo(this); - }); + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; }, /** - * workaround a firefox stupidity - * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 + * helper function to focus on search bar */ - fixFirefoxAnchorBug : function() { - if (document.location.hash && $.browser.mozilla) - window.setTimeout(function() { - document.location.href += ''; - }, 10); + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); }, /** - * highlight the search words provided in the url in the text + * Initialise the domain index toggle buttons */ - highlightSearchWords : function() { - var params = $.getQueryParameters(); - var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; - if (terms.length) { - var body = $('div.body'); - if (!body.length) { - body = $('body'); + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); } - window.setTimeout(function() { - $.each(terms, function() { - body.highlightText(this.toLowerCase(), 'highlighted'); - }); - }, 10); - $('') - .appendTo($('#searchbox')); - } - }, - - /** - * init the domain index toggle buttons - */ - initIndexTable : function() { - var togglers = $('img.toggler').click(function() { - var src = $(this).attr('src'); - var idnum = $(this).attr('id').substr(7); - $('tr.cg-' + idnum).toggle(); - if (src.substr(-9) === 'minus.png') - $(this).attr('src', src.substr(0, src.length-9) + 
'plus.png'); - else - $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); - }).css('display', ''); - if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { - togglers.click(); - } - }, - - /** - * helper function to hide the search marks again - */ - hideSearchWords : function() { - $('#searchbox .highlight-link').fadeOut(300); - $('span.highlighted').removeClass('highlighted'); - }, - - /** - * make the url absolute - */ - makeURL : function(relativeURL) { - return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; - }, + }; - /** - * get the current relative url - */ - getCurrentURL : function() { - var path = document.location.pathname; - var parts = path.split(/\//); - $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { - if (this === '..') - parts.pop(); - }); - var url = parts.join('/'); - return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); }, - initOnKeyListeners: function() { - $(document).keydown(function(event) { - var activeElementType = document.activeElement.tagName; - // don't navigate when in search box or textarea - if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT' - && !event.altKey && !event.ctrlKey && !event.metaKey && !event.shiftKey) { - switch (event.keyCode) { - case 37: // left - var prevHref = $('link[rel="prev"]').prop('href'); - if (prevHref) { - window.location.href = prevHref; - return false; + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + 
// bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); } - case 39: // right - var nextHref = $('link[rel="next"]').prop('href'); - if (nextHref) { - window.location.href = nextHref; - return false; + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); } + break; } } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } }); - } + }, }; // quick alias for translations -_ = Documentation.gettext; +const _ = Documentation.gettext; -$(document).ready(function() { - Documentation.init(); -}); +_ready(Documentation.init); diff --git a/_static/documentation_options.js b/_static/documentation_options.js index 4790c4d3..dab586c0 100644 --- a/_static/documentation_options.js +++ b/_static/documentation_options.js @@ -1,11 +1,13 @@ -var DOCUMENTATION_OPTIONS = { - URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), +const DOCUMENTATION_OPTIONS = { VERSION: '', - LANGUAGE: 'None', + LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', HAS_SOURCE: true, - SOURCELINK_SUFFIX: '.txt', - NAVIGATION_WITH_KEYS: false + SOURCELINK_SUFFIX: '', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + 
ENABLE_SEARCH_SHORTCUTS: true, }; \ No newline at end of file diff --git a/_static/favicon.ico b/_static/favicon.ico new file mode 100644 index 00000000..77b7bd31 Binary files /dev/null and b/_static/favicon.ico differ diff --git a/_static/graphviz.css b/_static/graphviz.css new file mode 100644 index 00000000..8d81c02e --- /dev/null +++ b/_static/graphviz.css @@ -0,0 +1,19 @@ +/* + * graphviz.css + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- graphviz extension. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +img.graphviz { + border: 0; + max-width: 100%; +} + +object.graphviz { + max-width: 100%; +} diff --git a/_static/images/logo_binder.svg b/_static/images/logo_binder.svg new file mode 100644 index 00000000..45fecf75 --- /dev/null +++ b/_static/images/logo_binder.svg @@ -0,0 +1,19 @@ + + + + +logo + + + + + + + + diff --git a/_static/images/logo_colab.png b/_static/images/logo_colab.png new file mode 100644 index 00000000..b7560ec2 Binary files /dev/null and b/_static/images/logo_colab.png differ diff --git a/_static/images/logo_deepnote.svg b/_static/images/logo_deepnote.svg new file mode 100644 index 00000000..fa77ebfc --- /dev/null +++ b/_static/images/logo_deepnote.svg @@ -0,0 +1 @@ + diff --git a/_static/images/logo_jupyterhub.svg b/_static/images/logo_jupyterhub.svg new file mode 100644 index 00000000..60cfe9f2 --- /dev/null +++ b/_static/images/logo_jupyterhub.svg @@ -0,0 +1 @@ +logo_jupyterhubHub diff --git a/_static/language_data.js b/_static/language_data.js index d2b4ee91..250f5665 100644 --- a/_static/language_data.js +++ b/_static/language_data.js @@ -5,15 +5,16 @@ * This script contains the language-specific data used by searchtools.js, * namely the list of stopwords, stemmer, scorer and splitter. * - * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 
* :license: BSD, see LICENSE for details. * */ -var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"]; +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; -/* Non-minified version JS is _stemmer.js if file is provided */ +/* Non-minified version is copied as a separate JS file, is available */ + /** * Porter Stemmer */ @@ -196,102 +197,3 @@ var Stemmer = function() { } } - - - - -var splitChars = (function() { - var result = {}; - var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648, - 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702, - 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971, - 2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345, - 3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761, - 3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823, - 4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125, - 8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695, - 11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587, - 43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141]; - var i, j, start, end; - for (i = 0; i < singles.length; i++) { - result[singles[i]] = true; - } - var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709], - [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161], - [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568], - [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807], - 
[1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047], - [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383], - [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450], - [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547], - [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673], - [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820], - [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946], - [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023], - [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173], - [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332], - [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481], - [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718], - [3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791], - [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095], - [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205], - [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687], - [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968], - [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869], - [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102], - [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271], - [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592], - [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822], - [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167], - [7204, 7231], [7242, 
7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959], - [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143], - [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318], - [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483], - [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101], - [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567], - [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292], - [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444], - [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783], - [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311], - [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511], - [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774], - [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071], - [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263], - [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519], - [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647], - [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967], - [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295], - [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274], - [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007], - [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381], - [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]]; - for (i = 0; i < ranges.length; i++) { - start = ranges[i][0]; - end = ranges[i][1]; - for (j = start; j <= end; j++) { - result[j] = true; - } - } - return result; -})(); - -function splitQuery(query) { - var 
result = []; - var start = -1; - for (var i = 0; i < query.length; i++) { - if (splitChars[query.charCodeAt(i)]) { - if (start !== -1) { - result.push(query.slice(start, i)); - start = -1; - } - } else if (start === -1) { - start = i; - } - } - if (start !== -1) { - result.push(query.slice(start)); - } - return result; -} - - diff --git a/_static/locales/ar/LC_MESSAGES/booktheme.mo b/_static/locales/ar/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..15541a6a Binary files /dev/null and b/_static/locales/ar/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ar/LC_MESSAGES/booktheme.po b/_static/locales/ar/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..1237f343 --- /dev/null +++ b/_static/locales/ar/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ar\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "تنزيل ملف المصدر" + +msgid "suggest edit" +msgstr "أقترح تحرير" + +msgid "Toggle navigation" +msgstr "تبديل التنقل" + +msgid "open issue" +msgstr "قضية مفتوحة" + +msgid "Download notebook file" +msgstr "تنزيل ملف دفتر الملاحظات" + +msgid "repository" +msgstr "مخزن" + +msgid "Theme by the" +msgstr "موضوع بواسطة" + +msgid "Print to PDF" +msgstr "طباعة إلى PDF" + +msgid "Download this page" +msgstr "قم بتنزيل هذه الصفحة" + +msgid "Copyright" +msgstr "حقوق النشر" + +msgid "Last updated on" +msgstr "آخر تحديث في" + +msgid "Launch" +msgstr "إطلاق" + +msgid "Open an issue" +msgstr "افتح قضية" + +msgid "Fullscreen mode" +msgstr "وضع ملء الشاشة" + +msgid "Sphinx Book Theme" +msgstr "موضوع كتاب أبو الهول" + +msgid "Contents" +msgstr "محتويات" + +msgid "Edit this page" +msgstr "قم بتحرير هذه الصفحة" + +msgid "next page" +msgstr "الصفحة التالية" + +msgid "Source repository" +msgstr "مستودع المصدر" + +msgid "By" +msgstr "بواسطة" + 
+msgid "By the" +msgstr "بواسطة" + +msgid "previous page" +msgstr "الصفحة السابقة" diff --git a/_static/locales/bg/LC_MESSAGES/booktheme.mo b/_static/locales/bg/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..da951200 Binary files /dev/null and b/_static/locales/bg/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/bg/LC_MESSAGES/booktheme.po b/_static/locales/bg/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..45a6ed0a --- /dev/null +++ b/_static/locales/bg/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: bg\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Изтеглете изходния файл" + +msgid "suggest edit" +msgstr "предложи редактиране" + +msgid "Toggle navigation" +msgstr "Превключване на навигацията" + +msgid "open issue" +msgstr "отворен брой" + +msgid "Download notebook file" +msgstr "Изтеглете файла на бележника" + +msgid "repository" +msgstr "хранилище" + +msgid "Theme by the" +msgstr "Тема от" + +msgid "Print to PDF" +msgstr "Печат в PDF" + +msgid "Download this page" +msgstr "Изтеглете тази страница" + +msgid "Copyright" +msgstr "Авторско право" + +msgid "Last updated on" +msgstr "Последна актуализация на" + +msgid "Launch" +msgstr "Стартиране" + +msgid "Open an issue" +msgstr "Отворете проблем" + +msgid "Fullscreen mode" +msgstr "Режим на цял екран" + +msgid "Sphinx Book Theme" +msgstr "Тема на книгата Sphinx" + +msgid "Contents" +msgstr "Съдържание" + +msgid "Edit this page" +msgstr "Редактирайте тази страница" + +msgid "next page" +msgstr "Следваща страница" + +msgid "Source repository" +msgstr "Хранилище на източника" + +msgid "By" +msgstr "От" + +msgid "By the" +msgstr "По" + +msgid "previous page" +msgstr "предишна страница" diff --git a/_static/locales/bn/LC_MESSAGES/booktheme.mo 
b/_static/locales/bn/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..6b96639b Binary files /dev/null and b/_static/locales/bn/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/bn/LC_MESSAGES/booktheme.po b/_static/locales/bn/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..122a369e --- /dev/null +++ b/_static/locales/bn/LC_MESSAGES/booktheme.po @@ -0,0 +1,63 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: bn\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "উত্স ফাইল ডাউনলোড করুন" + +msgid "Toggle navigation" +msgstr "নেভিগেশন টগল করুন" + +msgid "open issue" +msgstr "খোলা সমস্যা" + +msgid "Download notebook file" +msgstr "নোটবুক ফাইল ডাউনলোড করুন" + +msgid "Theme by the" +msgstr "থিম দ্বারা" + +msgid "Print to PDF" +msgstr "পিডিএফ প্রিন্ট করুন" + +msgid "Download this page" +msgstr "এই পৃষ্ঠাটি ডাউনলোড করুন" + +msgid "Copyright" +msgstr "কপিরাইট" + +msgid "Last updated on" +msgstr "সর্বশেষ আপডেট" + +msgid "Launch" +msgstr "শুরু করা" + +msgid "Open an issue" +msgstr "একটি সমস্যা খুলুন" + +msgid "Sphinx Book Theme" +msgstr "স্পিনিক্স বুক থিম" + +msgid "Edit this page" +msgstr "এই পৃষ্ঠাটি সম্পাদনা করুন" + +msgid "next page" +msgstr "পরবর্তী পৃষ্ঠা" + +msgid "Source repository" +msgstr "উত্স সংগ্রহস্থল" + +msgid "By" +msgstr "দ্বারা" + +msgid "By the" +msgstr "দ্বারা" + +msgid "previous page" +msgstr "আগের পৃষ্ঠা" diff --git a/_static/locales/ca/LC_MESSAGES/booktheme.mo b/_static/locales/ca/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..a4dd30e9 Binary files /dev/null and b/_static/locales/ca/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ca/LC_MESSAGES/booktheme.po b/_static/locales/ca/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..c757deb8 --- /dev/null +++ b/_static/locales/ca/LC_MESSAGES/booktheme.po @@ 
-0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ca\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Baixeu el fitxer font" + +msgid "suggest edit" +msgstr "suggerir edició" + +msgid "Toggle navigation" +msgstr "Commuta la navegació" + +msgid "open issue" +msgstr "número obert" + +msgid "Download notebook file" +msgstr "Descarregar fitxer de quadern" + +msgid "Theme by the" +msgstr "Tema del" + +msgid "Print to PDF" +msgstr "Imprimeix a PDF" + +msgid "Download this page" +msgstr "Descarregueu aquesta pàgina" + +msgid "Copyright" +msgstr "Copyright" + +msgid "Last updated on" +msgstr "Darrera actualització el" + +msgid "Launch" +msgstr "Llançament" + +msgid "Open an issue" +msgstr "Obriu un número" + +msgid "Sphinx Book Theme" +msgstr "Tema del llibre Esfinx" + +msgid "Edit this page" +msgstr "Editeu aquesta pàgina" + +msgid "next page" +msgstr "pàgina següent" + +msgid "Source repository" +msgstr "Dipòsit de fonts" + +msgid "By" +msgstr "Per" + +msgid "By the" +msgstr "Per la" + +msgid "previous page" +msgstr "Pàgina anterior" diff --git a/_static/locales/cs/LC_MESSAGES/booktheme.mo b/_static/locales/cs/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..c39e01a6 Binary files /dev/null and b/_static/locales/cs/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/cs/LC_MESSAGES/booktheme.po b/_static/locales/cs/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..3571c23b --- /dev/null +++ b/_static/locales/cs/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: cs\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Stáhněte si zdrojový soubor" 
+ +msgid "suggest edit" +msgstr "navrhnout úpravy" + +msgid "Toggle navigation" +msgstr "Přepnout navigaci" + +msgid "open issue" +msgstr "otevřené číslo" + +msgid "Download notebook file" +msgstr "Stáhnout soubor poznámkového bloku" + +msgid "repository" +msgstr "úložiště" + +msgid "Theme by the" +msgstr "Téma od" + +msgid "Print to PDF" +msgstr "Tisk do PDF" + +msgid "Download this page" +msgstr "Stáhněte si tuto stránku" + +msgid "Copyright" +msgstr "autorská práva" + +msgid "Last updated on" +msgstr "Naposledy aktualizováno" + +msgid "Launch" +msgstr "Zahájení" + +msgid "Open an issue" +msgstr "Otevřete problém" + +msgid "Fullscreen mode" +msgstr "Režim celé obrazovky" + +msgid "Sphinx Book Theme" +msgstr "Téma knihy Sfinga" + +msgid "Contents" +msgstr "Obsah" + +msgid "Edit this page" +msgstr "Upravit tuto stránku" + +msgid "next page" +msgstr "další strana" + +msgid "Source repository" +msgstr "Zdrojové úložiště" + +msgid "By" +msgstr "Podle" + +msgid "By the" +msgstr "Podle" + +msgid "previous page" +msgstr "předchozí stránka" diff --git a/_static/locales/da/LC_MESSAGES/booktheme.mo b/_static/locales/da/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..f43157d7 Binary files /dev/null and b/_static/locales/da/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/da/LC_MESSAGES/booktheme.po b/_static/locales/da/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..c39223fd --- /dev/null +++ b/_static/locales/da/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: da\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Download kildefil" + +msgid "suggest edit" +msgstr "foreslå redigering" + +msgid "Toggle navigation" +msgstr "Skift navigation" + +msgid "open issue" +msgstr "åbent nummer" + +msgid "Download notebook file" 
+msgstr "Download notesbog-fil" + +msgid "repository" +msgstr "lager" + +msgid "Theme by the" +msgstr "Tema af" + +msgid "Print to PDF" +msgstr "Udskriv til PDF" + +msgid "Download this page" +msgstr "Download denne side" + +msgid "Copyright" +msgstr "ophavsret" + +msgid "Last updated on" +msgstr "Sidst opdateret den" + +msgid "Launch" +msgstr "Start" + +msgid "Open an issue" +msgstr "Åbn et problem" + +msgid "Fullscreen mode" +msgstr "Fuldskærmstilstand" + +msgid "Sphinx Book Theme" +msgstr "Sphinx bogtema" + +msgid "Contents" +msgstr "Indhold" + +msgid "Edit this page" +msgstr "Rediger denne side" + +msgid "next page" +msgstr "Næste side" + +msgid "Source repository" +msgstr "Kildelager" + +msgid "By" +msgstr "Ved" + +msgid "By the" +msgstr "Ved" + +msgid "previous page" +msgstr "forrige side" diff --git a/_static/locales/de/LC_MESSAGES/booktheme.mo b/_static/locales/de/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..648b565c Binary files /dev/null and b/_static/locales/de/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/de/LC_MESSAGES/booktheme.po b/_static/locales/de/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..e22b505f --- /dev/null +++ b/_static/locales/de/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: de\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Quelldatei herunterladen" + +msgid "suggest edit" +msgstr "vorschlagen zu bearbeiten" + +msgid "Toggle navigation" +msgstr "Navigation umschalten" + +msgid "open issue" +msgstr "offenes Thema" + +msgid "Download notebook file" +msgstr "Notebook-Datei herunterladen" + +msgid "repository" +msgstr "Repository" + +msgid "Theme by the" +msgstr "Thema von der" + +msgid "Print to PDF" +msgstr "In PDF drucken" + +msgid "Download this page" +msgstr "Laden Sie 
diese Seite herunter" + +msgid "Copyright" +msgstr "Urheberrechte ©" + +msgid "Last updated on" +msgstr "Zuletzt aktualisiert am" + +msgid "Launch" +msgstr "Starten" + +msgid "Open an issue" +msgstr "Öffnen Sie ein Problem" + +msgid "Fullscreen mode" +msgstr "Vollbildmodus" + +msgid "Sphinx Book Theme" +msgstr "Sphinx-Buch-Thema" + +msgid "Contents" +msgstr "Inhalt" + +msgid "Edit this page" +msgstr "Bearbeite diese Seite" + +msgid "next page" +msgstr "Nächste Seite" + +msgid "Source repository" +msgstr "Quell-Repository" + +msgid "By" +msgstr "Durch" + +msgid "By the" +msgstr "Bis zum" + +msgid "previous page" +msgstr "vorherige Seite" diff --git a/_static/locales/el/LC_MESSAGES/booktheme.mo b/_static/locales/el/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..fca6e935 Binary files /dev/null and b/_static/locales/el/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/el/LC_MESSAGES/booktheme.po b/_static/locales/el/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..588f2efd --- /dev/null +++ b/_static/locales/el/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: el\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Λήψη αρχείου προέλευσης" + +msgid "suggest edit" +msgstr "προτείνω επεξεργασία" + +msgid "Toggle navigation" +msgstr "Εναλλαγή πλοήγησης" + +msgid "open issue" +msgstr "ανοιχτό ζήτημα" + +msgid "Download notebook file" +msgstr "Λήψη αρχείου σημειωματάριου" + +msgid "repository" +msgstr "αποθήκη" + +msgid "Theme by the" +msgstr "Θέμα από το" + +msgid "Print to PDF" +msgstr "Εκτύπωση σε PDF" + +msgid "Download this page" +msgstr "Λήψη αυτής της σελίδας" + +msgid "Copyright" +msgstr "Πνευματική ιδιοκτησία" + +msgid "Last updated on" +msgstr "Τελευταία ενημέρωση στις" + +msgid "Launch" +msgstr "Εκτόξευση" + +msgid 
"Open an issue" +msgstr "Ανοίξτε ένα ζήτημα" + +msgid "Fullscreen mode" +msgstr "ΛΕΙΤΟΥΡΓΙΑ ΠΛΗΡΟΥΣ ΟΘΟΝΗΣ" + +msgid "Sphinx Book Theme" +msgstr "Θέμα βιβλίου Sphinx" + +msgid "Contents" +msgstr "Περιεχόμενα" + +msgid "Edit this page" +msgstr "Επεξεργαστείτε αυτήν τη σελίδα" + +msgid "next page" +msgstr "επόμενη σελίδα" + +msgid "Source repository" +msgstr "Αποθήκη πηγής" + +msgid "By" +msgstr "Με" + +msgid "By the" +msgstr "Από το" + +msgid "previous page" +msgstr "προηγούμενη σελίδα" diff --git a/_static/locales/eo/LC_MESSAGES/booktheme.mo b/_static/locales/eo/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..d1072bbe Binary files /dev/null and b/_static/locales/eo/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/eo/LC_MESSAGES/booktheme.po b/_static/locales/eo/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..4614fe0a --- /dev/null +++ b/_static/locales/eo/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: eo\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Elŝutu fontodosieron" + +msgid "suggest edit" +msgstr "sugesti redaktadon" + +msgid "Toggle navigation" +msgstr "Ŝalti navigadon" + +msgid "open issue" +msgstr "malferma numero" + +msgid "Download notebook file" +msgstr "Elŝutu kajeran dosieron" + +msgid "repository" +msgstr "deponejo" + +msgid "Theme by the" +msgstr "Temo de la" + +msgid "Print to PDF" +msgstr "Presi al PDF" + +msgid "Download this page" +msgstr "Elŝutu ĉi tiun paĝon" + +msgid "Copyright" +msgstr "Kopirajto" + +msgid "Last updated on" +msgstr "Laste ĝisdatigita la" + +msgid "Launch" +msgstr "Lanĉo" + +msgid "Open an issue" +msgstr "Malfermu numeron" + +msgid "Fullscreen mode" +msgstr "Plenekrana reĝimo" + +msgid "Sphinx Book Theme" +msgstr "Sfinksa Libro-Temo" + +msgid "Contents" +msgstr 
"Enhavo" + +msgid "Edit this page" +msgstr "Redaktu ĉi tiun paĝon" + +msgid "next page" +msgstr "sekva paĝo" + +msgid "Source repository" +msgstr "Fonto-deponejo" + +msgid "By" +msgstr "De" + +msgid "By the" +msgstr "Per la" + +msgid "previous page" +msgstr "antaŭa paĝo" diff --git a/_static/locales/es/LC_MESSAGES/booktheme.mo b/_static/locales/es/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..ba2ee4dc Binary files /dev/null and b/_static/locales/es/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/es/LC_MESSAGES/booktheme.po b/_static/locales/es/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..b4fccf19 --- /dev/null +++ b/_static/locales/es/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: es\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Descargar archivo fuente" + +msgid "suggest edit" +msgstr "sugerir editar" + +msgid "Toggle navigation" +msgstr "Navegación de palanca" + +msgid "open issue" +msgstr "Tema abierto" + +msgid "Download notebook file" +msgstr "Descargar archivo de cuaderno" + +msgid "repository" +msgstr "repositorio" + +msgid "Theme by the" +msgstr "Tema por el" + +msgid "Print to PDF" +msgstr "Imprimir en PDF" + +msgid "Download this page" +msgstr "Descarga esta pagina" + +msgid "Copyright" +msgstr "Derechos de autor" + +msgid "Last updated on" +msgstr "Ultima actualización en" + +msgid "Launch" +msgstr "Lanzamiento" + +msgid "Open an issue" +msgstr "Abrir un problema" + +msgid "Fullscreen mode" +msgstr "Modo de pantalla completa" + +msgid "Sphinx Book Theme" +msgstr "Tema del libro de la esfinge" + +msgid "Contents" +msgstr "Contenido" + +msgid "Edit this page" +msgstr "Edita esta página" + +msgid "next page" +msgstr "siguiente página" + +msgid "Source repository" +msgstr "Repositorio de 
origen" + +msgid "By" +msgstr "Por" + +msgid "By the" +msgstr "Por el" + +msgid "previous page" +msgstr "pagina anterior" diff --git a/_static/locales/et/LC_MESSAGES/booktheme.mo b/_static/locales/et/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..983b8239 Binary files /dev/null and b/_static/locales/et/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/et/LC_MESSAGES/booktheme.po b/_static/locales/et/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..b748b37f --- /dev/null +++ b/_static/locales/et/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: et\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Laadige alla lähtefail" + +msgid "suggest edit" +msgstr "soovita muuta" + +msgid "Toggle navigation" +msgstr "Lülita navigeerimine sisse" + +msgid "open issue" +msgstr "avatud küsimus" + +msgid "Download notebook file" +msgstr "Laadige sülearvuti fail alla" + +msgid "repository" +msgstr "hoidla" + +msgid "Theme by the" +msgstr "Teema" + +msgid "Print to PDF" +msgstr "Prindi PDF-i" + +msgid "Download this page" +msgstr "Laadige see leht alla" + +msgid "Copyright" +msgstr "Autoriõigus" + +msgid "Last updated on" +msgstr "Viimati uuendatud" + +msgid "Launch" +msgstr "Käivitage" + +msgid "Open an issue" +msgstr "Avage probleem" + +msgid "Fullscreen mode" +msgstr "Täisekraanirežiim" + +msgid "Sphinx Book Theme" +msgstr "Sfinksiraamatu teema" + +msgid "Contents" +msgstr "Sisu" + +msgid "Edit this page" +msgstr "Muutke seda lehte" + +msgid "next page" +msgstr "järgmine leht" + +msgid "Source repository" +msgstr "Allikahoidla" + +msgid "By" +msgstr "Kõrval" + +msgid "By the" +msgstr "Autor" + +msgid "previous page" +msgstr "eelmine leht" diff --git a/_static/locales/fi/LC_MESSAGES/booktheme.mo 
b/_static/locales/fi/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..d8ac0545 Binary files /dev/null and b/_static/locales/fi/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/fi/LC_MESSAGES/booktheme.po b/_static/locales/fi/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..f58cf58d --- /dev/null +++ b/_static/locales/fi/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fi\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Lataa lähdetiedosto" + +msgid "suggest edit" +msgstr "ehdottaa muokkausta" + +msgid "Toggle navigation" +msgstr "Vaihda navigointia" + +msgid "open issue" +msgstr "avoin ongelma" + +msgid "Download notebook file" +msgstr "Lataa muistikirjatiedosto" + +msgid "repository" +msgstr "arkisto" + +msgid "Theme by the" +msgstr "Teeman tekijä" + +msgid "Print to PDF" +msgstr "Tulosta PDF-tiedostoon" + +msgid "Download this page" +msgstr "Lataa tämä sivu" + +msgid "Copyright" +msgstr "Tekijänoikeus" + +msgid "Last updated on" +msgstr "Viimeksi päivitetty" + +msgid "Launch" +msgstr "Tuoda markkinoille" + +msgid "Open an issue" +msgstr "Avaa ongelma" + +msgid "Fullscreen mode" +msgstr "Koko näytön tila" + +msgid "Sphinx Book Theme" +msgstr "Sphinx-kirjan teema" + +msgid "Contents" +msgstr "Sisällys" + +msgid "Edit this page" +msgstr "Muokkaa tätä sivua" + +msgid "next page" +msgstr "seuraava sivu" + +msgid "Source repository" +msgstr "Lähteen arkisto" + +msgid "By" +msgstr "Tekijä" + +msgid "By the" +msgstr "Mukaan" + +msgid "previous page" +msgstr "Edellinen sivu" diff --git a/_static/locales/fr/LC_MESSAGES/booktheme.mo b/_static/locales/fr/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..f663d39f Binary files /dev/null and b/_static/locales/fr/LC_MESSAGES/booktheme.mo differ diff --git 
a/_static/locales/fr/LC_MESSAGES/booktheme.po b/_static/locales/fr/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..8a6c9461 --- /dev/null +++ b/_static/locales/fr/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fr\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Télécharger le fichier source" + +msgid "suggest edit" +msgstr "suggestion de modification" + +msgid "Toggle navigation" +msgstr "Basculer la navigation" + +msgid "open issue" +msgstr "signaler un problème" + +msgid "Download notebook file" +msgstr "Télécharger le fichier notebook" + +msgid "repository" +msgstr "dépôt" + +msgid "Theme by the" +msgstr "Thème par le" + +msgid "Print to PDF" +msgstr "Imprimer au format PDF" + +msgid "Download this page" +msgstr "Téléchargez cette page" + +msgid "Copyright" +msgstr "droits d'auteur" + +msgid "Last updated on" +msgstr "Dernière mise à jour le" + +msgid "Launch" +msgstr "lancement" + +msgid "Open an issue" +msgstr "Ouvrez un problème" + +msgid "Fullscreen mode" +msgstr "Mode plein écran" + +msgid "Sphinx Book Theme" +msgstr "Thème du livre Sphinx" + +msgid "Contents" +msgstr "Contenu" + +msgid "Edit this page" +msgstr "Modifier cette page" + +msgid "next page" +msgstr "page suivante" + +msgid "Source repository" +msgstr "Dépôt source" + +msgid "By" +msgstr "Par" + +msgid "By the" +msgstr "Par le" + +msgid "previous page" +msgstr "page précédente" diff --git a/_static/locales/hr/LC_MESSAGES/booktheme.mo b/_static/locales/hr/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..eca4a1a2 Binary files /dev/null and b/_static/locales/hr/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/hr/LC_MESSAGES/booktheme.po b/_static/locales/hr/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..4ceb3899 --- /dev/null 
+++ b/_static/locales/hr/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: hr\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Preuzmi izvornu datoteku" + +msgid "suggest edit" +msgstr "predloži uređivanje" + +msgid "Toggle navigation" +msgstr "Uključi / isključi navigaciju" + +msgid "open issue" +msgstr "otvoreno izdanje" + +msgid "Download notebook file" +msgstr "Preuzmi datoteku bilježnice" + +msgid "repository" +msgstr "spremište" + +msgid "Theme by the" +msgstr "Tema autora" + +msgid "Print to PDF" +msgstr "Ispis u PDF" + +msgid "Download this page" +msgstr "Preuzmite ovu stranicu" + +msgid "Copyright" +msgstr "Autorska prava" + +msgid "Last updated on" +msgstr "Posljednje ažuriranje:" + +msgid "Launch" +msgstr "Pokrenite" + +msgid "Open an issue" +msgstr "Otvorite izdanje" + +msgid "Fullscreen mode" +msgstr "Način preko cijelog zaslona" + +msgid "Sphinx Book Theme" +msgstr "Tema knjige Sphinx" + +msgid "Contents" +msgstr "Sadržaj" + +msgid "Edit this page" +msgstr "Uredite ovu stranicu" + +msgid "next page" +msgstr "sljedeća stranica" + +msgid "Source repository" +msgstr "Izvorno spremište" + +msgid "By" +msgstr "Po" + +msgid "By the" +msgstr "Od strane" + +msgid "previous page" +msgstr "Prethodna stranica" diff --git a/_static/locales/id/LC_MESSAGES/booktheme.mo b/_static/locales/id/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..d07a06a9 Binary files /dev/null and b/_static/locales/id/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/id/LC_MESSAGES/booktheme.po b/_static/locales/id/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..1eca5994 --- /dev/null +++ b/_static/locales/id/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" 
+"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: id\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Unduh file sumber" + +msgid "suggest edit" +msgstr "menyarankan edit" + +msgid "Toggle navigation" +msgstr "Alihkan navigasi" + +msgid "open issue" +msgstr "masalah terbuka" + +msgid "Download notebook file" +msgstr "Unduh file notebook" + +msgid "repository" +msgstr "gudang" + +msgid "Theme by the" +msgstr "Tema oleh" + +msgid "Print to PDF" +msgstr "Cetak ke PDF" + +msgid "Download this page" +msgstr "Unduh halaman ini" + +msgid "Copyright" +msgstr "hak cipta" + +msgid "Last updated on" +msgstr "Terakhir diperbarui saat" + +msgid "Launch" +msgstr "Meluncurkan" + +msgid "Open an issue" +msgstr "Buka masalah" + +msgid "Fullscreen mode" +msgstr "Mode layar penuh" + +msgid "Sphinx Book Theme" +msgstr "Tema Buku Sphinx" + +msgid "Contents" +msgstr "Isi" + +msgid "Edit this page" +msgstr "Edit halaman ini" + +msgid "next page" +msgstr "halaman selanjutnya" + +msgid "Source repository" +msgstr "Repositori sumber" + +msgid "By" +msgstr "Oleh" + +msgid "By the" +msgstr "Oleh" + +msgid "previous page" +msgstr "halaman sebelumnya" diff --git a/_static/locales/it/LC_MESSAGES/booktheme.mo b/_static/locales/it/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..53ba476e Binary files /dev/null and b/_static/locales/it/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/it/LC_MESSAGES/booktheme.po b/_static/locales/it/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..f3169000 --- /dev/null +++ b/_static/locales/it/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: it\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Scarica il file sorgente" + +msgid 
"suggest edit" +msgstr "suggerisci modifica" + +msgid "Toggle navigation" +msgstr "Attiva / disattiva la navigazione" + +msgid "open issue" +msgstr "questione aperta" + +msgid "Download notebook file" +msgstr "Scarica il file del taccuino" + +msgid "repository" +msgstr "repository" + +msgid "Theme by the" +msgstr "Tema di" + +msgid "Print to PDF" +msgstr "Stampa in PDF" + +msgid "Download this page" +msgstr "Scarica questa pagina" + +msgid "Copyright" +msgstr "Diritto d'autore" + +msgid "Last updated on" +msgstr "Ultimo aggiornamento il" + +msgid "Launch" +msgstr "Lanciare" + +msgid "Open an issue" +msgstr "Apri un problema" + +msgid "Fullscreen mode" +msgstr "Modalità schermo intero" + +msgid "Sphinx Book Theme" +msgstr "Tema del libro della Sfinge" + +msgid "Contents" +msgstr "Contenuti" + +msgid "Edit this page" +msgstr "Modifica questa pagina" + +msgid "next page" +msgstr "pagina successiva" + +msgid "Source repository" +msgstr "Repository di origine" + +msgid "By" +msgstr "Di" + +msgid "By the" +msgstr "Dal" + +msgid "previous page" +msgstr "pagina precedente" diff --git a/_static/locales/iw/LC_MESSAGES/booktheme.mo b/_static/locales/iw/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..a45c6575 Binary files /dev/null and b/_static/locales/iw/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/iw/LC_MESSAGES/booktheme.po b/_static/locales/iw/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..9e6d753e --- /dev/null +++ b/_static/locales/iw/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: iw\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "הורד את קובץ המקור" + +msgid "suggest edit" +msgstr "מציע לערוך" + +msgid "Toggle navigation" +msgstr "החלף ניווט" + +msgid "open issue" +msgstr "בעיה פתוחה" + +msgid "Download 
notebook file" +msgstr "הורד קובץ מחברת" + +msgid "repository" +msgstr "מאגר" + +msgid "Theme by the" +msgstr "נושא מאת" + +msgid "Print to PDF" +msgstr "הדפס לקובץ PDF" + +msgid "Download this page" +msgstr "הורד דף זה" + +msgid "Copyright" +msgstr "זכויות יוצרים" + +msgid "Last updated on" +msgstr "עודכן לאחרונה ב" + +msgid "Launch" +msgstr "לְהַשִׁיק" + +msgid "Open an issue" +msgstr "פתח גיליון" + +msgid "Fullscreen mode" +msgstr "מצב מסך מלא" + +msgid "Sphinx Book Theme" +msgstr "נושא ספר ספינקס" + +msgid "Contents" +msgstr "תוכן" + +msgid "Edit this page" +msgstr "ערוך דף זה" + +msgid "next page" +msgstr "עמוד הבא" + +msgid "Source repository" +msgstr "מאגר המקורות" + +msgid "By" +msgstr "על ידי" + +msgid "By the" +msgstr "דרך" + +msgid "previous page" +msgstr "עמוד קודם" diff --git a/_static/locales/ja/LC_MESSAGES/booktheme.mo b/_static/locales/ja/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..1cefd29c Binary files /dev/null and b/_static/locales/ja/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ja/LC_MESSAGES/booktheme.po b/_static/locales/ja/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..c1a31489 --- /dev/null +++ b/_static/locales/ja/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ja\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "ソースファイルをダウンロード" + +msgid "suggest edit" +msgstr "編集を提案する" + +msgid "Toggle navigation" +msgstr "ナビゲーションを切り替え" + +msgid "open issue" +msgstr "未解決の問題" + +msgid "Download notebook file" +msgstr "ノートブックファイルをダウンロード" + +msgid "repository" +msgstr "リポジトリ" + +msgid "Theme by the" +msgstr "のテーマ" + +msgid "Print to PDF" +msgstr "PDFに印刷" + +msgid "Download this page" +msgstr "このページをダウンロード" + +msgid "Copyright" +msgstr "Copyright" + +msgid "Last updated on" +msgstr "最終更新日" + 
+msgid "Launch" +msgstr "起動" + +msgid "Open an issue" +msgstr "問題を報告" + +msgid "Fullscreen mode" +msgstr "全画面モード" + +msgid "Sphinx Book Theme" +msgstr "スフィンクスの本のテーマ" + +msgid "Contents" +msgstr "目次" + +msgid "Edit this page" +msgstr "このページを編集" + +msgid "next page" +msgstr "次のページ" + +msgid "Source repository" +msgstr "ソースリポジトリ" + +msgid "By" +msgstr "著者" + +msgid "By the" +msgstr "によって" + +msgid "previous page" +msgstr "前のページ" diff --git a/_static/locales/ko/LC_MESSAGES/booktheme.mo b/_static/locales/ko/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..06c7ec93 Binary files /dev/null and b/_static/locales/ko/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ko/LC_MESSAGES/booktheme.po b/_static/locales/ko/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..80142313 --- /dev/null +++ b/_static/locales/ko/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ko\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "소스 파일 다운로드" + +msgid "suggest edit" +msgstr "편집 제안" + +msgid "Toggle navigation" +msgstr "탐색 전환" + +msgid "open issue" +msgstr "열린 문제" + +msgid "Download notebook file" +msgstr "노트북 파일 다운로드" + +msgid "repository" +msgstr "저장소" + +msgid "Theme by the" +msgstr "테마별" + +msgid "Print to PDF" +msgstr "PDF로 인쇄" + +msgid "Download this page" +msgstr "이 페이지 다운로드" + +msgid "Copyright" +msgstr "저작권" + +msgid "Last updated on" +msgstr "마지막 업데이트" + +msgid "Launch" +msgstr "시작하다" + +msgid "Open an issue" +msgstr "이슈 열기" + +msgid "Fullscreen mode" +msgstr "전체 화면으로보기" + +msgid "Sphinx Book Theme" +msgstr "스핑크스 도서 테마" + +msgid "Contents" +msgstr "내용" + +msgid "Edit this page" +msgstr "이 페이지 편집" + +msgid "next page" +msgstr "다음 페이지" + +msgid "Source repository" +msgstr "소스 저장소" + +msgid "By" +msgstr "으로" + +msgid "By the" +msgstr "에 의해" 
+ +msgid "previous page" +msgstr "이전 페이지" diff --git a/_static/locales/lt/LC_MESSAGES/booktheme.mo b/_static/locales/lt/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..4468ba04 Binary files /dev/null and b/_static/locales/lt/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/lt/LC_MESSAGES/booktheme.po b/_static/locales/lt/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..2e6915a9 --- /dev/null +++ b/_static/locales/lt/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: lt\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Atsisiųsti šaltinio failą" + +msgid "suggest edit" +msgstr "pasiūlyti redaguoti" + +msgid "Toggle navigation" +msgstr "Perjungti naršymą" + +msgid "open issue" +msgstr "atviras klausimas" + +msgid "Download notebook file" +msgstr "Atsisiųsti nešiojamojo kompiuterio failą" + +msgid "repository" +msgstr "saugykla" + +msgid "Theme by the" +msgstr "Tema" + +msgid "Print to PDF" +msgstr "Spausdinti į PDF" + +msgid "Download this page" +msgstr "Atsisiųskite šį puslapį" + +msgid "Copyright" +msgstr "Autorių teisės" + +msgid "Last updated on" +msgstr "Paskutinį kartą atnaujinta" + +msgid "Launch" +msgstr "Paleiskite" + +msgid "Open an issue" +msgstr "Atidarykite problemą" + +msgid "Fullscreen mode" +msgstr "Pilno ekrano režimas" + +msgid "Sphinx Book Theme" +msgstr "Sfinkso knygos tema" + +msgid "Contents" +msgstr "Turinys" + +msgid "Edit this page" +msgstr "Redaguoti šį puslapį" + +msgid "next page" +msgstr "Kitas puslapis" + +msgid "Source repository" +msgstr "Šaltinio saugykla" + +msgid "By" +msgstr "Iki" + +msgid "By the" +msgstr "Prie" + +msgid "previous page" +msgstr "Ankstesnis puslapis" diff --git a/_static/locales/lv/LC_MESSAGES/booktheme.mo b/_static/locales/lv/LC_MESSAGES/booktheme.mo new file mode 
100644 index 00000000..74aa4d89 Binary files /dev/null and b/_static/locales/lv/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/lv/LC_MESSAGES/booktheme.po b/_static/locales/lv/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..d4f4b150 --- /dev/null +++ b/_static/locales/lv/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: lv\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Lejupielādēt avota failu" + +msgid "suggest edit" +msgstr "ieteikt rediģēt" + +msgid "Toggle navigation" +msgstr "Pārslēgt navigāciju" + +msgid "open issue" +msgstr "atklāts jautājums" + +msgid "Download notebook file" +msgstr "Lejupielādēt piezīmju grāmatiņu" + +msgid "repository" +msgstr "krātuve" + +msgid "Theme by the" +msgstr "Autora tēma" + +msgid "Print to PDF" +msgstr "Drukāt PDF formātā" + +msgid "Download this page" +msgstr "Lejupielādējiet šo lapu" + +msgid "Copyright" +msgstr "Autortiesības" + +msgid "Last updated on" +msgstr "Pēdējoreiz atjaunināts" + +msgid "Launch" +msgstr "Uzsākt" + +msgid "Open an issue" +msgstr "Atveriet problēmu" + +msgid "Fullscreen mode" +msgstr "Pilnekrāna režīms" + +msgid "Sphinx Book Theme" +msgstr "Sfinksa grāmatas tēma" + +msgid "Contents" +msgstr "Saturs" + +msgid "Edit this page" +msgstr "Rediģēt šo lapu" + +msgid "next page" +msgstr "nākamā lapaspuse" + +msgid "Source repository" +msgstr "Avota krātuve" + +msgid "By" +msgstr "Autors" + +msgid "By the" +msgstr "Ar" + +msgid "previous page" +msgstr "iepriekšējā lapa" diff --git a/_static/locales/ml/LC_MESSAGES/booktheme.mo b/_static/locales/ml/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..2736e8fc Binary files /dev/null and b/_static/locales/ml/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ml/LC_MESSAGES/booktheme.po 
b/_static/locales/ml/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..5d8ed33b --- /dev/null +++ b/_static/locales/ml/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ml\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "ഉറവിട ഫയൽ ഡൗൺലോഡുചെയ്യുക" + +msgid "suggest edit" +msgstr "എഡിറ്റുചെയ്യാൻ നിർദ്ദേശിക്കുക" + +msgid "Toggle navigation" +msgstr "നാവിഗേഷൻ ടോഗിൾ ചെയ്യുക" + +msgid "open issue" +msgstr "തുറന്ന പ്രശ്നം" + +msgid "Download notebook file" +msgstr "നോട്ട്ബുക്ക് ഫയൽ ഡൺലോഡ് ചെയ്യുക" + +msgid "Theme by the" +msgstr "പ്രമേയം" + +msgid "Print to PDF" +msgstr "PDF- ലേക്ക് പ്രിന്റുചെയ്യുക" + +msgid "Download this page" +msgstr "ഈ പേജ് ഡൗൺലോഡുചെയ്യുക" + +msgid "Copyright" +msgstr "പകർപ്പവകാശം" + +msgid "Last updated on" +msgstr "അവസാനം അപ്‌ഡേറ്റുചെയ്‌തത്" + +msgid "Launch" +msgstr "സമാരംഭിക്കുക" + +msgid "Open an issue" +msgstr "ഒരു പ്രശ്നം തുറക്കുക" + +msgid "Sphinx Book Theme" +msgstr "സ്ഫിങ്ക്സ് പുസ്തക തീം" + +msgid "Edit this page" +msgstr "ഈ പേജ് എഡിറ്റുചെയ്യുക" + +msgid "next page" +msgstr "അടുത്ത പേജ്" + +msgid "Source repository" +msgstr "ഉറവിട ശേഖരം" + +msgid "By" +msgstr "എഴുതിയത്" + +msgid "By the" +msgstr "എഴുതിയത്" + +msgid "previous page" +msgstr "മുൻപത്തെ താൾ" diff --git a/_static/locales/mr/LC_MESSAGES/booktheme.mo b/_static/locales/mr/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..fe530100 Binary files /dev/null and b/_static/locales/mr/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/mr/LC_MESSAGES/booktheme.po b/_static/locales/mr/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..64389fac --- /dev/null +++ b/_static/locales/mr/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: 
text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: mr\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "स्त्रोत फाइल डाउनलोड करा" + +msgid "suggest edit" +msgstr "संपादन सुचवा" + +msgid "Toggle navigation" +msgstr "नेव्हिगेशन टॉगल करा" + +msgid "open issue" +msgstr "खुला मुद्दा" + +msgid "Download notebook file" +msgstr "नोटबुक फाईल डाउनलोड करा" + +msgid "Theme by the" +msgstr "द्वारा थीम" + +msgid "Print to PDF" +msgstr "पीडीएफवर मुद्रित करा" + +msgid "Download this page" +msgstr "हे पृष्ठ डाउनलोड करा" + +msgid "Copyright" +msgstr "कॉपीराइट" + +msgid "Last updated on" +msgstr "अखेरचे अद्यतनित" + +msgid "Launch" +msgstr "लाँच करा" + +msgid "Open an issue" +msgstr "एक मुद्दा उघडा" + +msgid "Sphinx Book Theme" +msgstr "स्फिंक्स बुक थीम" + +msgid "Edit this page" +msgstr "हे पृष्ठ संपादित करा" + +msgid "next page" +msgstr "पुढील पृष्ठ" + +msgid "Source repository" +msgstr "स्त्रोत भांडार" + +msgid "By" +msgstr "द्वारा" + +msgid "By the" +msgstr "द्वारा" + +msgid "previous page" +msgstr "मागील पान" diff --git a/_static/locales/ms/LC_MESSAGES/booktheme.mo b/_static/locales/ms/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..f02603fa Binary files /dev/null and b/_static/locales/ms/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ms/LC_MESSAGES/booktheme.po b/_static/locales/ms/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..4365ff54 --- /dev/null +++ b/_static/locales/ms/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ms\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Muat turun fail sumber" + +msgid "suggest edit" +msgstr "cadangkan edit" + +msgid "Toggle navigation" +msgstr "Togol navigasi" + +msgid "open issue" +msgstr "isu terbuka" + +msgid 
"Download notebook file" +msgstr "Muat turun fail buku nota" + +msgid "Theme by the" +msgstr "Tema oleh" + +msgid "Print to PDF" +msgstr "Cetak ke PDF" + +msgid "Download this page" +msgstr "Muat turun halaman ini" + +msgid "Copyright" +msgstr "hak cipta" + +msgid "Last updated on" +msgstr "Terakhir dikemas kini pada" + +msgid "Launch" +msgstr "Lancarkan" + +msgid "Open an issue" +msgstr "Buka masalah" + +msgid "Sphinx Book Theme" +msgstr "Tema Buku Sphinx" + +msgid "Edit this page" +msgstr "Edit halaman ini" + +msgid "next page" +msgstr "muka surat seterusnya" + +msgid "Source repository" +msgstr "Repositori sumber" + +msgid "By" +msgstr "Oleh" + +msgid "By the" +msgstr "Oleh" + +msgid "previous page" +msgstr "halaman sebelumnya" diff --git a/_static/locales/nl/LC_MESSAGES/booktheme.mo b/_static/locales/nl/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..e59e7ecb Binary files /dev/null and b/_static/locales/nl/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/nl/LC_MESSAGES/booktheme.po b/_static/locales/nl/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..f7b18e5d --- /dev/null +++ b/_static/locales/nl/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: nl\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Download het bronbestand" + +msgid "suggest edit" +msgstr "suggereren bewerken" + +msgid "Toggle navigation" +msgstr "Schakel navigatie" + +msgid "open issue" +msgstr "open probleem" + +msgid "Download notebook file" +msgstr "Download notebookbestand" + +msgid "repository" +msgstr "repository" + +msgid "Theme by the" +msgstr "Thema door de" + +msgid "Print to PDF" +msgstr "Afdrukken naar pdf" + +msgid "Download this page" +msgstr "Download deze pagina" + +msgid "Copyright" +msgstr "auteursrechten" + +msgid "Last 
updated on" +msgstr "Laatst geupdate op" + +msgid "Launch" +msgstr "Lancering" + +msgid "Open an issue" +msgstr "Open een probleem" + +msgid "Fullscreen mode" +msgstr "Volledig scherm" + +msgid "Sphinx Book Theme" +msgstr "Sphinx-boekthema" + +msgid "Contents" +msgstr "Inhoud" + +msgid "Edit this page" +msgstr "bewerk deze pagina" + +msgid "next page" +msgstr "volgende bladzijde" + +msgid "Source repository" +msgstr "Bronopslagplaats" + +msgid "By" +msgstr "Door" + +msgid "By the" +msgstr "Door de" + +msgid "previous page" +msgstr "vorige pagina" diff --git a/_static/locales/no/LC_MESSAGES/booktheme.mo b/_static/locales/no/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..6cd15c88 Binary files /dev/null and b/_static/locales/no/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/no/LC_MESSAGES/booktheme.po b/_static/locales/no/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..274823a8 --- /dev/null +++ b/_static/locales/no/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: no\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Last ned kildefilen" + +msgid "suggest edit" +msgstr "foreslå redigering" + +msgid "Toggle navigation" +msgstr "Bytt navigasjon" + +msgid "open issue" +msgstr "åpent nummer" + +msgid "Download notebook file" +msgstr "Last ned notatbokfilen" + +msgid "repository" +msgstr "oppbevaringssted" + +msgid "Theme by the" +msgstr "Tema av" + +msgid "Print to PDF" +msgstr "Skriv ut til PDF" + +msgid "Download this page" +msgstr "Last ned denne siden" + +msgid "Copyright" +msgstr "opphavsrett" + +msgid "Last updated on" +msgstr "Sist oppdatert den" + +msgid "Launch" +msgstr "Start" + +msgid "Open an issue" +msgstr "Åpne et problem" + +msgid "Fullscreen mode" +msgstr "Fullskjerm-modus" + +msgid "Sphinx Book 
Theme" +msgstr "Sphinx boktema" + +msgid "Contents" +msgstr "Innhold" + +msgid "Edit this page" +msgstr "Rediger denne siden" + +msgid "next page" +msgstr "neste side" + +msgid "Source repository" +msgstr "Kildedepot" + +msgid "By" +msgstr "Av" + +msgid "By the" +msgstr "Ved" + +msgid "previous page" +msgstr "forrige side" diff --git a/_static/locales/pl/LC_MESSAGES/booktheme.mo b/_static/locales/pl/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..9ebb584f Binary files /dev/null and b/_static/locales/pl/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/pl/LC_MESSAGES/booktheme.po b/_static/locales/pl/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..dfba3f69 --- /dev/null +++ b/_static/locales/pl/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pl\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Pobierz plik źródłowy" + +msgid "suggest edit" +msgstr "zaproponuj edycję" + +msgid "Toggle navigation" +msgstr "Przełącz nawigację" + +msgid "open issue" +msgstr "otwarty problem" + +msgid "Download notebook file" +msgstr "Pobierz plik notatnika" + +msgid "repository" +msgstr "magazyn" + +msgid "Theme by the" +msgstr "Motyw autorstwa" + +msgid "Print to PDF" +msgstr "Drukuj do PDF" + +msgid "Download this page" +msgstr "Pobierz tę stronę" + +msgid "Copyright" +msgstr "prawa autorskie" + +msgid "Last updated on" +msgstr "Ostatnia aktualizacja" + +msgid "Launch" +msgstr "Uruchomić" + +msgid "Open an issue" +msgstr "Otwórz problem" + +msgid "Fullscreen mode" +msgstr "Pełny ekran" + +msgid "Sphinx Book Theme" +msgstr "Motyw książki Sphinx" + +msgid "Contents" +msgstr "Zawartość" + +msgid "Edit this page" +msgstr "Edytuj tę strone" + +msgid "next page" +msgstr "Następna strona" + +msgid "Source repository" +msgstr "Repozytorium 
źródłowe" + +msgid "By" +msgstr "Przez" + +msgid "By the" +msgstr "Przez" + +msgid "previous page" +msgstr "Poprzednia strona" diff --git a/_static/locales/pt/LC_MESSAGES/booktheme.mo b/_static/locales/pt/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..d0ddb872 Binary files /dev/null and b/_static/locales/pt/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/pt/LC_MESSAGES/booktheme.po b/_static/locales/pt/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..4c24eb9e --- /dev/null +++ b/_static/locales/pt/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pt\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Baixar arquivo fonte" + +msgid "suggest edit" +msgstr "sugerir edição" + +msgid "Toggle navigation" +msgstr "Alternar de navegação" + +msgid "open issue" +msgstr "questão aberta" + +msgid "Download notebook file" +msgstr "Baixar arquivo de notebook" + +msgid "repository" +msgstr "repositório" + +msgid "Theme by the" +msgstr "Tema por" + +msgid "Print to PDF" +msgstr "Imprimir em PDF" + +msgid "Download this page" +msgstr "Baixe esta página" + +msgid "Copyright" +msgstr "direito autoral" + +msgid "Last updated on" +msgstr "Última atualização em" + +msgid "Launch" +msgstr "Lançamento" + +msgid "Open an issue" +msgstr "Abra um problema" + +msgid "Fullscreen mode" +msgstr "Modo tela cheia" + +msgid "Sphinx Book Theme" +msgstr "Tema do livro Sphinx" + +msgid "Contents" +msgstr "Conteúdo" + +msgid "Edit this page" +msgstr "Edite essa página" + +msgid "next page" +msgstr "próxima página" + +msgid "Source repository" +msgstr "Repositório fonte" + +msgid "By" +msgstr "De" + +msgid "By the" +msgstr "Pelo" + +msgid "previous page" +msgstr "página anterior" diff --git a/_static/locales/ro/LC_MESSAGES/booktheme.mo 
b/_static/locales/ro/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..3c36ab1d Binary files /dev/null and b/_static/locales/ro/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ro/LC_MESSAGES/booktheme.po b/_static/locales/ro/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..5f03d9cd --- /dev/null +++ b/_static/locales/ro/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ro\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Descărcați fișierul sursă" + +msgid "suggest edit" +msgstr "sugerează editare" + +msgid "Toggle navigation" +msgstr "Comutare navigare" + +msgid "open issue" +msgstr "problema deschisă" + +msgid "Download notebook file" +msgstr "Descărcați fișierul notebook" + +msgid "repository" +msgstr "repertoriu" + +msgid "Theme by the" +msgstr "Tema de" + +msgid "Print to PDF" +msgstr "Imprimați în PDF" + +msgid "Download this page" +msgstr "Descarcă această pagină" + +msgid "Copyright" +msgstr "Drepturi de autor" + +msgid "Last updated on" +msgstr "Ultima actualizare la" + +msgid "Launch" +msgstr "Lansa" + +msgid "Open an issue" +msgstr "Deschideți o problemă" + +msgid "Fullscreen mode" +msgstr "Modul ecran întreg" + +msgid "Sphinx Book Theme" +msgstr "Tema Sphinx Book" + +msgid "Contents" +msgstr "Cuprins" + +msgid "Edit this page" +msgstr "Editați această pagină" + +msgid "next page" +msgstr "pagina următoare" + +msgid "Source repository" +msgstr "Depozit sursă" + +msgid "By" +msgstr "De" + +msgid "By the" +msgstr "Langa" + +msgid "previous page" +msgstr "pagina anterioară" diff --git a/_static/locales/ru/LC_MESSAGES/booktheme.mo b/_static/locales/ru/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..6b8ca41f Binary files /dev/null and b/_static/locales/ru/LC_MESSAGES/booktheme.mo differ diff 
--git a/_static/locales/ru/LC_MESSAGES/booktheme.po b/_static/locales/ru/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..2886570d --- /dev/null +++ b/_static/locales/ru/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ru\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Скачать исходный файл" + +msgid "suggest edit" +msgstr "предложить редактировать" + +msgid "Toggle navigation" +msgstr "Переключить навигацию" + +msgid "open issue" +msgstr "открытый вопрос" + +msgid "Download notebook file" +msgstr "Скачать файл записной книжки" + +msgid "repository" +msgstr "хранилище" + +msgid "Theme by the" +msgstr "Тема от" + +msgid "Print to PDF" +msgstr "Распечатать в PDF" + +msgid "Download this page" +msgstr "Загрузите эту страницу" + +msgid "Copyright" +msgstr "авторское право" + +msgid "Last updated on" +msgstr "Последнее обновление" + +msgid "Launch" +msgstr "Запуск" + +msgid "Open an issue" +msgstr "Открыть вопрос" + +msgid "Fullscreen mode" +msgstr "Полноэкранный режим" + +msgid "Sphinx Book Theme" +msgstr "Тема книги Сфинкс" + +msgid "Contents" +msgstr "Содержание" + +msgid "Edit this page" +msgstr "Редактировать эту страницу" + +msgid "next page" +msgstr "Следующая страница" + +msgid "Source repository" +msgstr "Исходный репозиторий" + +msgid "By" +msgstr "По" + +msgid "By the" +msgstr "Посредством" + +msgid "previous page" +msgstr "Предыдущая страница" diff --git a/_static/locales/sk/LC_MESSAGES/booktheme.mo b/_static/locales/sk/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..59bd0ddf Binary files /dev/null and b/_static/locales/sk/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/sk/LC_MESSAGES/booktheme.po b/_static/locales/sk/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..25517aa5 --- /dev/null 
+++ b/_static/locales/sk/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: sk\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Stiahnite si zdrojový súbor" + +msgid "suggest edit" +msgstr "navrhnúť úpravu" + +msgid "Toggle navigation" +msgstr "Prepnúť navigáciu" + +msgid "open issue" +msgstr "otvorené vydanie" + +msgid "Download notebook file" +msgstr "Stiahnite si zošit" + +msgid "repository" +msgstr "Úložisko" + +msgid "Theme by the" +msgstr "Téma od" + +msgid "Print to PDF" +msgstr "Tlač do PDF" + +msgid "Download this page" +msgstr "Stiahnite si túto stránku" + +msgid "Copyright" +msgstr "Autorské práva" + +msgid "Last updated on" +msgstr "Posledná aktualizácia dňa" + +msgid "Launch" +msgstr "Spustiť" + +msgid "Open an issue" +msgstr "Otvorte problém" + +msgid "Fullscreen mode" +msgstr "Režim celej obrazovky" + +msgid "Sphinx Book Theme" +msgstr "Téma knihy Sfinga" + +msgid "Contents" +msgstr "Obsah" + +msgid "Edit this page" +msgstr "Upraviť túto stránku" + +msgid "next page" +msgstr "ďalšia strana" + +msgid "Source repository" +msgstr "Zdrojové úložisko" + +msgid "By" +msgstr "Autor:" + +msgid "By the" +msgstr "Podľa" + +msgid "previous page" +msgstr "predchádzajúca strana" diff --git a/_static/locales/sl/LC_MESSAGES/booktheme.mo b/_static/locales/sl/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..87bf26de Binary files /dev/null and b/_static/locales/sl/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/sl/LC_MESSAGES/booktheme.po b/_static/locales/sl/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..346c3648 --- /dev/null +++ b/_static/locales/sl/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; 
charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: sl\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Prenesite izvorno datoteko" + +msgid "suggest edit" +msgstr "predlagajte urejanje" + +msgid "Toggle navigation" +msgstr "Preklopi navigacijo" + +msgid "open issue" +msgstr "odprto vprašanje" + +msgid "Download notebook file" +msgstr "Prenesite datoteko zvezka" + +msgid "repository" +msgstr "odlagališče" + +msgid "Theme by the" +msgstr "Tema avtorja" + +msgid "Print to PDF" +msgstr "Natisni v PDF" + +msgid "Download this page" +msgstr "Prenesite to stran" + +msgid "Copyright" +msgstr "avtorske pravice" + +msgid "Last updated on" +msgstr "Nazadnje posodobljeno dne" + +msgid "Launch" +msgstr "Kosilo" + +msgid "Open an issue" +msgstr "Odprite številko" + +msgid "Fullscreen mode" +msgstr "Celozaslonski način" + +msgid "Sphinx Book Theme" +msgstr "Tema knjige Sphinx" + +msgid "Contents" +msgstr "Vsebina" + +msgid "Edit this page" +msgstr "Uredite to stran" + +msgid "next page" +msgstr "Naslednja stran" + +msgid "Source repository" +msgstr "Izvorno skladišče" + +msgid "By" +msgstr "Avtor" + +msgid "By the" +msgstr "Avtor" + +msgid "previous page" +msgstr "Prejšnja stran" diff --git a/_static/locales/sr/LC_MESSAGES/booktheme.mo b/_static/locales/sr/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..ec740f48 Binary files /dev/null and b/_static/locales/sr/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/sr/LC_MESSAGES/booktheme.po b/_static/locales/sr/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..b1a97ada --- /dev/null +++ b/_static/locales/sr/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: sr\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Преузми изворну 
датотеку" + +msgid "suggest edit" +msgstr "предложи уређивање" + +msgid "Toggle navigation" +msgstr "Укључи / искључи навигацију" + +msgid "open issue" +msgstr "отворено издање" + +msgid "Download notebook file" +msgstr "Преузмите датотеку бележнице" + +msgid "repository" +msgstr "спремиште" + +msgid "Theme by the" +msgstr "Тхеме би" + +msgid "Print to PDF" +msgstr "Испис у ПДФ" + +msgid "Download this page" +msgstr "Преузмите ову страницу" + +msgid "Copyright" +msgstr "Ауторско право" + +msgid "Last updated on" +msgstr "Последње ажурирање" + +msgid "Launch" +msgstr "Лансирање" + +msgid "Open an issue" +msgstr "Отворите издање" + +msgid "Fullscreen mode" +msgstr "Режим целог екрана" + +msgid "Sphinx Book Theme" +msgstr "Тема књиге Спхинк" + +msgid "Contents" +msgstr "Садржај" + +msgid "Edit this page" +msgstr "Уредите ову страницу" + +msgid "next page" +msgstr "Следећа страна" + +msgid "Source repository" +msgstr "Изворно спремиште" + +msgid "By" +msgstr "Од стране" + +msgid "By the" +msgstr "Од" + +msgid "previous page" +msgstr "Претходна страница" diff --git a/_static/locales/sv/LC_MESSAGES/booktheme.mo b/_static/locales/sv/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..b07dc76f Binary files /dev/null and b/_static/locales/sv/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/sv/LC_MESSAGES/booktheme.po b/_static/locales/sv/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..8fc0146e --- /dev/null +++ b/_static/locales/sv/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: sv\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Ladda ner källfil" + +msgid "suggest edit" +msgstr "föreslå ändring" + +msgid "Toggle navigation" +msgstr "Växla navigering" + +msgid "open issue" +msgstr "öppna problemrapport" + +msgid "Download 
notebook file" +msgstr "Ladda ner notebook-fil" + +msgid "repository" +msgstr "repositorium" + +msgid "Theme by the" +msgstr "Tema av" + +msgid "Print to PDF" +msgstr "Skriv ut till PDF" + +msgid "Download this page" +msgstr "Ladda ner den här sidan" + +msgid "Copyright" +msgstr "Upphovsrätt" + +msgid "Last updated on" +msgstr "Senast uppdaterad den" + +msgid "Launch" +msgstr "Öppna" + +msgid "Open an issue" +msgstr "Öppna en problemrapport" + +msgid "Fullscreen mode" +msgstr "Fullskärmsläge" + +msgid "Sphinx Book Theme" +msgstr "Sphinx Boktema" + +msgid "Contents" +msgstr "Innehåll" + +msgid "Edit this page" +msgstr "Redigera den här sidan" + +msgid "next page" +msgstr "nästa sida" + +msgid "Source repository" +msgstr "Källkodsrepositorium" + +msgid "By" +msgstr "Av" + +msgid "By the" +msgstr "Av den" + +msgid "previous page" +msgstr "föregående sida" diff --git a/_static/locales/ta/LC_MESSAGES/booktheme.mo b/_static/locales/ta/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..29f52e1f Binary files /dev/null and b/_static/locales/ta/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ta/LC_MESSAGES/booktheme.po b/_static/locales/ta/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..116d7433 --- /dev/null +++ b/_static/locales/ta/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ta\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "மூல கோப்பைப் பதிவிறக்குக" + +msgid "suggest edit" +msgstr "திருத்த பரிந்துரைக்கவும்" + +msgid "Toggle navigation" +msgstr "வழிசெலுத்தலை நிலைமாற்று" + +msgid "open issue" +msgstr "திறந்த பிரச்சினை" + +msgid "Download notebook file" +msgstr "நோட்புக் கோப்பைப் பதிவிறக்கவும்" + +msgid "Theme by the" +msgstr "வழங்கிய தீம்" + +msgid "Print to PDF" +msgstr "PDF இல் அச்சிடுக" + +msgid "Download this 
page" +msgstr "இந்தப் பக்கத்தைப் பதிவிறக்கவும்" + +msgid "Copyright" +msgstr "பதிப்புரிமை" + +msgid "Last updated on" +msgstr "கடைசியாக புதுப்பிக்கப்பட்டது" + +msgid "Launch" +msgstr "தொடங்க" + +msgid "Open an issue" +msgstr "சிக்கலைத் திறக்கவும்" + +msgid "Sphinx Book Theme" +msgstr "ஸ்பிங்க்ஸ் புத்தக தீம்" + +msgid "Edit this page" +msgstr "இந்தப் பக்கத்தைத் திருத்தவும்" + +msgid "next page" +msgstr "அடுத்த பக்கம்" + +msgid "Source repository" +msgstr "மூல களஞ்சியம்" + +msgid "By" +msgstr "வழங்கியவர்" + +msgid "By the" +msgstr "மூலம்" + +msgid "previous page" +msgstr "முந்தைய பக்கம்" diff --git a/_static/locales/te/LC_MESSAGES/booktheme.mo b/_static/locales/te/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..0a5f4b46 Binary files /dev/null and b/_static/locales/te/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/te/LC_MESSAGES/booktheme.po b/_static/locales/te/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..635bdf96 --- /dev/null +++ b/_static/locales/te/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: te\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "మూల ఫైల్‌ను డౌన్‌లోడ్ చేయండి" + +msgid "suggest edit" +msgstr "సవరించమని సూచించండి" + +msgid "Toggle navigation" +msgstr "నావిగేషన్‌ను టోగుల్ చేయండి" + +msgid "open issue" +msgstr "ఓపెన్ ఇష్యూ" + +msgid "Download notebook file" +msgstr "నోట్బుక్ ఫైల్ను డౌన్లోడ్ చేయండి" + +msgid "Theme by the" +msgstr "ద్వారా థీమ్" + +msgid "Print to PDF" +msgstr "PDF కి ముద్రించండి" + +msgid "Download this page" +msgstr "ఈ పేజీని డౌన్‌లోడ్ చేయండి" + +msgid "Copyright" +msgstr "కాపీరైట్" + +msgid "Last updated on" +msgstr "చివరిగా నవీకరించబడింది" + +msgid "Launch" +msgstr "ప్రారంభించండి" + +msgid "Open an issue" +msgstr "సమస్యను తెరవండి" + +msgid "Sphinx Book Theme" +msgstr 
"సింహిక పుస్తక థీమ్" + +msgid "Edit this page" +msgstr "ఈ పేజీని సవరించండి" + +msgid "next page" +msgstr "తరువాతి పేజీ" + +msgid "Source repository" +msgstr "మూల రిపోజిటరీ" + +msgid "By" +msgstr "ద్వారా" + +msgid "By the" +msgstr "ద్వారా" + +msgid "previous page" +msgstr "ముందు పేజి" diff --git a/_static/locales/tg/LC_MESSAGES/booktheme.mo b/_static/locales/tg/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..b21c6c63 Binary files /dev/null and b/_static/locales/tg/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/tg/LC_MESSAGES/booktheme.po b/_static/locales/tg/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..a92c069a --- /dev/null +++ b/_static/locales/tg/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: tg\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Файли манбаъро зеркашӣ кунед" + +msgid "suggest edit" +msgstr "пешниҳод вироиш" + +msgid "Toggle navigation" +msgstr "Гузаришро иваз кунед" + +msgid "open issue" +msgstr "барориши кушод" + +msgid "Download notebook file" +msgstr "Файли дафтарро зеркашӣ кунед" + +msgid "repository" +msgstr "анбор" + +msgid "Theme by the" +msgstr "Мавзӯъи аз" + +msgid "Print to PDF" +msgstr "Чоп ба PDF" + +msgid "Download this page" +msgstr "Ин саҳифаро зеркашӣ кунед" + +msgid "Copyright" +msgstr "Ҳуқуқи муаллиф" + +msgid "Last updated on" +msgstr "Last навсозӣ дар" + +msgid "Launch" +msgstr "Оғоз" + +msgid "Open an issue" +msgstr "Масъаларо кушоед" + +msgid "Fullscreen mode" +msgstr "Ҳолати экрани пурра" + +msgid "Sphinx Book Theme" +msgstr "Сфинкс Мавзӯи китоб" + +msgid "Contents" +msgstr "Мундариҷа" + +msgid "Edit this page" +msgstr "Ин саҳифаро таҳрир кунед" + +msgid "next page" +msgstr "саҳифаи оянда" + +msgid "Source repository" +msgstr "Анбори манбаъ" + +msgid "By" +msgstr 
"Бо" + +msgid "By the" +msgstr "Бо" + +msgid "previous page" +msgstr "саҳифаи қаблӣ" diff --git a/_static/locales/th/LC_MESSAGES/booktheme.mo b/_static/locales/th/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..abede98a Binary files /dev/null and b/_static/locales/th/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/th/LC_MESSAGES/booktheme.po b/_static/locales/th/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..25d9837f --- /dev/null +++ b/_static/locales/th/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: th\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "ดาวน์โหลดไฟล์ต้นฉบับ" + +msgid "suggest edit" +msgstr "แนะนำแก้ไข" + +msgid "Toggle navigation" +msgstr "ไม่ต้องสลับช่องทาง" + +msgid "open issue" +msgstr "เปิดปัญหา" + +msgid "Download notebook file" +msgstr "ดาวน์โหลดไฟล์สมุดบันทึก" + +msgid "repository" +msgstr "ที่เก็บ" + +msgid "Theme by the" +msgstr "ธีมโดย" + +msgid "Print to PDF" +msgstr "พิมพ์เป็น PDF" + +msgid "Download this page" +msgstr "ดาวน์โหลดหน้านี้" + +msgid "Copyright" +msgstr "ลิขสิทธิ์" + +msgid "Last updated on" +msgstr "ปรับปรุงล่าสุดเมื่อ" + +msgid "Launch" +msgstr "เปิด" + +msgid "Open an issue" +msgstr "เปิดปัญหา" + +msgid "Fullscreen mode" +msgstr "โหมดเต็มหน้าจอ" + +msgid "Sphinx Book Theme" +msgstr "ธีมหนังสือสฟิงซ์" + +msgid "Contents" +msgstr "สารบัญ" + +msgid "Edit this page" +msgstr "แก้ไขหน้านี้" + +msgid "next page" +msgstr "หน้าต่อไป" + +msgid "Source repository" +msgstr "ที่เก็บซอร์ส" + +msgid "By" +msgstr "โดย" + +msgid "By the" +msgstr "โดย" + +msgid "previous page" +msgstr "หน้าที่แล้ว" diff --git a/_static/locales/tl/LC_MESSAGES/booktheme.mo b/_static/locales/tl/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..8df1b733 Binary files /dev/null and 
b/_static/locales/tl/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/tl/LC_MESSAGES/booktheme.po b/_static/locales/tl/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..7e28b05f --- /dev/null +++ b/_static/locales/tl/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: tl\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Mag-download ng file ng pinagmulan" + +msgid "suggest edit" +msgstr "iminumungkahi i-edit" + +msgid "Toggle navigation" +msgstr "I-toggle ang pag-navigate" + +msgid "open issue" +msgstr "bukas na isyu" + +msgid "Download notebook file" +msgstr "Mag-download ng file ng notebook" + +msgid "Theme by the" +msgstr "Tema ng" + +msgid "Print to PDF" +msgstr "I-print sa PDF" + +msgid "Download this page" +msgstr "I-download ang pahinang ito" + +msgid "Copyright" +msgstr "Copyright" + +msgid "Last updated on" +msgstr "Huling na-update noong" + +msgid "Launch" +msgstr "Ilunsad" + +msgid "Open an issue" +msgstr "Magbukas ng isyu" + +msgid "Sphinx Book Theme" +msgstr "Tema ng Sphinx Book" + +msgid "Edit this page" +msgstr "I-edit ang pahinang ito" + +msgid "next page" +msgstr "Susunod na pahina" + +msgid "Source repository" +msgstr "Pinagmulan ng imbakan" + +msgid "By" +msgstr "Ni" + +msgid "By the" +msgstr "Sa pamamagitan ng" + +msgid "previous page" +msgstr "Nakaraang pahina" diff --git a/_static/locales/tr/LC_MESSAGES/booktheme.mo b/_static/locales/tr/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..029ae18a Binary files /dev/null and b/_static/locales/tr/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/tr/LC_MESSAGES/booktheme.po b/_static/locales/tr/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..54d6f583 --- /dev/null +++ b/_static/locales/tr/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 
@@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: tr\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Kaynak dosyayı indirin" + +msgid "suggest edit" +msgstr "düzenleme öner" + +msgid "Toggle navigation" +msgstr "Gezinmeyi değiştir" + +msgid "open issue" +msgstr "Açık konu" + +msgid "Download notebook file" +msgstr "Defter dosyasını indirin" + +msgid "repository" +msgstr "depo" + +msgid "Theme by the" +msgstr "Tarafından tema" + +msgid "Print to PDF" +msgstr "PDF olarak yazdır" + +msgid "Download this page" +msgstr "Bu sayfayı indirin" + +msgid "Copyright" +msgstr "Telif hakkı" + +msgid "Last updated on" +msgstr "Son güncelleme tarihi" + +msgid "Launch" +msgstr "Başlatmak" + +msgid "Open an issue" +msgstr "Bir sorunu açın" + +msgid "Fullscreen mode" +msgstr "Tam ekran modu" + +msgid "Sphinx Book Theme" +msgstr "Sfenks Kitap Teması" + +msgid "Contents" +msgstr "İçindekiler" + +msgid "Edit this page" +msgstr "Bu sayfayı düzenle" + +msgid "next page" +msgstr "sonraki Sayfa" + +msgid "Source repository" +msgstr "Kaynak kod deposu" + +msgid "By" +msgstr "Tarafından" + +msgid "By the" +msgstr "Tarafından" + +msgid "previous page" +msgstr "önceki sayfa" diff --git a/_static/locales/uk/LC_MESSAGES/booktheme.mo b/_static/locales/uk/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..16ab7890 Binary files /dev/null and b/_static/locales/uk/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/uk/LC_MESSAGES/booktheme.po b/_static/locales/uk/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..6ecaec67 --- /dev/null +++ b/_static/locales/uk/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: uk\n" 
+"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Завантажити вихідний файл" + +msgid "suggest edit" +msgstr "запропонувати редагувати" + +msgid "Toggle navigation" +msgstr "Переключити навігацію" + +msgid "open issue" +msgstr "відкритий випуск" + +msgid "Download notebook file" +msgstr "Завантажте файл блокнота" + +msgid "repository" +msgstr "сховище" + +msgid "Theme by the" +msgstr "Тема від" + +msgid "Print to PDF" +msgstr "Друк у форматі PDF" + +msgid "Download this page" +msgstr "Завантажте цю сторінку" + +msgid "Copyright" +msgstr "Авторське право" + +msgid "Last updated on" +msgstr "Останнє оновлення:" + +msgid "Launch" +msgstr "Запуск" + +msgid "Open an issue" +msgstr "Відкрийте випуск" + +msgid "Fullscreen mode" +msgstr "Повноекранний режим" + +msgid "Sphinx Book Theme" +msgstr "Тема книги \"Сфінкс\"" + +msgid "Contents" +msgstr "Зміст" + +msgid "Edit this page" +msgstr "Редагувати цю сторінку" + +msgid "next page" +msgstr "Наступна сторінка" + +msgid "Source repository" +msgstr "Джерело сховища" + +msgid "By" +msgstr "Автор" + +msgid "By the" +msgstr "По" + +msgid "previous page" +msgstr "Попередня сторінка" diff --git a/_static/locales/ur/LC_MESSAGES/booktheme.mo b/_static/locales/ur/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..de8c84b9 Binary files /dev/null and b/_static/locales/ur/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/ur/LC_MESSAGES/booktheme.po b/_static/locales/ur/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..818e03de --- /dev/null +++ b/_static/locales/ur/LC_MESSAGES/booktheme.po @@ -0,0 +1,66 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ur\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "سورس فائل ڈاؤن لوڈ کریں" + +msgid "suggest edit" +msgstr "ترمیم کی تجویز کریں" + +msgid 
"Toggle navigation" +msgstr "نیویگیشن ٹوگل کریں" + +msgid "open issue" +msgstr "کھلا مسئلہ" + +msgid "Download notebook file" +msgstr "نوٹ بک فائل ڈاؤن لوڈ کریں" + +msgid "Theme by the" +msgstr "کے ذریعہ تھیم" + +msgid "Print to PDF" +msgstr "پی ڈی ایف پرنٹ کریں" + +msgid "Download this page" +msgstr "اس صفحے کو ڈاؤن لوڈ کریں" + +msgid "Copyright" +msgstr "کاپی رائٹ" + +msgid "Last updated on" +msgstr "آخری بار تازہ کاری ہوئی" + +msgid "Launch" +msgstr "لانچ کریں" + +msgid "Open an issue" +msgstr "ایک مسئلہ کھولیں" + +msgid "Sphinx Book Theme" +msgstr "سپنکس بک تھیم" + +msgid "Edit this page" +msgstr "اس صفحے میں ترمیم کریں" + +msgid "next page" +msgstr "اگلا صفحہ" + +msgid "Source repository" +msgstr "ماخذ ذخیرہ" + +msgid "By" +msgstr "بذریعہ" + +msgid "By the" +msgstr "کی طرف" + +msgid "previous page" +msgstr "سابقہ ​​صفحہ" diff --git a/_static/locales/vi/LC_MESSAGES/booktheme.mo b/_static/locales/vi/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..2bb32555 Binary files /dev/null and b/_static/locales/vi/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/vi/LC_MESSAGES/booktheme.po b/_static/locales/vi/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..59db2669 --- /dev/null +++ b/_static/locales/vi/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: vi\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "Tải xuống tệp nguồn" + +msgid "suggest edit" +msgstr "đề nghị chỉnh sửa" + +msgid "Toggle navigation" +msgstr "Chuyển đổi điều hướng thành" + +msgid "open issue" +msgstr "vấn đề mở" + +msgid "Download notebook file" +msgstr "Tải xuống tệp sổ tay" + +msgid "repository" +msgstr "kho" + +msgid "Theme by the" +msgstr "Chủ đề của" + +msgid "Print to PDF" +msgstr "In sang PDF" + +msgid "Download this page" +msgstr "Tải xuống 
trang này" + +msgid "Copyright" +msgstr "Bản quyền" + +msgid "Last updated on" +msgstr "Cập nhật lần cuối vào" + +msgid "Launch" +msgstr "Phóng" + +msgid "Open an issue" +msgstr "Mở một vấn đề" + +msgid "Fullscreen mode" +msgstr "Chế độ toàn màn hình" + +msgid "Sphinx Book Theme" +msgstr "Chủ đề sách nhân sư" + +msgid "Contents" +msgstr "Nội dung" + +msgid "Edit this page" +msgstr "chỉnh sửa trang này" + +msgid "next page" +msgstr "Trang tiếp theo" + +msgid "Source repository" +msgstr "Kho nguồn" + +msgid "By" +msgstr "Bởi" + +msgid "By the" +msgstr "Bằng" + +msgid "previous page" +msgstr "trang trước" diff --git a/_static/locales/zh_CN/LC_MESSAGES/booktheme.mo b/_static/locales/zh_CN/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..0e3235d0 Binary files /dev/null and b/_static/locales/zh_CN/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/zh_CN/LC_MESSAGES/booktheme.po b/_static/locales/zh_CN/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..e8d12dd7 --- /dev/null +++ b/_static/locales/zh_CN/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: zh_CN\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "下载源文件" + +msgid "suggest edit" +msgstr "提出修改建议" + +msgid "Toggle navigation" +msgstr "显示或隐藏导航栏" + +msgid "open issue" +msgstr "创建议题" + +msgid "Download notebook file" +msgstr "下载笔记本文件" + +msgid "repository" +msgstr "仓库" + +msgid "Theme by the" +msgstr "主题作者:" + +msgid "Print to PDF" +msgstr "列印成 PDF" + +msgid "Download this page" +msgstr "下载此页面" + +msgid "Copyright" +msgstr "版权" + +msgid "Last updated on" +msgstr "上次更新时间:" + +msgid "Launch" +msgstr "启动" + +msgid "Open an issue" +msgstr "创建议题" + +msgid "Fullscreen mode" +msgstr "全屏模式" + +msgid "Sphinx Book Theme" +msgstr "Sphinx Book 主题" + +msgid "Contents" +msgstr "目录" + 
+msgid "Edit this page" +msgstr "编辑此页面" + +msgid "next page" +msgstr "下一页" + +msgid "Source repository" +msgstr "源码库" + +msgid "By" +msgstr "作者:" + +msgid "By the" +msgstr "作者:" + +msgid "previous page" +msgstr "上一页" diff --git a/_static/locales/zh_TW/LC_MESSAGES/booktheme.mo b/_static/locales/zh_TW/LC_MESSAGES/booktheme.mo new file mode 100644 index 00000000..9116fa95 Binary files /dev/null and b/_static/locales/zh_TW/LC_MESSAGES/booktheme.mo differ diff --git a/_static/locales/zh_TW/LC_MESSAGES/booktheme.po b/_static/locales/zh_TW/LC_MESSAGES/booktheme.po new file mode 100644 index 00000000..0ed32f74 --- /dev/null +++ b/_static/locales/zh_TW/LC_MESSAGES/booktheme.po @@ -0,0 +1,75 @@ + +msgid "" +msgstr "" +"Project-Id-Version: Sphinx-Book-Theme\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: zh_TW\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Download source file" +msgstr "下載原始檔" + +msgid "suggest edit" +msgstr "提出修改建議" + +msgid "Toggle navigation" +msgstr "顯示或隱藏導覽列" + +msgid "open issue" +msgstr "公開的問題" + +msgid "Download notebook file" +msgstr "下載 Notebook 檔案" + +msgid "repository" +msgstr "儲存庫" + +msgid "Theme by the" +msgstr "佈景主題作者:" + +msgid "Print to PDF" +msgstr "列印成 PDF" + +msgid "Download this page" +msgstr "下載此頁面" + +msgid "Copyright" +msgstr "Copyright" + +msgid "Last updated on" +msgstr "最後更新時間:" + +msgid "Launch" +msgstr "啟動" + +msgid "Open an issue" +msgstr "開啟議題" + +msgid "Fullscreen mode" +msgstr "全螢幕模式" + +msgid "Sphinx Book Theme" +msgstr "Sphinx Book 佈景主題" + +msgid "Contents" +msgstr "目錄" + +msgid "Edit this page" +msgstr "編輯此頁面" + +msgid "next page" +msgstr "下一頁" + +msgid "Source repository" +msgstr "來源儲存庫" + +msgid "By" +msgstr "作者:" + +msgid "By the" +msgstr "作者:" + +msgid "previous page" +msgstr "上一頁" diff --git a/_static/logo.png b/_static/logo.png new file mode 100644 index 00000000..1e902eed Binary files /dev/null and b/_static/logo.png differ diff 
--git a/_static/pygments.css b/_static/pygments.css index dd6621d8..997797f2 100644 --- a/_static/pygments.css +++ b/_static/pygments.css @@ -1,77 +1,152 @@ -.highlight .hll { background-color: #ffffcc } -.highlight { background: #f8f8f8; } -.highlight .c { color: #8f5902; font-style: italic } /* Comment */ -.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */ -.highlight .g { color: #000000 } /* Generic */ -.highlight .k { color: #004461; font-weight: bold } /* Keyword */ -.highlight .l { color: #000000 } /* Literal */ -.highlight .n { color: #000000 } /* Name */ -.highlight .o { color: #582800 } /* Operator */ -.highlight .x { color: #000000 } /* Other */ -.highlight .p { color: #000000; font-weight: bold } /* Punctuation */ -.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */ -.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */ -.highlight .cp { color: #8f5902 } /* Comment.Preproc */ -.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */ -.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */ -.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */ -.highlight .gd { color: #a40000 } /* Generic.Deleted */ -.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */ -.highlight .gr { color: #ef2929 } /* Generic.Error */ -.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ -.highlight .gi { color: #00A000 } /* Generic.Inserted */ -.highlight .go { color: #888888 } /* Generic.Output */ -.highlight .gp { color: #745334 } /* Generic.Prompt */ -.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */ -.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ -.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */ -.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */ -.highlight .kd { color: #004461; font-weight: bold 
} /* Keyword.Declaration */ -.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */ -.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */ -.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */ -.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */ -.highlight .ld { color: #000000 } /* Literal.Date */ -.highlight .m { color: #990000 } /* Literal.Number */ -.highlight .s { color: #4e9a06 } /* Literal.String */ -.highlight .na { color: #c4a000 } /* Name.Attribute */ -.highlight .nb { color: #004461 } /* Name.Builtin */ -.highlight .nc { color: #000000 } /* Name.Class */ -.highlight .no { color: #000000 } /* Name.Constant */ -.highlight .nd { color: #888888 } /* Name.Decorator */ -.highlight .ni { color: #ce5c00 } /* Name.Entity */ -.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */ -.highlight .nf { color: #000000 } /* Name.Function */ -.highlight .nl { color: #f57900 } /* Name.Label */ -.highlight .nn { color: #000000 } /* Name.Namespace */ -.highlight .nx { color: #000000 } /* Name.Other */ -.highlight .py { color: #000000 } /* Name.Property */ -.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */ -.highlight .nv { color: #000000 } /* Name.Variable */ -.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */ -.highlight .w { color: #f8f8f8; text-decoration: underline } /* Text.Whitespace */ -.highlight .mb { color: #990000 } /* Literal.Number.Bin */ -.highlight .mf { color: #990000 } /* Literal.Number.Float */ -.highlight .mh { color: #990000 } /* Literal.Number.Hex */ -.highlight .mi { color: #990000 } /* Literal.Number.Integer */ -.highlight .mo { color: #990000 } /* Literal.Number.Oct */ -.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */ -.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */ -.highlight .sc { color: #4e9a06 } /* Literal.String.Char */ -.highlight .dl { color: #4e9a06 } /* 
Literal.String.Delimiter */ -.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */ -.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */ -.highlight .se { color: #4e9a06 } /* Literal.String.Escape */ -.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */ -.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */ -.highlight .sx { color: #4e9a06 } /* Literal.String.Other */ -.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */ -.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */ -.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */ -.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */ -.highlight .fm { color: #000000 } /* Name.Function.Magic */ -.highlight .vc { color: #000000 } /* Name.Variable.Class */ -.highlight .vg { color: #000000 } /* Name.Variable.Global */ -.highlight .vi { color: #000000 } /* Name.Variable.Instance */ -.highlight .vm { color: #000000 } /* Name.Variable.Magic */ -.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */ \ No newline at end of file +html[data-theme="light"] .highlight pre { line-height: 125%; } +html[data-theme="light"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="light"] .highlight .hll { background-color: #7971292e } +html[data-theme="light"] .highlight { background: #fefefe; color: #545454 } +html[data-theme="light"] .highlight .c { color: #797129 } /* Comment */ +html[data-theme="light"] .highlight .err { color: #d91e18 
} /* Error */ +html[data-theme="light"] .highlight .k { color: #7928a1 } /* Keyword */ +html[data-theme="light"] .highlight .l { color: #797129 } /* Literal */ +html[data-theme="light"] .highlight .n { color: #545454 } /* Name */ +html[data-theme="light"] .highlight .o { color: #008000 } /* Operator */ +html[data-theme="light"] .highlight .p { color: #545454 } /* Punctuation */ +html[data-theme="light"] .highlight .ch { color: #797129 } /* Comment.Hashbang */ +html[data-theme="light"] .highlight .cm { color: #797129 } /* Comment.Multiline */ +html[data-theme="light"] .highlight .cp { color: #797129 } /* Comment.Preproc */ +html[data-theme="light"] .highlight .cpf { color: #797129 } /* Comment.PreprocFile */ +html[data-theme="light"] .highlight .c1 { color: #797129 } /* Comment.Single */ +html[data-theme="light"] .highlight .cs { color: #797129 } /* Comment.Special */ +html[data-theme="light"] .highlight .gd { color: #007faa } /* Generic.Deleted */ +html[data-theme="light"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="light"] .highlight .gh { color: #007faa } /* Generic.Heading */ +html[data-theme="light"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="light"] .highlight .gu { color: #007faa } /* Generic.Subheading */ +html[data-theme="light"] .highlight .kc { color: #7928a1 } /* Keyword.Constant */ +html[data-theme="light"] .highlight .kd { color: #7928a1 } /* Keyword.Declaration */ +html[data-theme="light"] .highlight .kn { color: #7928a1 } /* Keyword.Namespace */ +html[data-theme="light"] .highlight .kp { color: #7928a1 } /* Keyword.Pseudo */ +html[data-theme="light"] .highlight .kr { color: #7928a1 } /* Keyword.Reserved */ +html[data-theme="light"] .highlight .kt { color: #797129 } /* Keyword.Type */ +html[data-theme="light"] .highlight .ld { color: #797129 } /* Literal.Date */ +html[data-theme="light"] .highlight .m { color: #797129 } /* Literal.Number */ +html[data-theme="light"] .highlight .s { color: 
#008000 } /* Literal.String */ +html[data-theme="light"] .highlight .na { color: #797129 } /* Name.Attribute */ +html[data-theme="light"] .highlight .nb { color: #797129 } /* Name.Builtin */ +html[data-theme="light"] .highlight .nc { color: #007faa } /* Name.Class */ +html[data-theme="light"] .highlight .no { color: #007faa } /* Name.Constant */ +html[data-theme="light"] .highlight .nd { color: #797129 } /* Name.Decorator */ +html[data-theme="light"] .highlight .ni { color: #008000 } /* Name.Entity */ +html[data-theme="light"] .highlight .ne { color: #7928a1 } /* Name.Exception */ +html[data-theme="light"] .highlight .nf { color: #007faa } /* Name.Function */ +html[data-theme="light"] .highlight .nl { color: #797129 } /* Name.Label */ +html[data-theme="light"] .highlight .nn { color: #545454 } /* Name.Namespace */ +html[data-theme="light"] .highlight .nx { color: #545454 } /* Name.Other */ +html[data-theme="light"] .highlight .py { color: #007faa } /* Name.Property */ +html[data-theme="light"] .highlight .nt { color: #007faa } /* Name.Tag */ +html[data-theme="light"] .highlight .nv { color: #d91e18 } /* Name.Variable */ +html[data-theme="light"] .highlight .ow { color: #7928a1 } /* Operator.Word */ +html[data-theme="light"] .highlight .pm { color: #545454 } /* Punctuation.Marker */ +html[data-theme="light"] .highlight .w { color: #545454 } /* Text.Whitespace */ +html[data-theme="light"] .highlight .mb { color: #797129 } /* Literal.Number.Bin */ +html[data-theme="light"] .highlight .mf { color: #797129 } /* Literal.Number.Float */ +html[data-theme="light"] .highlight .mh { color: #797129 } /* Literal.Number.Hex */ +html[data-theme="light"] .highlight .mi { color: #797129 } /* Literal.Number.Integer */ +html[data-theme="light"] .highlight .mo { color: #797129 } /* Literal.Number.Oct */ +html[data-theme="light"] .highlight .sa { color: #008000 } /* Literal.String.Affix */ +html[data-theme="light"] .highlight .sb { color: #008000 } /* Literal.String.Backtick */ 
+html[data-theme="light"] .highlight .sc { color: #008000 } /* Literal.String.Char */ +html[data-theme="light"] .highlight .dl { color: #008000 } /* Literal.String.Delimiter */ +html[data-theme="light"] .highlight .sd { color: #008000 } /* Literal.String.Doc */ +html[data-theme="light"] .highlight .s2 { color: #008000 } /* Literal.String.Double */ +html[data-theme="light"] .highlight .se { color: #008000 } /* Literal.String.Escape */ +html[data-theme="light"] .highlight .sh { color: #008000 } /* Literal.String.Heredoc */ +html[data-theme="light"] .highlight .si { color: #008000 } /* Literal.String.Interpol */ +html[data-theme="light"] .highlight .sx { color: #008000 } /* Literal.String.Other */ +html[data-theme="light"] .highlight .sr { color: #d91e18 } /* Literal.String.Regex */ +html[data-theme="light"] .highlight .s1 { color: #008000 } /* Literal.String.Single */ +html[data-theme="light"] .highlight .ss { color: #007faa } /* Literal.String.Symbol */ +html[data-theme="light"] .highlight .bp { color: #797129 } /* Name.Builtin.Pseudo */ +html[data-theme="light"] .highlight .fm { color: #007faa } /* Name.Function.Magic */ +html[data-theme="light"] .highlight .vc { color: #d91e18 } /* Name.Variable.Class */ +html[data-theme="light"] .highlight .vg { color: #d91e18 } /* Name.Variable.Global */ +html[data-theme="light"] .highlight .vi { color: #d91e18 } /* Name.Variable.Instance */ +html[data-theme="light"] .highlight .vm { color: #797129 } /* Name.Variable.Magic */ +html[data-theme="light"] .highlight .il { color: #797129 } /* Literal.Number.Integer.Long */ +html[data-theme="dark"] .highlight pre { line-height: 125%; } +html[data-theme="dark"] .highlight td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight td.linenos .special { color: 
#000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +html[data-theme="dark"] .highlight .hll { background-color: #ffd9002e } +html[data-theme="dark"] .highlight { background: #2b2b2b; color: #f8f8f2 } +html[data-theme="dark"] .highlight .c { color: #ffd900 } /* Comment */ +html[data-theme="dark"] .highlight .err { color: #ffa07a } /* Error */ +html[data-theme="dark"] .highlight .k { color: #dcc6e0 } /* Keyword */ +html[data-theme="dark"] .highlight .l { color: #ffd900 } /* Literal */ +html[data-theme="dark"] .highlight .n { color: #f8f8f2 } /* Name */ +html[data-theme="dark"] .highlight .o { color: #abe338 } /* Operator */ +html[data-theme="dark"] .highlight .p { color: #f8f8f2 } /* Punctuation */ +html[data-theme="dark"] .highlight .ch { color: #ffd900 } /* Comment.Hashbang */ +html[data-theme="dark"] .highlight .cm { color: #ffd900 } /* Comment.Multiline */ +html[data-theme="dark"] .highlight .cp { color: #ffd900 } /* Comment.Preproc */ +html[data-theme="dark"] .highlight .cpf { color: #ffd900 } /* Comment.PreprocFile */ +html[data-theme="dark"] .highlight .c1 { color: #ffd900 } /* Comment.Single */ +html[data-theme="dark"] .highlight .cs { color: #ffd900 } /* Comment.Special */ +html[data-theme="dark"] .highlight .gd { color: #00e0e0 } /* Generic.Deleted */ +html[data-theme="dark"] .highlight .ge { font-style: italic } /* Generic.Emph */ +html[data-theme="dark"] .highlight .gh { color: #00e0e0 } /* Generic.Heading */ +html[data-theme="dark"] .highlight .gs { font-weight: bold } /* Generic.Strong */ +html[data-theme="dark"] .highlight .gu { color: #00e0e0 } /* Generic.Subheading */ +html[data-theme="dark"] .highlight .kc { color: #dcc6e0 } /* Keyword.Constant */ +html[data-theme="dark"] .highlight .kd { color: #dcc6e0 } /* Keyword.Declaration */ +html[data-theme="dark"] .highlight .kn { color: 
#dcc6e0 } /* Keyword.Namespace */ +html[data-theme="dark"] .highlight .kp { color: #dcc6e0 } /* Keyword.Pseudo */ +html[data-theme="dark"] .highlight .kr { color: #dcc6e0 } /* Keyword.Reserved */ +html[data-theme="dark"] .highlight .kt { color: #ffd900 } /* Keyword.Type */ +html[data-theme="dark"] .highlight .ld { color: #ffd900 } /* Literal.Date */ +html[data-theme="dark"] .highlight .m { color: #ffd900 } /* Literal.Number */ +html[data-theme="dark"] .highlight .s { color: #abe338 } /* Literal.String */ +html[data-theme="dark"] .highlight .na { color: #ffd900 } /* Name.Attribute */ +html[data-theme="dark"] .highlight .nb { color: #ffd900 } /* Name.Builtin */ +html[data-theme="dark"] .highlight .nc { color: #00e0e0 } /* Name.Class */ +html[data-theme="dark"] .highlight .no { color: #00e0e0 } /* Name.Constant */ +html[data-theme="dark"] .highlight .nd { color: #ffd900 } /* Name.Decorator */ +html[data-theme="dark"] .highlight .ni { color: #abe338 } /* Name.Entity */ +html[data-theme="dark"] .highlight .ne { color: #dcc6e0 } /* Name.Exception */ +html[data-theme="dark"] .highlight .nf { color: #00e0e0 } /* Name.Function */ +html[data-theme="dark"] .highlight .nl { color: #ffd900 } /* Name.Label */ +html[data-theme="dark"] .highlight .nn { color: #f8f8f2 } /* Name.Namespace */ +html[data-theme="dark"] .highlight .nx { color: #f8f8f2 } /* Name.Other */ +html[data-theme="dark"] .highlight .py { color: #00e0e0 } /* Name.Property */ +html[data-theme="dark"] .highlight .nt { color: #00e0e0 } /* Name.Tag */ +html[data-theme="dark"] .highlight .nv { color: #ffa07a } /* Name.Variable */ +html[data-theme="dark"] .highlight .ow { color: #dcc6e0 } /* Operator.Word */ +html[data-theme="dark"] .highlight .pm { color: #f8f8f2 } /* Punctuation.Marker */ +html[data-theme="dark"] .highlight .w { color: #f8f8f2 } /* Text.Whitespace */ +html[data-theme="dark"] .highlight .mb { color: #ffd900 } /* Literal.Number.Bin */ +html[data-theme="dark"] .highlight .mf { color: #ffd900 } /* 
Literal.Number.Float */ +html[data-theme="dark"] .highlight .mh { color: #ffd900 } /* Literal.Number.Hex */ +html[data-theme="dark"] .highlight .mi { color: #ffd900 } /* Literal.Number.Integer */ +html[data-theme="dark"] .highlight .mo { color: #ffd900 } /* Literal.Number.Oct */ +html[data-theme="dark"] .highlight .sa { color: #abe338 } /* Literal.String.Affix */ +html[data-theme="dark"] .highlight .sb { color: #abe338 } /* Literal.String.Backtick */ +html[data-theme="dark"] .highlight .sc { color: #abe338 } /* Literal.String.Char */ +html[data-theme="dark"] .highlight .dl { color: #abe338 } /* Literal.String.Delimiter */ +html[data-theme="dark"] .highlight .sd { color: #abe338 } /* Literal.String.Doc */ +html[data-theme="dark"] .highlight .s2 { color: #abe338 } /* Literal.String.Double */ +html[data-theme="dark"] .highlight .se { color: #abe338 } /* Literal.String.Escape */ +html[data-theme="dark"] .highlight .sh { color: #abe338 } /* Literal.String.Heredoc */ +html[data-theme="dark"] .highlight .si { color: #abe338 } /* Literal.String.Interpol */ +html[data-theme="dark"] .highlight .sx { color: #abe338 } /* Literal.String.Other */ +html[data-theme="dark"] .highlight .sr { color: #ffa07a } /* Literal.String.Regex */ +html[data-theme="dark"] .highlight .s1 { color: #abe338 } /* Literal.String.Single */ +html[data-theme="dark"] .highlight .ss { color: #00e0e0 } /* Literal.String.Symbol */ +html[data-theme="dark"] .highlight .bp { color: #ffd900 } /* Name.Builtin.Pseudo */ +html[data-theme="dark"] .highlight .fm { color: #00e0e0 } /* Name.Function.Magic */ +html[data-theme="dark"] .highlight .vc { color: #ffa07a } /* Name.Variable.Class */ +html[data-theme="dark"] .highlight .vg { color: #ffa07a } /* Name.Variable.Global */ +html[data-theme="dark"] .highlight .vi { color: #ffa07a } /* Name.Variable.Instance */ +html[data-theme="dark"] .highlight .vm { color: #ffd900 } /* Name.Variable.Magic */ +html[data-theme="dark"] .highlight .il { color: #ffd900 } /* 
Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/sbt-webpack-macros.html b/_static/sbt-webpack-macros.html new file mode 100644 index 00000000..6cbf559f --- /dev/null +++ b/_static/sbt-webpack-macros.html @@ -0,0 +1,11 @@ + +{% macro head_pre_bootstrap() %} + +{% endmacro %} + +{% macro body_post() %} + +{% endmacro %} diff --git a/_static/scripts/bootstrap.js b/_static/scripts/bootstrap.js new file mode 100644 index 00000000..4e209b0e --- /dev/null +++ b/_static/scripts/bootstrap.js @@ -0,0 +1,3 @@ +/*! For license information please see bootstrap.js.LICENSE.txt */ +(()=>{"use strict";var t={d:(e,i)=>{for(var n in i)t.o(i,n)&&!t.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:i[n]})},o:(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},e={};t.r(e),t.d(e,{afterMain:()=>E,afterRead:()=>v,afterWrite:()=>C,applyStyles:()=>$,arrow:()=>J,auto:()=>a,basePlacements:()=>l,beforeMain:()=>y,beforeRead:()=>_,beforeWrite:()=>A,bottom:()=>s,clippingParents:()=>d,computeStyles:()=>it,createPopper:()=>Dt,createPopperBase:()=>St,createPopperLite:()=>$t,detectOverflow:()=>_t,end:()=>h,eventListeners:()=>st,flip:()=>bt,hide:()=>wt,left:()=>r,main:()=>w,modifierPhases:()=>O,offset:()=>Et,placements:()=>g,popper:()=>f,popperGenerator:()=>Lt,popperOffsets:()=>At,preventOverflow:()=>Tt,read:()=>b,reference:()=>p,right:()=>o,start:()=>c,top:()=>n,variationPlacements:()=>m,viewport:()=>u,write:()=>T});var i={};t.r(i),t.d(i,{Alert:()=>Oe,Button:()=>ke,Carousel:()=>ri,Collapse:()=>yi,Dropdown:()=>Vi,Modal:()=>xn,Offcanvas:()=>Vn,Popover:()=>fs,ScrollSpy:()=>Ts,Tab:()=>Ks,Toast:()=>lo,Tooltip:()=>hs});var n="top",s="bottom",o="right",r="left",a="auto",l=[n,s,o,r],c="start",h="end",d="clippingParents",u="viewport",f="popper",p="reference",m=l.reduce((function(t,e){return 
t.concat([e+"-"+c,e+"-"+h])}),[]),g=[].concat(l,[a]).reduce((function(t,e){return t.concat([e,e+"-"+c,e+"-"+h])}),[]),_="beforeRead",b="read",v="afterRead",y="beforeMain",w="main",E="afterMain",A="beforeWrite",T="write",C="afterWrite",O=[_,b,v,y,w,E,A,T,C];function x(t){return t?(t.nodeName||"").toLowerCase():null}function k(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function L(t){return t instanceof k(t).Element||t instanceof Element}function S(t){return t instanceof k(t).HTMLElement||t instanceof HTMLElement}function D(t){return"undefined"!=typeof ShadowRoot&&(t instanceof k(t).ShadowRoot||t instanceof ShadowRoot)}const $={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];S(s)&&x(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return t[e]="",t}),{});S(n)&&x(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function I(t){return t.split("-")[0]}var N=Math.max,P=Math.min,M=Math.round;function j(){var t=navigator.userAgentData;return null!=t&&t.brands&&Array.isArray(t.brands)?t.brands.map((function(t){return t.brand+"/"+t.version})).join(" "):navigator.userAgent}function F(){return!/^((?!chrome|android).)*safari/i.test(j())}function H(t,e,i){void 
0===e&&(e=!1),void 0===i&&(i=!1);var n=t.getBoundingClientRect(),s=1,o=1;e&&S(t)&&(s=t.offsetWidth>0&&M(n.width)/t.offsetWidth||1,o=t.offsetHeight>0&&M(n.height)/t.offsetHeight||1);var r=(L(t)?k(t):window).visualViewport,a=!F()&&i,l=(n.left+(a&&r?r.offsetLeft:0))/s,c=(n.top+(a&&r?r.offsetTop:0))/o,h=n.width/s,d=n.height/o;return{width:h,height:d,top:c,right:l+h,bottom:c+d,left:l,x:l,y:c}}function B(t){var e=H(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function W(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&D(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function z(t){return k(t).getComputedStyle(t)}function R(t){return["table","td","th"].indexOf(x(t))>=0}function q(t){return((L(t)?t.ownerDocument:t.document)||window.document).documentElement}function V(t){return"html"===x(t)?t:t.assignedSlot||t.parentNode||(D(t)?t.host:null)||q(t)}function Y(t){return S(t)&&"fixed"!==z(t).position?t.offsetParent:null}function K(t){for(var e=k(t),i=Y(t);i&&R(i)&&"static"===z(i).position;)i=Y(i);return i&&("html"===x(i)||"body"===x(i)&&"static"===z(i).position)?e:i||function(t){var e=/firefox/i.test(j());if(/Trident/i.test(j())&&S(t)&&"fixed"===z(t).position)return null;var i=V(t);for(D(i)&&(i=i.host);S(i)&&["html","body"].indexOf(x(i))<0;){var n=z(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function Q(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}function X(t,e,i){return N(t,P(e,i))}function U(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function G(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const J={name:"arrow",enabled:!0,phase:"main",fn:function(t){var 
e,i=t.state,a=t.name,c=t.options,h=i.elements.arrow,d=i.modifiersData.popperOffsets,u=I(i.placement),f=Q(u),p=[r,o].indexOf(u)>=0?"height":"width";if(h&&d){var m=function(t,e){return U("number"!=typeof(t="function"==typeof t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:G(t,l))}(c.padding,i),g=B(h),_="y"===f?n:r,b="y"===f?s:o,v=i.rects.reference[p]+i.rects.reference[f]-d[f]-i.rects.popper[p],y=d[f]-i.rects.reference[f],w=K(h),E=w?"y"===f?w.clientHeight||0:w.clientWidth||0:0,A=v/2-y/2,T=m[_],C=E-g[p]-m[b],O=E/2-g[p]/2+A,x=X(T,O,C),k=f;i.modifiersData[a]=((e={})[k]=x,e.centerOffset=x-O,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&W(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function Z(t){return t.split("-")[1]}var tt={top:"auto",right:"auto",bottom:"auto",left:"auto"};function et(t){var e,i=t.popper,a=t.popperRect,l=t.placement,c=t.variation,d=t.offsets,u=t.position,f=t.gpuAcceleration,p=t.adaptive,m=t.roundOffsets,g=t.isFixed,_=d.x,b=void 0===_?0:_,v=d.y,y=void 0===v?0:v,w="function"==typeof m?m({x:b,y}):{x:b,y};b=w.x,y=w.y;var E=d.hasOwnProperty("x"),A=d.hasOwnProperty("y"),T=r,C=n,O=window;if(p){var x=K(i),L="clientHeight",S="clientWidth";x===k(i)&&"static"!==z(x=q(i)).position&&"absolute"===u&&(L="scrollHeight",S="scrollWidth"),(l===n||(l===r||l===o)&&c===h)&&(C=s,y-=(g&&x===O&&O.visualViewport?O.visualViewport.height:x[L])-a.height,y*=f?1:-1),l!==r&&(l!==n&&l!==s||c!==h)||(T=o,b-=(g&&x===O&&O.visualViewport?O.visualViewport.width:x[S])-a.width,b*=f?1:-1)}var D,$=Object.assign({position:u},p&&tt),I=!0===m?function(t,e){var i=t.x,n=t.y,s=e.devicePixelRatio||1;return{x:M(i*s)/s||0,y:M(n*s)/s||0}}({x:b,y},k(i)):{x:b,y};return b=I.x,y=I.y,f?Object.assign({},$,((D={})[C]=A?"0":"",D[T]=E?"0":"",D.transform=(O.devicePixelRatio||1)<=1?"translate("+b+"px, 
"+y+"px)":"translate3d("+b+"px, "+y+"px, 0)",D)):Object.assign({},$,((e={})[C]=A?y+"px":"",e[T]=E?b+"px":"",e.transform="",e))}const it={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:I(e.placement),variation:Z(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s,isFixed:"fixed"===e.options.strategy};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,et(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,et(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var nt={passive:!0};const st={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=k(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,nt)})),a&&l.addEventListener("resize",i.update,nt),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,nt)})),a&&l.removeEventListener("resize",i.update,nt)}},data:{}};var ot={left:"right",right:"left",bottom:"top",top:"bottom"};function rt(t){return t.replace(/left|right|bottom|top/g,(function(t){return ot[t]}))}var at={start:"end",end:"start"};function lt(t){return t.replace(/start|end/g,(function(t){return at[t]}))}function ct(t){var e=k(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function ht(t){return H(q(t)).left+ct(t).scrollLeft}function dt(t){var 
e=z(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function ut(t){return["html","body","#document"].indexOf(x(t))>=0?t.ownerDocument.body:S(t)&&dt(t)?t:ut(V(t))}function ft(t,e){var i;void 0===e&&(e=[]);var n=ut(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=k(n),r=s?[o].concat(o.visualViewport||[],dt(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(ft(V(r)))}function pt(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function mt(t,e,i){return e===u?pt(function(t,e){var i=k(t),n=q(t),s=i.visualViewport,o=n.clientWidth,r=n.clientHeight,a=0,l=0;if(s){o=s.width,r=s.height;var c=F();(c||!c&&"fixed"===e)&&(a=s.offsetLeft,l=s.offsetTop)}return{width:o,height:r,x:a+ht(t),y:l}}(t,i)):L(e)?function(t,e){var i=H(t,!1,"fixed"===e);return i.top=i.top+t.clientTop,i.left=i.left+t.clientLeft,i.bottom=i.top+t.clientHeight,i.right=i.left+t.clientWidth,i.width=t.clientWidth,i.height=t.clientHeight,i.x=i.left,i.y=i.top,i}(e,i):pt(function(t){var e,i=q(t),n=ct(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=N(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=N(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+ht(t),l=-n.scrollTop;return"rtl"===z(s||i).direction&&(a+=N(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(q(t)))}function gt(t){var e,i=t.reference,a=t.element,l=t.placement,d=l?I(l):null,u=l?Z(l):null,f=i.x+i.width/2-a.width/2,p=i.y+i.height/2-a.height/2;switch(d){case n:e={x:f,y:i.y-a.height};break;case s:e={x:f,y:i.y+i.height};break;case o:e={x:i.x+i.width,y:p};break;case r:e={x:i.x-a.width,y:p};break;default:e={x:i.x,y:i.y}}var m=d?Q(d):null;if(null!=m){var g="y"===m?"height":"width";switch(u){case c:e[m]=e[m]-(i[g]/2-a[g]/2);break;case h:e[m]=e[m]+(i[g]/2-a[g]/2)}}return e}function _t(t,e){void 0===e&&(e={});var i=e,r=i.placement,a=void 0===r?t.placement:r,c=i.strategy,h=void 0===c?t.strategy:c,m=i.boundary,g=void 
0===m?d:m,_=i.rootBoundary,b=void 0===_?u:_,v=i.elementContext,y=void 0===v?f:v,w=i.altBoundary,E=void 0!==w&&w,A=i.padding,T=void 0===A?0:A,C=U("number"!=typeof T?T:G(T,l)),O=y===f?p:f,k=t.rects.popper,D=t.elements[E?O:y],$=function(t,e,i,n){var s="clippingParents"===e?function(t){var e=ft(V(t)),i=["absolute","fixed"].indexOf(z(t).position)>=0&&S(t)?K(t):t;return L(i)?e.filter((function(t){return L(t)&&W(t,i)&&"body"!==x(t)})):[]}(t):[].concat(e),o=[].concat(s,[i]),r=o[0],a=o.reduce((function(e,i){var s=mt(t,i,n);return e.top=N(s.top,e.top),e.right=P(s.right,e.right),e.bottom=P(s.bottom,e.bottom),e.left=N(s.left,e.left),e}),mt(t,r,n));return a.width=a.right-a.left,a.height=a.bottom-a.top,a.x=a.left,a.y=a.top,a}(L(D)?D:D.contextElement||q(t.elements.popper),g,b,h),I=H(t.elements.reference),M=gt({reference:I,element:k,strategy:"absolute",placement:a}),j=pt(Object.assign({},k,M)),F=y===f?j:I,B={top:$.top-F.top+C.top,bottom:F.bottom-$.bottom+C.bottom,left:$.left-F.left+C.left,right:F.right-$.right+C.right},R=t.modifiersData.offset;if(y===f&&R){var Y=R[a];Object.keys(B).forEach((function(t){var e=[o,s].indexOf(t)>=0?1:-1,i=[n,s].indexOf(t)>=0?"y":"x";B[t]+=Y[i]*e}))}return B}const bt={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,h=t.name;if(!e.modifiersData[h]._skip){for(var d=i.mainAxis,u=void 0===d||d,f=i.altAxis,p=void 0===f||f,_=i.fallbackPlacements,b=i.padding,v=i.boundary,y=i.rootBoundary,w=i.altBoundary,E=i.flipVariations,A=void 0===E||E,T=i.allowedAutoPlacements,C=e.options.placement,O=I(C),x=_||(O!==C&&A?function(t){if(I(t)===a)return[];var e=rt(t);return[lt(t),e,lt(e)]}(C):[rt(C)]),k=[C].concat(x).reduce((function(t,i){return t.concat(I(i)===a?function(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,a=i.flipVariations,c=i.allowedAutoPlacements,h=void 0===c?g:c,d=Z(n),u=d?a?m:m.filter((function(t){return Z(t)===d})):l,f=u.filter((function(t){return 
h.indexOf(t)>=0}));0===f.length&&(f=u);var p=f.reduce((function(e,i){return e[i]=_t(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[I(i)],e}),{});return Object.keys(p).sort((function(t,e){return p[t]-p[e]}))}(e,{placement:i,boundary:v,rootBoundary:y,padding:b,flipVariations:A,allowedAutoPlacements:T}):i)}),[]),L=e.rects.reference,S=e.rects.popper,D=new Map,$=!0,N=k[0],P=0;P=0,B=H?"width":"height",W=_t(e,{placement:M,boundary:v,rootBoundary:y,altBoundary:w,padding:b}),z=H?F?o:r:F?s:n;L[B]>S[B]&&(z=rt(z));var R=rt(z),q=[];if(u&&q.push(W[j]<=0),p&&q.push(W[z]<=0,W[R]<=0),q.every((function(t){return t}))){N=M,$=!1;break}D.set(M,q)}if($)for(var V=function(t){var e=k.find((function(e){var i=D.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return N=e,"break"},Y=A?3:1;Y>0&&"break"!==V(Y);Y--);e.placement!==N&&(e.modifiersData[h]._skip=!0,e.placement=N,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function vt(t,e,i){return void 0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function yt(t){return[n,o,s,r].some((function(e){return t[e]>=0}))}const wt={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=_t(e,{elementContext:"reference"}),a=_t(e,{altBoundary:!0}),l=vt(r,n),c=vt(a,s,o),h=yt(l),d=yt(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:d},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":d})}},Et={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,i=t.options,s=t.name,a=i.offset,l=void 0===a?[0,0]:a,c=g.reduce((function(t,i){return t[i]=function(t,e,i){var s=I(t),a=[r,n].indexOf(s)>=0?-1:1,l="function"==typeof 
i?i(Object.assign({},e,{placement:t})):i,c=l[0],h=l[1];return c=c||0,h=(h||0)*a,[r,o].indexOf(s)>=0?{x:h,y:c}:{x:c,y:h}}(i,e.rects,l),t}),{}),h=c[e.placement],d=h.x,u=h.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=d,e.modifiersData.popperOffsets.y+=u),e.modifiersData[s]=c}},At={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=gt({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},Tt={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,a=t.name,l=i.mainAxis,h=void 0===l||l,d=i.altAxis,u=void 0!==d&&d,f=i.boundary,p=i.rootBoundary,m=i.altBoundary,g=i.padding,_=i.tether,b=void 0===_||_,v=i.tetherOffset,y=void 0===v?0:v,w=_t(e,{boundary:f,rootBoundary:p,padding:g,altBoundary:m}),E=I(e.placement),A=Z(e.placement),T=!A,C=Q(E),O="x"===C?"y":"x",x=e.modifiersData.popperOffsets,k=e.rects.reference,L=e.rects.popper,S="function"==typeof y?y(Object.assign({},e.rects,{placement:e.placement})):y,D="number"==typeof S?{mainAxis:S,altAxis:S}:Object.assign({mainAxis:0,altAxis:0},S),$=e.modifiersData.offset?e.modifiersData.offset[e.placement]:null,M={x:0,y:0};if(x){if(h){var j,F="y"===C?n:r,H="y"===C?s:o,W="y"===C?"height":"width",z=x[C],R=z+w[F],q=z-w[H],V=b?-L[W]/2:0,Y=A===c?k[W]:L[W],U=A===c?-L[W]:-k[W],G=e.elements.arrow,J=b&&G?B(G):{width:0,height:0},tt=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},et=tt[F],it=tt[H],nt=X(0,k[W],J[W]),st=T?k[W]/2-V-nt-et-D.mainAxis:Y-nt-et-D.mainAxis,ot=T?-k[W]/2+V+nt+it+D.mainAxis:U+nt+it+D.mainAxis,rt=e.elements.arrow&&K(e.elements.arrow),at=rt?"y"===C?rt.clientTop||0:rt.clientLeft||0:0,lt=null!=(j=null==$?void 0:$[C])?j:0,ct=z+ot-lt,ht=X(b?P(R,z+st-lt-at):R,z,b?N(q,ct):q);x[C]=ht,M[C]=ht-z}if(u){var 
dt,ut="x"===C?n:r,ft="x"===C?s:o,pt=x[O],mt="y"===O?"height":"width",gt=pt+w[ut],bt=pt-w[ft],vt=-1!==[n,r].indexOf(E),yt=null!=(dt=null==$?void 0:$[O])?dt:0,wt=vt?gt:pt-k[mt]-L[mt]-yt+D.altAxis,Et=vt?pt+k[mt]+L[mt]-yt-D.altAxis:bt,At=b&&vt?function(t,e,i){var n=X(t,e,i);return n>i?i:n}(wt,pt,Et):X(b?wt:gt,pt,b?Et:bt);x[O]=At,M[O]=At-pt}e.modifiersData[a]=M}},requiresIfExists:["offset"]};function Ct(t,e,i){void 0===i&&(i=!1);var n,s,o=S(e),r=S(e)&&function(t){var e=t.getBoundingClientRect(),i=M(e.width)/t.offsetWidth||1,n=M(e.height)/t.offsetHeight||1;return 1!==i||1!==n}(e),a=q(e),l=H(t,r,i),c={scrollLeft:0,scrollTop:0},h={x:0,y:0};return(o||!o&&!i)&&(("body"!==x(e)||dt(a))&&(c=(n=e)!==k(n)&&S(n)?{scrollLeft:(s=n).scrollLeft,scrollTop:s.scrollTop}:ct(n)),S(e)?((h=H(e,!0)).x+=e.clientLeft,h.y+=e.clientTop):a&&(h.x=ht(a))),{x:l.left+c.scrollLeft-h.x,y:l.top+c.scrollTop-h.y,width:l.width,height:l.height}}function Ot(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var xt={placement:"bottom",modifiers:[],strategy:"absolute"};function kt(){for(var t=arguments.length,e=new Array(t),i=0;iIt.has(t)&&It.get(t).get(e)||null,remove(t,e){if(!It.has(t))return;const i=It.get(t);i.delete(e),0===i.size&&It.delete(t)}},Pt="transitionend",Mt=t=>(t&&window.CSS&&window.CSS.escape&&(t=t.replace(/#([^\s"#']+)/g,((t,e)=>`#${CSS.escape(e)}`))),t),jt=t=>{t.dispatchEvent(new Event(Pt))},Ft=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),Ht=t=>Ft(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(Mt(t)):null,Bt=t=>{if(!Ft(t)||0===t.getClientRects().length)return!1;const e="visible"===getComputedStyle(t).getPropertyValue("visibility"),i=t.closest("details:not([open])");if(!i)return e;if(i!==t){const 
e=t.closest("summary");if(e&&e.parentNode!==i)return!1;if(null===e)return!1}return e},Wt=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),zt=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?zt(t.parentNode):null},Rt=()=>{},qt=t=>{t.offsetHeight},Vt=()=>window.jQuery&&!document.body.hasAttribute("data-bs-no-jquery")?window.jQuery:null,Yt=[],Kt=()=>"rtl"===document.documentElement.dir,Qt=t=>{var e;e=()=>{const e=Vt();if(e){const i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(Yt.length||document.addEventListener("DOMContentLoaded",(()=>{for(const t of Yt)t()})),Yt.push(e)):e()},Xt=(t,e=[],i=t)=>"function"==typeof t?t(...e):i,Ut=(t,e,i=!0)=>{if(!i)return void Xt(t);const n=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(e)+5;let s=!1;const o=({target:i})=>{i===e&&(s=!0,e.removeEventListener(Pt,o),Xt(t))};e.addEventListener(Pt,o),setTimeout((()=>{s||jt(e)}),n)},Gt=(t,e,i,n)=>{const s=t.length;let o=t.indexOf(e);return-1===o?!i&&n?t[s-1]:t[0]:(o+=i?1:-1,n&&(o=(o+s)%s),t[Math.max(0,Math.min(o,s-1))])},Jt=/[^.]*(?=\..*)\.|.*/,Zt=/\..*/,te=/::\d+$/,ee={};let ie=1;const ne={mouseenter:"mouseover",mouseleave:"mouseout"},se=new 
Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function oe(t,e){return e&&`${e}::${ie++}`||t.uidEvent||ie++}function re(t){const e=oe(t);return t.uidEvent=e,ee[e]=ee[e]||{},ee[e]}function ae(t,e,i=null){return Object.values(t).find((t=>t.callable===e&&t.delegationSelector===i))}function le(t,e,i){const n="string"==typeof e,s=n?i:e||i;let o=ue(t);return se.has(o)||(o=t),[n,s,o]}function ce(t,e,i,n,s){if("string"!=typeof e||!t)return;let[o,r,a]=le(e,i,n);if(e in ne){const t=t=>function(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};r=t(r)}const l=re(t),c=l[a]||(l[a]={}),h=ae(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const d=oe(r,e.replace(Jt,"")),u=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(const a of o)if(a===r)return pe(s,{delegateTarget:r}),n.oneOff&&fe.off(t,s.type,e,i),i.apply(r,[s])}}(t,i,r):function(t,e){return function i(n){return pe(n,{delegateTarget:t}),i.oneOff&&fe.off(t,n.type,e),e.apply(t,[n])}}(t,r);u.delegationSelector=o?i:null,u.callable=r,u.oneOff=s,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function he(t,e,i,n,s){const o=ae(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function de(t,e,i,n){const s=e[i]||{};for(const[o,r]of Object.entries(s))o.includes(n)&&he(t,e,i,r.callable,r.delegationSelector)}function ue(t){return t=t.replace(Zt,""),ne[t]||t}const 
fe={on(t,e,i,n){ce(t,e,i,n,!1)},one(t,e,i,n){ce(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=le(e,i,n),a=r!==e,l=re(t),c=l[r]||{},h=e.startsWith(".");if(void 0===o){if(h)for(const i of Object.keys(l))de(t,l,i,e.slice(1));for(const[i,n]of Object.entries(c)){const s=i.replace(te,"");a&&!e.includes(s)||he(t,l,r,n.callable,n.delegationSelector)}}else{if(!Object.keys(c).length)return;he(t,l,r,o,s?i:null)}},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=Vt();let s=null,o=!0,r=!0,a=!1;e!==ue(e)&&n&&(s=n.Event(e,i),n(t).trigger(s),o=!s.isPropagationStopped(),r=!s.isImmediatePropagationStopped(),a=s.isDefaultPrevented());const l=pe(new Event(e,{bubbles:o,cancelable:!0}),i);return a&&l.preventDefault(),r&&t.dispatchEvent(l),l.defaultPrevented&&s&&s.preventDefault(),l}};function pe(t,e={}){for(const[i,n]of Object.entries(e))try{t[i]=n}catch(e){Object.defineProperty(t,i,{configurable:!0,get:()=>n})}return t}function me(t){if("true"===t)return!0;if("false"===t)return!1;if(t===Number(t).toString())return Number(t);if(""===t||"null"===t)return null;if("string"!=typeof t)return t;try{return JSON.parse(decodeURIComponent(t))}catch(e){return t}}function ge(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}const _e={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${ge(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${ge(e)}`)},getDataAttributes(t){if(!t)return{};const e={},i=Object.keys(t.dataset).filter((t=>t.startsWith("bs")&&!t.startsWith("bsConfig")));for(const n of i){let i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),e[i]=me(t.dataset[n])}return e},getDataAttribute:(t,e)=>me(t.getAttribute(`data-bs-${ge(e)}`))};class be{static get Default(){return{}}static get DefaultType(){return{}}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}_getConfig(t){return 
t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t}_mergeConfigObj(t,e){const i=Ft(e)?_e.getDataAttribute(e,"config"):{};return{...this.constructor.Default,..."object"==typeof i?i:{},...Ft(e)?_e.getDataAttributes(e):{},..."object"==typeof t?t:{}}}_typeCheckConfig(t,e=this.constructor.DefaultType){for(const[n,s]of Object.entries(e)){const e=t[n],o=Ft(e)?"element":null==(i=e)?`${i}`:Object.prototype.toString.call(i).match(/\s([a-z]+)/i)[1].toLowerCase();if(!new RegExp(s).test(o))throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option "${n}" provided type "${o}" but expected type "${s}".`)}var i}}class ve extends be{constructor(t,e){super(),(t=Ht(t))&&(this._element=t,this._config=this._getConfig(e),Nt.set(this._element,this.constructor.DATA_KEY,this))}dispose(){Nt.remove(this._element,this.constructor.DATA_KEY),fe.off(this._element,this.constructor.EVENT_KEY);for(const t of Object.getOwnPropertyNames(this))this[t]=null}_queueCallback(t,e,i=!0){Ut(t,e,i)}_getConfig(t){return t=this._mergeConfigObj(t,this._element),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}static getInstance(t){return Nt.get(Ht(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.3.2"}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}static eventName(t){return`${t}${this.EVENT_KEY}`}}const ye=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?Mt(i.trim()):null}return 
e},we={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode.closest(e);for(;n;)i.push(n),n=n.parentNode.closest(e);return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(",");return this.find(e,t).filter((t=>!Wt(t)&&Bt(t)))},getSelectorFromElement(t){const e=ye(t);return e&&we.findOne(e)?e:null},getElementFromSelector(t){const e=ye(t);return e?we.findOne(e):null},getMultipleElementsFromSelector(t){const e=ye(t);return e?we.find(e):[]}},Ee=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,n=t.NAME;fe.on(document,i,`[data-bs-dismiss="${n}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),Wt(this))return;const s=we.getElementFromSelector(this)||this.closest(`.${n}`);t.getOrCreateInstance(s)[e]()}))},Ae=".bs.alert",Te=`close${Ae}`,Ce=`closed${Ae}`;class Oe extends ve{static get NAME(){return"alert"}close(){if(fe.trigger(this._element,Te).defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),fe.trigger(this._element,Ce),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=Oe.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}Ee(Oe,"close"),Qt(Oe);const 
xe='[data-bs-toggle="button"]';class ke extends ve{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=ke.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}fe.on(document,"click.bs.button.data-api",xe,(t=>{t.preventDefault();const e=t.target.closest(xe);ke.getOrCreateInstance(e).toggle()})),Qt(ke);const Le=".bs.swipe",Se=`touchstart${Le}`,De=`touchmove${Le}`,$e=`touchend${Le}`,Ie=`pointerdown${Le}`,Ne=`pointerup${Le}`,Pe={endCallback:null,leftCallback:null,rightCallback:null},Me={endCallback:"(function|null)",leftCallback:"(function|null)",rightCallback:"(function|null)"};class je extends be{constructor(t,e){super(),this._element=t,t&&je.isSupported()&&(this._config=this._getConfig(e),this._deltaX=0,this._supportPointerEvents=Boolean(window.PointerEvent),this._initEvents())}static get Default(){return Pe}static get DefaultType(){return Me}static get NAME(){return"swipe"}dispose(){fe.off(this._element,Le)}_start(t){this._supportPointerEvents?this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX):this._deltaX=t.touches[0].clientX}_end(t){this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX-this._deltaX),this._handleSwipe(),Xt(this._config.endCallback)}_move(t){this._deltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this._deltaX}_handleSwipe(){const t=Math.abs(this._deltaX);if(t<=40)return;const e=t/this._deltaX;this._deltaX=0,e&&Xt(e>0?this._config.rightCallback:this._config.leftCallback)}_initEvents(){this._supportPointerEvents?(fe.on(this._element,Ie,(t=>this._start(t))),fe.on(this._element,Ne,(t=>this._end(t))),this._element.classList.add("pointer-event")):(fe.on(this._element,Se,(t=>this._start(t))),fe.on(this._element,De,(t=>this._move(t))),fe.on(this._element,$e,(t=>this._end(t))))}_eventIsPointerPenTouch(t){return this._supportPointerEvents&&("pen"===t.pointerType||"touch"===t.pointerType)}static 
isSupported(){return"ontouchstart"in document.documentElement||navigator.maxTouchPoints>0}}const Fe=".bs.carousel",He=".data-api",Be="next",We="prev",ze="left",Re="right",qe=`slide${Fe}`,Ve=`slid${Fe}`,Ye=`keydown${Fe}`,Ke=`mouseenter${Fe}`,Qe=`mouseleave${Fe}`,Xe=`dragstart${Fe}`,Ue=`load${Fe}${He}`,Ge=`click${Fe}${He}`,Je="carousel",Ze="active",ti=".active",ei=".carousel-item",ii=ti+ei,ni={ArrowLeft:Re,ArrowRight:ze},si={interval:5e3,keyboard:!0,pause:"hover",ride:!1,touch:!0,wrap:!0},oi={interval:"(number|boolean)",keyboard:"boolean",pause:"(string|boolean)",ride:"(boolean|string)",touch:"boolean",wrap:"boolean"};class ri extends ve{constructor(t,e){super(t,e),this._interval=null,this._activeElement=null,this._isSliding=!1,this.touchTimeout=null,this._swipeHelper=null,this._indicatorsElement=we.findOne(".carousel-indicators",this._element),this._addEventListeners(),this._config.ride===Je&&this.cycle()}static get Default(){return si}static get DefaultType(){return oi}static get NAME(){return"carousel"}next(){this._slide(Be)}nextWhenVisible(){!document.hidden&&Bt(this._element)&&this.next()}prev(){this._slide(We)}pause(){this._isSliding&&jt(this._element),this._clearInterval()}cycle(){this._clearInterval(),this._updateInterval(),this._interval=setInterval((()=>this.nextWhenVisible()),this._config.interval)}_maybeEnableCycle(){this._config.ride&&(this._isSliding?fe.one(this._element,Ve,(()=>this.cycle())):this.cycle())}to(t){const e=this._getItems();if(t>e.length-1||t<0)return;if(this._isSliding)return void fe.one(this._element,Ve,(()=>this.to(t)));const i=this._getItemIndex(this._getActive());if(i===t)return;const n=t>i?Be:We;this._slide(n,e[t])}dispose(){this._swipeHelper&&this._swipeHelper.dispose(),super.dispose()}_configAfterMerge(t){return 
t.defaultInterval=t.interval,t}_addEventListeners(){this._config.keyboard&&fe.on(this._element,Ye,(t=>this._keydown(t))),"hover"===this._config.pause&&(fe.on(this._element,Ke,(()=>this.pause())),fe.on(this._element,Qe,(()=>this._maybeEnableCycle()))),this._config.touch&&je.isSupported()&&this._addTouchEventListeners()}_addTouchEventListeners(){for(const t of we.find(".carousel-item img",this._element))fe.on(t,Xe,(t=>t.preventDefault()));const t={leftCallback:()=>this._slide(this._directionToOrder(ze)),rightCallback:()=>this._slide(this._directionToOrder(Re)),endCallback:()=>{"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((()=>this._maybeEnableCycle()),500+this._config.interval))}};this._swipeHelper=new je(this._element,t)}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=ni[t.key];e&&(t.preventDefault(),this._slide(this._directionToOrder(e)))}_getItemIndex(t){return this._getItems().indexOf(t)}_setActiveIndicatorElement(t){if(!this._indicatorsElement)return;const e=we.findOne(ti,this._indicatorsElement);e.classList.remove(Ze),e.removeAttribute("aria-current");const i=we.findOne(`[data-bs-slide-to="${t}"]`,this._indicatorsElement);i&&(i.classList.add(Ze),i.setAttribute("aria-current","true"))}_updateInterval(){const t=this._activeElement||this._getActive();if(!t)return;const e=Number.parseInt(t.getAttribute("data-bs-interval"),10);this._config.interval=e||this._config.defaultInterval}_slide(t,e=null){if(this._isSliding)return;const i=this._getActive(),n=t===Be,s=e||Gt(this._getItems(),i,n,this._config.wrap);if(s===i)return;const o=this._getItemIndex(s),r=e=>fe.trigger(this._element,e,{relatedTarget:s,direction:this._orderToDirection(t),from:this._getItemIndex(i),to:o});if(r(qe).defaultPrevented)return;if(!i||!s)return;const a=Boolean(this._interval);this.pause(),this._isSliding=!0,this._setActiveIndicatorElement(o),this._activeElement=s;const 
l=n?"carousel-item-start":"carousel-item-end",c=n?"carousel-item-next":"carousel-item-prev";s.classList.add(c),qt(s),i.classList.add(l),s.classList.add(l),this._queueCallback((()=>{s.classList.remove(l,c),s.classList.add(Ze),i.classList.remove(Ze,c,l),this._isSliding=!1,r(Ve)}),i,this._isAnimated()),a&&this.cycle()}_isAnimated(){return this._element.classList.contains("slide")}_getActive(){return we.findOne(ii,this._element)}_getItems(){return we.find(ei,this._element)}_clearInterval(){this._interval&&(clearInterval(this._interval),this._interval=null)}_directionToOrder(t){return Kt()?t===ze?We:Be:t===ze?Be:We}_orderToDirection(t){return Kt()?t===We?ze:Re:t===We?Re:ze}static jQueryInterface(t){return this.each((function(){const e=ri.getOrCreateInstance(this,t);if("number"!=typeof t){if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}else e.to(t)}))}}fe.on(document,Ge,"[data-bs-slide], [data-bs-slide-to]",(function(t){const e=we.getElementFromSelector(this);if(!e||!e.classList.contains(Je))return;t.preventDefault();const i=ri.getOrCreateInstance(e),n=this.getAttribute("data-bs-slide-to");return n?(i.to(n),void i._maybeEnableCycle()):"next"===_e.getDataAttribute(this,"slide")?(i.next(),void i._maybeEnableCycle()):(i.prev(),void i._maybeEnableCycle())})),fe.on(window,Ue,(()=>{const t=we.find('[data-bs-ride="carousel"]');for(const e of t)ri.getOrCreateInstance(e)})),Qt(ri);const ai=".bs.collapse",li=`show${ai}`,ci=`shown${ai}`,hi=`hide${ai}`,di=`hidden${ai}`,ui=`click${ai}.data-api`,fi="show",pi="collapse",mi="collapsing",gi=`:scope .${pi} .${pi}`,_i='[data-bs-toggle="collapse"]',bi={parent:null,toggle:!0},vi={parent:"(null|element)",toggle:"boolean"};class yi extends ve{constructor(t,e){super(t,e),this._isTransitioning=!1,this._triggerArray=[];const i=we.find(_i);for(const t of i){const 
e=we.getSelectorFromElement(t),i=we.find(e).filter((t=>t===this._element));null!==e&&i.length&&this._triggerArray.push(t)}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return bi}static get DefaultType(){return vi}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t=[];if(this._config.parent&&(t=this._getFirstLevelChildren(".collapse.show, .collapse.collapsing").filter((t=>t!==this._element)).map((t=>yi.getOrCreateInstance(t,{toggle:!1})))),t.length&&t[0]._isTransitioning)return;if(fe.trigger(this._element,li).defaultPrevented)return;for(const e of t)e.hide();const e=this._getDimension();this._element.classList.remove(pi),this._element.classList.add(mi),this._element.style[e]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const i=`scroll${e[0].toUpperCase()+e.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(mi),this._element.classList.add(pi,fi),this._element.style[e]="",fe.trigger(this._element,ci)}),this._element,!0),this._element.style[e]=`${this._element[i]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(fe.trigger(this._element,hi).defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,qt(this._element),this._element.classList.add(mi),this._element.classList.remove(pi,fi);for(const t of this._triggerArray){const e=we.getElementFromSelector(t);e&&!this._isShown(e)&&this._addAriaAndCollapsedClass([t],!1)}this._isTransitioning=!0,this._element.style[t]="",this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(mi),this._element.classList.add(pi),fe.trigger(this._element,di)}),this._element,!0)}_isShown(t=this._element){return 
t.classList.contains(fi)}_configAfterMerge(t){return t.toggle=Boolean(t.toggle),t.parent=Ht(t.parent),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=this._getFirstLevelChildren(_i);for(const e of t){const t=we.getElementFromSelector(e);t&&this._addAriaAndCollapsedClass([e],this._isShown(t))}}_getFirstLevelChildren(t){const e=we.find(gi,this._config.parent);return we.find(t,this._config.parent).filter((t=>!e.includes(t)))}_addAriaAndCollapsedClass(t,e){if(t.length)for(const i of t)i.classList.toggle("collapsed",!e),i.setAttribute("aria-expanded",e)}static jQueryInterface(t){const e={};return"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1),this.each((function(){const i=yi.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}fe.on(document,ui,_i,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();for(const t of we.getMultipleElementsFromSelector(this))yi.getOrCreateInstance(t,{toggle:!1}).toggle()})),Qt(yi);const wi="dropdown",Ei=".bs.dropdown",Ai=".data-api",Ti="ArrowUp",Ci="ArrowDown",Oi=`hide${Ei}`,xi=`hidden${Ei}`,ki=`show${Ei}`,Li=`shown${Ei}`,Si=`click${Ei}${Ai}`,Di=`keydown${Ei}${Ai}`,$i=`keyup${Ei}${Ai}`,Ii="show",Ni='[data-bs-toggle="dropdown"]:not(.disabled):not(:disabled)',Pi=`${Ni}.${Ii}`,Mi=".dropdown-menu",ji=Kt()?"top-end":"top-start",Fi=Kt()?"top-start":"top-end",Hi=Kt()?"bottom-end":"bottom-start",Bi=Kt()?"bottom-start":"bottom-end",Wi=Kt()?"left-start":"right-start",zi=Kt()?"right-start":"left-start",Ri={autoClose:!0,boundary:"clippingParents",display:"dynamic",offset:[0,2],popperConfig:null,reference:"toggle"},qi={autoClose:"(boolean|string)",boundary:"(string|element)",display:"string",offset:"(array|string|function)",popperConfig:"(null|object|function)",reference:"(string|element|object)"};class Vi 
extends ve{constructor(t,e){super(t,e),this._popper=null,this._parent=this._element.parentNode,this._menu=we.next(this._element,Mi)[0]||we.prev(this._element,Mi)[0]||we.findOne(Mi,this._parent),this._inNavbar=this._detectNavbar()}static get Default(){return Ri}static get DefaultType(){return qi}static get NAME(){return wi}toggle(){return this._isShown()?this.hide():this.show()}show(){if(Wt(this._element)||this._isShown())return;const t={relatedTarget:this._element};if(!fe.trigger(this._element,ki,t).defaultPrevented){if(this._createPopper(),"ontouchstart"in document.documentElement&&!this._parent.closest(".navbar-nav"))for(const t of[].concat(...document.body.children))fe.on(t,"mouseover",Rt);this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(Ii),this._element.classList.add(Ii),fe.trigger(this._element,Li,t)}}hide(){if(Wt(this._element)||!this._isShown())return;const t={relatedTarget:this._element};this._completeHide(t)}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){if(!fe.trigger(this._element,Oi,t).defaultPrevented){if("ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.off(t,"mouseover",Rt);this._popper&&this._popper.destroy(),this._menu.classList.remove(Ii),this._element.classList.remove(Ii),this._element.setAttribute("aria-expanded","false"),_e.removeDataAttribute(this._menu,"popper"),fe.trigger(this._element,xi,t)}}_getConfig(t){if("object"==typeof(t=super._getConfig(t)).reference&&!Ft(t.reference)&&"function"!=typeof t.reference.getBoundingClientRect)throw new TypeError(`${wi.toUpperCase()}: Option "reference" provided type "object" without a required "getBoundingClientRect" method.`);return t}_createPopper(){if(void 0===e)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let 
t=this._element;"parent"===this._config.reference?t=this._parent:Ft(this._config.reference)?t=Ht(this._config.reference):"object"==typeof this._config.reference&&(t=this._config.reference);const i=this._getPopperConfig();this._popper=Dt(t,this._menu,i)}_isShown(){return this._menu.classList.contains(Ii)}_getPlacement(){const t=this._parent;if(t.classList.contains("dropend"))return Wi;if(t.classList.contains("dropstart"))return zi;if(t.classList.contains("dropup-center"))return"top";if(t.classList.contains("dropdown-center"))return"bottom";const e="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim();return t.classList.contains("dropup")?e?Fi:ji:e?Bi:Hi}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const{offset:t}=this._config;return"string"==typeof t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return(this._inNavbar||"static"===this._config.display)&&(_e.setDataAttribute(this._menu,"popper","static"),t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,...Xt(this._config.popperConfig,[t])}}_selectMenuItem({key:t,target:e}){const i=we.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter((t=>Bt(t)));i.length&&Gt(i,e,t===Ci,!i.includes(e)).focus()}static jQueryInterface(t){return this.each((function(){const e=Vi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(2===t.button||"keyup"===t.type&&"Tab"!==t.key)return;const e=we.find(Pi);for(const i of e){const e=Vi.getInstance(i);if(!e||!1===e._config.autoClose)continue;const 
n=t.composedPath(),s=n.includes(e._menu);if(n.includes(e._element)||"inside"===e._config.autoClose&&!s||"outside"===e._config.autoClose&&s)continue;if(e._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName)))continue;const o={relatedTarget:e._element};"click"===t.type&&(o.clickEvent=t),e._completeHide(o)}}static dataApiKeydownHandler(t){const e=/input|textarea/i.test(t.target.tagName),i="Escape"===t.key,n=[Ti,Ci].includes(t.key);if(!n&&!i)return;if(e&&!i)return;t.preventDefault();const s=this.matches(Ni)?this:we.prev(this,Ni)[0]||we.next(this,Ni)[0]||we.findOne(Ni,t.delegateTarget.parentNode),o=Vi.getOrCreateInstance(s);if(n)return t.stopPropagation(),o.show(),void o._selectMenuItem(t);o._isShown()&&(t.stopPropagation(),o.hide(),s.focus())}}fe.on(document,Di,Ni,Vi.dataApiKeydownHandler),fe.on(document,Di,Mi,Vi.dataApiKeydownHandler),fe.on(document,Si,Vi.clearMenus),fe.on(document,$i,Vi.clearMenus),fe.on(document,Si,Ni,(function(t){t.preventDefault(),Vi.getOrCreateInstance(this).toggle()})),Qt(Vi);const Yi="backdrop",Ki="show",Qi=`mousedown.bs.${Yi}`,Xi={className:"modal-backdrop",clickCallback:null,isAnimated:!1,isVisible:!0,rootElement:"body"},Ui={className:"string",clickCallback:"(function|null)",isAnimated:"boolean",isVisible:"boolean",rootElement:"(element|string)"};class Gi extends be{constructor(t){super(),this._config=this._getConfig(t),this._isAppended=!1,this._element=null}static get Default(){return Xi}static get DefaultType(){return Ui}static get NAME(){return Yi}show(t){if(!this._config.isVisible)return void Xt(t);this._append();const 
e=this._getElement();this._config.isAnimated&&qt(e),e.classList.add(Ki),this._emulateAnimation((()=>{Xt(t)}))}hide(t){this._config.isVisible?(this._getElement().classList.remove(Ki),this._emulateAnimation((()=>{this.dispose(),Xt(t)}))):Xt(t)}dispose(){this._isAppended&&(fe.off(this._element,Qi),this._element.remove(),this._isAppended=!1)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_configAfterMerge(t){return t.rootElement=Ht(t.rootElement),t}_append(){if(this._isAppended)return;const t=this._getElement();this._config.rootElement.append(t),fe.on(t,Qi,(()=>{Xt(this._config.clickCallback)})),this._isAppended=!0}_emulateAnimation(t){Ut(t,this._getElement(),this._config.isAnimated)}}const Ji=".bs.focustrap",Zi=`focusin${Ji}`,tn=`keydown.tab${Ji}`,en="backward",nn={autofocus:!0,trapElement:null},sn={autofocus:"boolean",trapElement:"element"};class on extends be{constructor(t){super(),this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}static get Default(){return nn}static get DefaultType(){return sn}static get NAME(){return"focustrap"}activate(){this._isActive||(this._config.autofocus&&this._config.trapElement.focus(),fe.off(document,Ji),fe.on(document,Zi,(t=>this._handleFocusin(t))),fe.on(document,tn,(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,fe.off(document,Ji))}_handleFocusin(t){const{trapElement:e}=this._config;if(t.target===document||t.target===e||e.contains(t.target))return;const i=we.focusableChildren(e);0===i.length?e.focus():this._lastTabNavDirection===en?i[i.length-1].focus():i[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?en:"forward")}}const rn=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",an=".sticky-top",ln="padding-right",cn="margin-right";class 
hn{constructor(){this._element=document.body}getWidth(){const t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const t=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,ln,(e=>e+t)),this._setElementAttributes(rn,ln,(e=>e+t)),this._setElementAttributes(an,cn,(e=>e-t))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,ln),this._resetElementAttributes(rn,ln),this._resetElementAttributes(an,cn)}isOverflowing(){return this.getWidth()>0}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t).getPropertyValue(e);t.style.setProperty(e,`${i(Number.parseFloat(s))}px`)}))}_saveInitialAttribute(t,e){const i=t.style.getPropertyValue(e);i&&_e.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=_e.getDataAttribute(t,e);null!==i?(_e.removeDataAttribute(t,e),t.style.setProperty(e,i)):t.style.removeProperty(e)}))}_applyManipulationCallback(t,e){if(Ft(t))e(t);else for(const i of we.find(t,this._element))e(i)}}const dn=".bs.modal",un=`hide${dn}`,fn=`hidePrevented${dn}`,pn=`hidden${dn}`,mn=`show${dn}`,gn=`shown${dn}`,_n=`resize${dn}`,bn=`click.dismiss${dn}`,vn=`mousedown.dismiss${dn}`,yn=`keydown.dismiss${dn}`,wn=`click${dn}.data-api`,En="modal-open",An="show",Tn="modal-static",Cn={backdrop:!0,focus:!0,keyboard:!0},On={backdrop:"(boolean|string)",focus:"boolean",keyboard:"boolean"};class xn extends ve{constructor(t,e){super(t,e),this._dialog=we.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._isTransitioning=!1,this._scrollBar=new 
hn,this._addEventListeners()}static get Default(){return Cn}static get DefaultType(){return On}static get NAME(){return"modal"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||fe.trigger(this._element,mn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isTransitioning=!0,this._scrollBar.hide(),document.body.classList.add(En),this._adjustDialog(),this._backdrop.show((()=>this._showElement(t))))}hide(){this._isShown&&!this._isTransitioning&&(fe.trigger(this._element,un).defaultPrevented||(this._isShown=!1,this._isTransitioning=!0,this._focustrap.deactivate(),this._element.classList.remove(An),this._queueCallback((()=>this._hideModal()),this._element,this._isAnimated())))}dispose(){fe.off(window,dn),fe.off(this._dialog,dn),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new Gi({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new on({trapElement:this._element})}_showElement(t){document.body.contains(this._element)||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0;const 
e=we.findOne(".modal-body",this._dialog);e&&(e.scrollTop=0),qt(this._element),this._element.classList.add(An),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,fe.trigger(this._element,gn,{relatedTarget:t})}),this._dialog,this._isAnimated())}_addEventListeners(){fe.on(this._element,yn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():this._triggerBackdropTransition())})),fe.on(window,_n,(()=>{this._isShown&&!this._isTransitioning&&this._adjustDialog()})),fe.on(this._element,vn,(t=>{fe.one(this._element,bn,(e=>{this._element===t.target&&this._element===e.target&&("static"!==this._config.backdrop?this._config.backdrop&&this.hide():this._triggerBackdropTransition())}))}))}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(En),this._resetAdjustments(),this._scrollBar.reset(),fe.trigger(this._element,pn)}))}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(fe.trigger(this._element,fn).defaultPrevented)return;const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._element.style.overflowY;"hidden"===e||this._element.classList.contains(Tn)||(t||(this._element.style.overflowY="hidden"),this._element.classList.add(Tn),this._queueCallback((()=>{this._element.classList.remove(Tn),this._queueCallback((()=>{this._element.style.overflowY=e}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;if(i&&!t){const t=Kt()?"paddingLeft":"paddingRight";this._element.style[t]=`${e}px`}if(!i&&t){const 
t=Kt()?"paddingRight":"paddingLeft";this._element.style[t]=`${e}px`}}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=xn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}fe.on(document,wn,'[data-bs-toggle="modal"]',(function(t){const e=we.getElementFromSelector(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),fe.one(e,mn,(t=>{t.defaultPrevented||fe.one(e,pn,(()=>{Bt(this)&&this.focus()}))}));const i=we.findOne(".modal.show");i&&xn.getInstance(i).hide(),xn.getOrCreateInstance(e).toggle(this)})),Ee(xn),Qt(xn);const kn=".bs.offcanvas",Ln=".data-api",Sn=`load${kn}${Ln}`,Dn="show",$n="showing",In="hiding",Nn=".offcanvas.show",Pn=`show${kn}`,Mn=`shown${kn}`,jn=`hide${kn}`,Fn=`hidePrevented${kn}`,Hn=`hidden${kn}`,Bn=`resize${kn}`,Wn=`click${kn}${Ln}`,zn=`keydown.dismiss${kn}`,Rn={backdrop:!0,keyboard:!0,scroll:!1},qn={backdrop:"(boolean|string)",keyboard:"boolean",scroll:"boolean"};class Vn extends ve{constructor(t,e){super(t,e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get Default(){return Rn}static get DefaultType(){return qn}static get NAME(){return"offcanvas"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||fe.trigger(this._element,Pn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._backdrop.show(),this._config.scroll||(new 
hn).hide(),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add($n),this._queueCallback((()=>{this._config.scroll&&!this._config.backdrop||this._focustrap.activate(),this._element.classList.add(Dn),this._element.classList.remove($n),fe.trigger(this._element,Mn,{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(fe.trigger(this._element,jn).defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.add(In),this._backdrop.hide(),this._queueCallback((()=>{this._element.classList.remove(Dn,In),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._config.scroll||(new hn).reset(),fe.trigger(this._element,Hn)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_initializeBackDrop(){const t=Boolean(this._config.backdrop);return new Gi({className:"offcanvas-backdrop",isVisible:t,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:t?()=>{"static"!==this._config.backdrop?this.hide():fe.trigger(this._element,Fn)}:null})}_initializeFocusTrap(){return new on({trapElement:this._element})}_addEventListeners(){fe.on(this._element,zn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():fe.trigger(this._element,Fn))}))}static jQueryInterface(t){return this.each((function(){const e=Vn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}fe.on(document,Wn,'[data-bs-toggle="offcanvas"]',(function(t){const e=we.getElementFromSelector(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),Wt(this))return;fe.one(e,Hn,(()=>{Bt(this)&&this.focus()}));const i=we.findOne(Nn);i&&i!==e&&Vn.getInstance(i).hide(),Vn.getOrCreateInstance(e).toggle(this)})),fe.on(window,Sn,(()=>{for(const t of 
we.find(Nn))Vn.getOrCreateInstance(t).show()})),fe.on(window,Bn,(()=>{for(const t of we.find("[aria-modal][class*=show][class*=offcanvas-]"))"fixed"!==getComputedStyle(t).position&&Vn.getOrCreateInstance(t).hide()})),Ee(Vn),Qt(Vn);const Yn={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Kn=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Qn=/^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i,Xn=(t,e)=>{const i=t.nodeName.toLowerCase();return e.includes(i)?!Kn.has(i)||Boolean(Qn.test(t.nodeValue)):e.filter((t=>t instanceof RegExp)).some((t=>t.test(i)))},Un={allowList:Yn,content:{},extraClass:"",html:!1,sanitize:!0,sanitizeFn:null,template:"
"},Gn={allowList:"object",content:"object",extraClass:"(string|function)",html:"boolean",sanitize:"boolean",sanitizeFn:"(null|function)",template:"string"},Jn={entry:"(string|element|function|null)",selector:"(string|element)"};class Zn extends be{constructor(t){super(),this._config=this._getConfig(t)}static get Default(){return Un}static get DefaultType(){return Gn}static get NAME(){return"TemplateFactory"}getContent(){return Object.values(this._config.content).map((t=>this._resolvePossibleFunction(t))).filter(Boolean)}hasContent(){return this.getContent().length>0}changeContent(t){return this._checkContent(t),this._config.content={...this._config.content,...t},this}toHtml(){const t=document.createElement("div");t.innerHTML=this._maybeSanitize(this._config.template);for(const[e,i]of Object.entries(this._config.content))this._setContent(t,i,e);const e=t.children[0],i=this._resolvePossibleFunction(this._config.extraClass);return i&&e.classList.add(...i.split(" ")),e}_typeCheckConfig(t){super._typeCheckConfig(t),this._checkContent(t.content)}_checkContent(t){for(const[e,i]of Object.entries(t))super._typeCheckConfig({selector:e,entry:i},Jn)}_setContent(t,e,i){const n=we.findOne(i,t);n&&((e=this._resolvePossibleFunction(e))?Ft(e)?this._putElementInTemplate(Ht(e),n):this._config.html?n.innerHTML=this._maybeSanitize(e):n.textContent=e:n.remove())}_maybeSanitize(t){return this._config.sanitize?function(t,e,i){if(!t.length)return t;if(i&&"function"==typeof i)return i(t);const n=(new window.DOMParser).parseFromString(t,"text/html"),s=[].concat(...n.body.querySelectorAll("*"));for(const t of s){const i=t.nodeName.toLowerCase();if(!Object.keys(e).includes(i)){t.remove();continue}const n=[].concat(...t.attributes),s=[].concat(e["*"]||[],e[i]||[]);for(const e of n)Xn(e,s)||t.removeAttribute(e.nodeName)}return n.body.innerHTML}(t,this._config.allowList,this._config.sanitizeFn):t}_resolvePossibleFunction(t){return 
Xt(t,[this])}_putElementInTemplate(t,e){if(this._config.html)return e.innerHTML="",void e.append(t);e.textContent=t.textContent}}const ts=new Set(["sanitize","allowList","sanitizeFn"]),es="fade",is="show",ns=".modal",ss="hide.bs.modal",os="hover",rs="focus",as={AUTO:"auto",TOP:"top",RIGHT:Kt()?"left":"right",BOTTOM:"bottom",LEFT:Kt()?"right":"left"},ls={allowList:Yn,animation:!0,boundary:"clippingParents",container:!1,customClass:"",delay:0,fallbackPlacements:["top","right","bottom","left"],html:!1,offset:[0,6],placement:"top",popperConfig:null,sanitize:!0,sanitizeFn:null,selector:!1,template:'',title:"",trigger:"hover focus"},cs={allowList:"object",animation:"boolean",boundary:"(string|element)",container:"(string|element|boolean)",customClass:"(string|function)",delay:"(number|object)",fallbackPlacements:"array",html:"boolean",offset:"(array|string|function)",placement:"(string|function)",popperConfig:"(null|object|function)",sanitize:"boolean",sanitizeFn:"(null|function)",selector:"(string|boolean)",template:"string",title:"(string|element|function)",trigger:"string"};class hs extends ve{constructor(t,i){if(void 0===e)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t,i),this._isEnabled=!0,this._timeout=0,this._isHovered=null,this._activeTrigger={},this._popper=null,this._templateFactory=null,this._newContent=null,this.tip=null,this._setListeners(),this._config.selector||this._fixTitle()}static get Default(){return ls}static get DefaultType(){return cs}static get 
NAME(){return"tooltip"}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(){this._isEnabled&&(this._activeTrigger.click=!this._activeTrigger.click,this._isShown()?this._leave():this._enter())}dispose(){clearTimeout(this._timeout),fe.off(this._element.closest(ns),ss,this._hideModalHandler),this._element.getAttribute("data-bs-original-title")&&this._element.setAttribute("title",this._element.getAttribute("data-bs-original-title")),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this._isWithContent()||!this._isEnabled)return;const t=fe.trigger(this._element,this.constructor.eventName("show")),e=(zt(this._element)||this._element.ownerDocument.documentElement).contains(this._element);if(t.defaultPrevented||!e)return;this._disposePopper();const i=this._getTipElement();this._element.setAttribute("aria-describedby",i.getAttribute("id"));const{container:n}=this._config;if(this._element.ownerDocument.documentElement.contains(this.tip)||(n.append(i),fe.trigger(this._element,this.constructor.eventName("inserted"))),this._popper=this._createPopper(i),i.classList.add(is),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))fe.on(t,"mouseover",Rt);this._queueCallback((()=>{fe.trigger(this._element,this.constructor.eventName("shown")),!1===this._isHovered&&this._leave(),this._isHovered=!1}),this.tip,this._isAnimated())}hide(){if(this._isShown()&&!fe.trigger(this._element,this.constructor.eventName("hide")).defaultPrevented){if(this._getTipElement().classList.remove(is),"ontouchstart"in document.documentElement)for(const t 
of[].concat(...document.body.children))fe.off(t,"mouseover",Rt);this._activeTrigger.click=!1,this._activeTrigger[rs]=!1,this._activeTrigger[os]=!1,this._isHovered=null,this._queueCallback((()=>{this._isWithActiveTrigger()||(this._isHovered||this._disposePopper(),this._element.removeAttribute("aria-describedby"),fe.trigger(this._element,this.constructor.eventName("hidden")))}),this.tip,this._isAnimated())}}update(){this._popper&&this._popper.update()}_isWithContent(){return Boolean(this._getTitle())}_getTipElement(){return this.tip||(this.tip=this._createTipElement(this._newContent||this._getContentForTemplate())),this.tip}_createTipElement(t){const e=this._getTemplateFactory(t).toHtml();if(!e)return null;e.classList.remove(es,is),e.classList.add(`bs-${this.constructor.NAME}-auto`);const i=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME).toString();return e.setAttribute("id",i),this._isAnimated()&&e.classList.add(es),e}setContent(t){this._newContent=t,this._isShown()&&(this._disposePopper(),this.show())}_getTemplateFactory(t){return this._templateFactory?this._templateFactory.changeContent(t):this._templateFactory=new Zn({...this._config,content:t,extraClass:this._resolvePossibleFunction(this._config.customClass)}),this._templateFactory}_getContentForTemplate(){return{".tooltip-inner":this._getTitle()}}_getTitle(){return this._resolvePossibleFunction(this._config.title)||this._element.getAttribute("data-bs-original-title")}_initializeOnDelegatedTarget(t){return this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_isAnimated(){return this._config.animation||this.tip&&this.tip.classList.contains(es)}_isShown(){return this.tip&&this.tip.classList.contains(is)}_createPopper(t){const e=Xt(this._config.placement,[this,t,this._element]),i=as[e.toUpperCase()];return Dt(this._element,t,this._getPopperConfig(i))}_getOffset(){const{offset:t}=this._config;return"string"==typeof 
t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return Xt(t,[this._element])}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"preSetPlacement",enabled:!0,phase:"beforeMain",fn:t=>{this._getTipElement().setAttribute("data-popper-placement",t.state.placement)}}]};return{...e,...Xt(this._config.popperConfig,[e])}}_setListeners(){const t=this._config.trigger.split(" ");for(const e of t)if("click"===e)fe.on(this._element,this.constructor.eventName("click"),this._config.selector,(t=>{this._initializeOnDelegatedTarget(t).toggle()}));else if("manual"!==e){const t=e===os?this.constructor.eventName("mouseenter"):this.constructor.eventName("focusin"),i=e===os?this.constructor.eventName("mouseleave"):this.constructor.eventName("focusout");fe.on(this._element,t,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusin"===t.type?rs:os]=!0,e._enter()})),fe.on(this._element,i,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusout"===t.type?rs:os]=e._element.contains(t.relatedTarget),e._leave()}))}this._hideModalHandler=()=>{this._element&&this.hide()},fe.on(this._element.closest(ns),ss,this._hideModalHandler)}_fixTitle(){const 
t=this._element.getAttribute("title");t&&(this._element.getAttribute("aria-label")||this._element.textContent.trim()||this._element.setAttribute("aria-label",t),this._element.setAttribute("data-bs-original-title",t),this._element.removeAttribute("title"))}_enter(){this._isShown()||this._isHovered?this._isHovered=!0:(this._isHovered=!0,this._setTimeout((()=>{this._isHovered&&this.show()}),this._config.delay.show))}_leave(){this._isWithActiveTrigger()||(this._isHovered=!1,this._setTimeout((()=>{this._isHovered||this.hide()}),this._config.delay.hide))}_setTimeout(t,e){clearTimeout(this._timeout),this._timeout=setTimeout(t,e)}_isWithActiveTrigger(){return Object.values(this._activeTrigger).includes(!0)}_getConfig(t){const e=_e.getDataAttributes(this._element);for(const t of Object.keys(e))ts.has(t)&&delete e[t];return t={...e,..."object"==typeof t&&t?t:{}},t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t.container=!1===t.container?document.body:Ht(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),t}_getDelegateConfig(){const t={};for(const[e,i]of Object.entries(this._config))this.constructor.Default[e]!==i&&(t[e]=i);return t.selector=!1,t.trigger="manual",t}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null),this.tip&&(this.tip.remove(),this.tip=null)}static jQueryInterface(t){return this.each((function(){const e=hs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(hs);const ds={...hs.Default,content:"",offset:[0,8],placement:"right",template:'',trigger:"click"},us={...hs.DefaultType,content:"(null|string|element|function)"};class fs extends hs{static get Default(){return ds}static get DefaultType(){return us}static get NAME(){return"popover"}_isWithContent(){return 
this._getTitle()||this._getContent()}_getContentForTemplate(){return{".popover-header":this._getTitle(),".popover-body":this._getContent()}}_getContent(){return this._resolvePossibleFunction(this._config.content)}static jQueryInterface(t){return this.each((function(){const e=fs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}Qt(fs);const ps=".bs.scrollspy",ms=`activate${ps}`,gs=`click${ps}`,_s=`load${ps}.data-api`,bs="active",vs="[href]",ys=".nav-link",ws=`${ys}, .nav-item > ${ys}, .list-group-item`,Es={offset:null,rootMargin:"0px 0px -25%",smoothScroll:!1,target:null,threshold:[.1,.5,1]},As={offset:"(number|null)",rootMargin:"string",smoothScroll:"boolean",target:"element",threshold:"array"};class Ts extends ve{constructor(t,e){super(t,e),this._targetLinks=new Map,this._observableSections=new Map,this._rootElement="visible"===getComputedStyle(this._element).overflowY?null:this._element,this._activeTarget=null,this._observer=null,this._previousScrollData={visibleEntryTop:0,parentScrollTop:0},this.refresh()}static get Default(){return Es}static get DefaultType(){return As}static get NAME(){return"scrollspy"}refresh(){this._initializeTargetsAndObservables(),this._maybeEnableSmoothScroll(),this._observer?this._observer.disconnect():this._observer=this._getNewObserver();for(const t of this._observableSections.values())this._observer.observe(t)}dispose(){this._observer.disconnect(),super.dispose()}_configAfterMerge(t){return t.target=Ht(t.target)||document.body,t.rootMargin=t.offset?`${t.offset}px 0px -30%`:t.rootMargin,"string"==typeof t.threshold&&(t.threshold=t.threshold.split(",").map((t=>Number.parseFloat(t)))),t}_maybeEnableSmoothScroll(){this._config.smoothScroll&&(fe.off(this._config.target,gs),fe.on(this._config.target,gs,vs,(t=>{const e=this._observableSections.get(t.target.hash);if(e){t.preventDefault();const 
i=this._rootElement||window,n=e.offsetTop-this._element.offsetTop;if(i.scrollTo)return void i.scrollTo({top:n,behavior:"smooth"});i.scrollTop=n}})))}_getNewObserver(){const t={root:this._rootElement,threshold:this._config.threshold,rootMargin:this._config.rootMargin};return new IntersectionObserver((t=>this._observerCallback(t)),t)}_observerCallback(t){const e=t=>this._targetLinks.get(`#${t.target.id}`),i=t=>{this._previousScrollData.visibleEntryTop=t.target.offsetTop,this._process(e(t))},n=(this._rootElement||document.documentElement).scrollTop,s=n>=this._previousScrollData.parentScrollTop;this._previousScrollData.parentScrollTop=n;for(const o of t){if(!o.isIntersecting){this._activeTarget=null,this._clearActiveClass(e(o));continue}const t=o.target.offsetTop>=this._previousScrollData.visibleEntryTop;if(s&&t){if(i(o),!n)return}else s||t||i(o)}}_initializeTargetsAndObservables(){this._targetLinks=new Map,this._observableSections=new Map;const t=we.find(vs,this._config.target);for(const e of t){if(!e.hash||Wt(e))continue;const t=we.findOne(decodeURI(e.hash),this._element);Bt(t)&&(this._targetLinks.set(decodeURI(e.hash),e),this._observableSections.set(e.hash,t))}}_process(t){this._activeTarget!==t&&(this._clearActiveClass(this._config.target),this._activeTarget=t,t.classList.add(bs),this._activateParents(t),fe.trigger(this._element,ms,{relatedTarget:t}))}_activateParents(t){if(t.classList.contains("dropdown-item"))we.findOne(".dropdown-toggle",t.closest(".dropdown")).classList.add(bs);else for(const e of we.parents(t,".nav, .list-group"))for(const t of we.prev(e,ws))t.classList.add(bs)}_clearActiveClass(t){t.classList.remove(bs);const e=we.find(`${vs}.${bs}`,t);for(const t of e)t.classList.remove(bs)}static jQueryInterface(t){return this.each((function(){const e=Ts.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}fe.on(window,_s,(()=>{for(const t of 
we.find('[data-bs-spy="scroll"]'))Ts.getOrCreateInstance(t)})),Qt(Ts);const Cs=".bs.tab",Os=`hide${Cs}`,xs=`hidden${Cs}`,ks=`show${Cs}`,Ls=`shown${Cs}`,Ss=`click${Cs}`,Ds=`keydown${Cs}`,$s=`load${Cs}`,Is="ArrowLeft",Ns="ArrowRight",Ps="ArrowUp",Ms="ArrowDown",js="Home",Fs="End",Hs="active",Bs="fade",Ws="show",zs=".dropdown-toggle",Rs=`:not(${zs})`,qs='[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',Vs=`.nav-link${Rs}, .list-group-item${Rs}, [role="tab"]${Rs}, ${qs}`,Ys=`.${Hs}[data-bs-toggle="tab"], .${Hs}[data-bs-toggle="pill"], .${Hs}[data-bs-toggle="list"]`;class Ks extends ve{constructor(t){super(t),this._parent=this._element.closest('.list-group, .nav, [role="tablist"]'),this._parent&&(this._setInitialAttributes(this._parent,this._getChildren()),fe.on(this._element,Ds,(t=>this._keydown(t))))}static get NAME(){return"tab"}show(){const t=this._element;if(this._elemIsActive(t))return;const e=this._getActiveElem(),i=e?fe.trigger(e,Os,{relatedTarget:t}):null;fe.trigger(t,ks,{relatedTarget:e}).defaultPrevented||i&&i.defaultPrevented||(this._deactivate(e,t),this._activate(t,e))}_activate(t,e){t&&(t.classList.add(Hs),this._activate(we.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),fe.trigger(t,Ls,{relatedTarget:e})):t.classList.add(Ws)}),t,t.classList.contains(Bs)))}_deactivate(t,e){t&&(t.classList.remove(Hs),t.blur(),this._deactivate(we.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),fe.trigger(t,xs,{relatedTarget:e})):t.classList.remove(Ws)}),t,t.classList.contains(Bs)))}_keydown(t){if(![Is,Ns,Ps,Ms,js,Fs].includes(t.key))return;t.stopPropagation(),t.preventDefault();const e=this._getChildren().filter((t=>!Wt(t)));let 
i;if([js,Fs].includes(t.key))i=e[t.key===js?0:e.length-1];else{const n=[Ns,Ms].includes(t.key);i=Gt(e,t.target,n,!0)}i&&(i.focus({preventScroll:!0}),Ks.getOrCreateInstance(i).show())}_getChildren(){return we.find(Vs,this._parent)}_getActiveElem(){return this._getChildren().find((t=>this._elemIsActive(t)))||null}_setInitialAttributes(t,e){this._setAttributeIfNotExists(t,"role","tablist");for(const t of e)this._setInitialAttributesOnChild(t)}_setInitialAttributesOnChild(t){t=this._getInnerElement(t);const e=this._elemIsActive(t),i=this._getOuterElement(t);t.setAttribute("aria-selected",e),i!==t&&this._setAttributeIfNotExists(i,"role","presentation"),e||t.setAttribute("tabindex","-1"),this._setAttributeIfNotExists(t,"role","tab"),this._setInitialAttributesOnTargetPanel(t)}_setInitialAttributesOnTargetPanel(t){const e=we.getElementFromSelector(t);e&&(this._setAttributeIfNotExists(e,"role","tabpanel"),t.id&&this._setAttributeIfNotExists(e,"aria-labelledby",`${t.id}`))}_toggleDropDown(t,e){const i=this._getOuterElement(t);if(!i.classList.contains("dropdown"))return;const n=(t,n)=>{const s=we.findOne(t,i);s&&s.classList.toggle(n,e)};n(zs,Hs),n(".dropdown-menu",Ws),i.setAttribute("aria-expanded",e)}_setAttributeIfNotExists(t,e,i){t.hasAttribute(e)||t.setAttribute(e,i)}_elemIsActive(t){return t.classList.contains(Hs)}_getInnerElement(t){return t.matches(Vs)?t:we.findOne(Vs,t)}_getOuterElement(t){return t.closest(".nav-item, .list-group-item")||t}static jQueryInterface(t){return this.each((function(){const e=Ks.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}fe.on(document,Ss,qs,(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),Wt(this)||Ks.getOrCreateInstance(this).show()})),fe.on(window,$s,(()=>{for(const t of we.find(Ys))Ks.getOrCreateInstance(t)})),Qt(Ks);const 
Qs=".bs.toast",Xs=`mouseover${Qs}`,Us=`mouseout${Qs}`,Gs=`focusin${Qs}`,Js=`focusout${Qs}`,Zs=`hide${Qs}`,to=`hidden${Qs}`,eo=`show${Qs}`,io=`shown${Qs}`,no="hide",so="show",oo="showing",ro={animation:"boolean",autohide:"boolean",delay:"number"},ao={animation:!0,autohide:!0,delay:5e3};class lo extends ve{constructor(t,e){super(t,e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get Default(){return ao}static get DefaultType(){return ro}static get NAME(){return"toast"}show(){fe.trigger(this._element,eo).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(no),qt(this._element),this._element.classList.add(so,oo),this._queueCallback((()=>{this._element.classList.remove(oo),fe.trigger(this._element,io),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this.isShown()&&(fe.trigger(this._element,Zs).defaultPrevented||(this._element.classList.add(oo),this._queueCallback((()=>{this._element.classList.add(no),this._element.classList.remove(oo,so),fe.trigger(this._element,to)}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this.isShown()&&this._element.classList.remove(so),super.dispose()}isShown(){return this._element.classList.contains(so)}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const 
i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){fe.on(this._element,Xs,(t=>this._onInteraction(t,!0))),fe.on(this._element,Us,(t=>this._onInteraction(t,!1))),fe.on(this._element,Gs,(t=>this._onInteraction(t,!0))),fe.on(this._element,Js,(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return this.each((function(){const e=lo.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}function co(t){"loading"!=document.readyState?t():document.addEventListener("DOMContentLoaded",t)}Ee(lo),Qt(lo),co((function(){[].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]')).map((function(t){return new hs(t,{delay:{show:500,hide:100}})}))})),co((function(){document.getElementById("pst-back-to-top").addEventListener("click",(function(){document.body.scrollTop=0,document.documentElement.scrollTop=0}))})),co((function(){var t=document.getElementById("pst-back-to-top"),e=document.getElementsByClassName("bd-header")[0].getBoundingClientRect();window.addEventListener("scroll",(function(){this.oldScroll>this.scrollY&&this.scrollY>e.bottom?t.style.display="block":t.style.display="none",this.oldScroll=this.scrollY}))})),window.bootstrap=i})(); +//# sourceMappingURL=bootstrap.js.map \ No newline at end of file diff --git a/_static/scripts/bootstrap.js.LICENSE.txt b/_static/scripts/bootstrap.js.LICENSE.txt new file mode 100644 index 00000000..10f979d0 --- /dev/null +++ b/_static/scripts/bootstrap.js.LICENSE.txt @@ -0,0 +1,5 @@ +/*! 
+ * Bootstrap v5.3.2 (https://getbootstrap.com/) + * Copyright 2011-2023 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */ diff --git a/_static/scripts/bootstrap.js.map b/_static/scripts/bootstrap.js.map new file mode 100644 index 00000000..64e212b1 --- /dev/null +++ b/_static/scripts/bootstrap.js.map @@ -0,0 +1 @@ +{"version":3,"file":"scripts/bootstrap.js","mappings":";mBACA,IAAIA,EAAsB,CCA1BA,EAAwB,CAACC,EAASC,KACjC,IAAI,IAAIC,KAAOD,EACXF,EAAoBI,EAAEF,EAAYC,KAASH,EAAoBI,EAAEH,EAASE,IAC5EE,OAAOC,eAAeL,EAASE,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,IAE1E,ECNDH,EAAwB,CAACS,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,GCClFV,EAAyBC,IACH,oBAAXa,QAA0BA,OAAOC,aAC1CV,OAAOC,eAAeL,EAASa,OAAOC,YAAa,CAAEC,MAAO,WAE7DX,OAAOC,eAAeL,EAAS,aAAc,CAAEe,OAAO,GAAO,01BCLvD,IAAI,EAAM,MACNC,EAAS,SACTC,EAAQ,QACRC,EAAO,OACPC,EAAO,OACPC,EAAiB,CAAC,EAAKJ,EAAQC,EAAOC,GACtCG,EAAQ,QACRC,EAAM,MACNC,EAAkB,kBAClBC,EAAW,WACXC,EAAS,SACTC,EAAY,YACZC,EAAmCP,EAAeQ,QAAO,SAAUC,EAAKC,GACjF,OAAOD,EAAIE,OAAO,CAACD,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAChE,GAAG,IACQ,EAA0B,GAAGS,OAAOX,EAAgB,CAACD,IAAOS,QAAO,SAAUC,EAAKC,GAC3F,OAAOD,EAAIE,OAAO,CAACD,EAAWA,EAAY,IAAMT,EAAOS,EAAY,IAAMR,GAC3E,GAAG,IAEQU,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAa,aACbC,EAAO,OACPC,EAAY,YAEZC,EAAc,cACdC,EAAQ,QACRC,EAAa,aACbC,EAAiB,CAACT,EAAYC,EAAMC,EAAWC,EAAYC,EAAMC,EAAWC,EAAaC,EAAOC,GC9B5F,SAASE,EAAYC,GAClC,OAAOA,GAAWA,EAAQC,UAAY,IAAIC,cAAgB,IAC5D,CCFe,SAASC,EAAUC,GAChC,GAAY,MAARA,EACF,OAAOC,OAGT,GAAwB,oBAApBD,EAAKE,WAAkC,CACzC,IAAIC,EAAgBH,EAAKG,cACzB,OAAOA,GAAgBA,EAAcC,aAAwBH,MAC/D,CAEA,OAAOD,CACT,CCTA,SAASK,EAAUL,GAEjB,OAAOA,aADUD,EAAUC,GAAMM,SACIN,aAAgBM,OACvD,CAEA,SAASC,EAAcP,GAErB,OAAOA,aADUD,EAAUC,GAAMQ,aACIR,aAAgBQ,WACvD,CAEA,SAASC,EAAaT,GAEpB,MAA0B,oBAAfU,aAKJV,aADUD,EAAUC,GAAMU,YACIV,aAAgBU,WACvD,CCwDA,SACEC,KAAM,cACNC,SAAS,EACTC,MAAO,QACPC,GA5EF,SAAqBC,GACnB,IAAIC,EAAQD,EAAKC,MACjB3D,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIS,EAAQJ,
EAAMK,OAAOV,IAAS,CAAC,EAC/BW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EACxCf,EAAUoB,EAAME,SAASP,GAExBJ,EAAcX,IAAaD,EAAYC,KAO5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUR,GACxC,IAAI3C,EAAQsD,EAAWX,IAET,IAAV3C,EACF4B,EAAQ4B,gBAAgBb,GAExBf,EAAQ6B,aAAad,GAAgB,IAAV3C,EAAiB,GAAKA,EAErD,IACF,GACF,EAoDE0D,OAlDF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MACdY,EAAgB,CAClBlD,OAAQ,CACNmD,SAAUb,EAAMc,QAAQC,SACxB5D,KAAM,IACN6D,IAAK,IACLC,OAAQ,KAEVC,MAAO,CACLL,SAAU,YAEZlD,UAAW,CAAC,GASd,OAPAtB,OAAOkE,OAAOP,EAAME,SAASxC,OAAO0C,MAAOQ,EAAclD,QACzDsC,EAAMK,OAASO,EAEXZ,EAAME,SAASgB,OACjB7E,OAAOkE,OAAOP,EAAME,SAASgB,MAAMd,MAAOQ,EAAcM,OAGnD,WACL7E,OAAO4D,KAAKD,EAAME,UAAUC,SAAQ,SAAUR,GAC5C,IAAIf,EAAUoB,EAAME,SAASP,GACzBW,EAAaN,EAAMM,WAAWX,IAAS,CAAC,EAGxCS,EAFkB/D,OAAO4D,KAAKD,EAAMK,OAAOzD,eAAe+C,GAAQK,EAAMK,OAAOV,GAAQiB,EAAcjB,IAE7E9B,QAAO,SAAUuC,EAAOe,GAElD,OADAf,EAAMe,GAAY,GACXf,CACT,GAAG,CAAC,GAECb,EAAcX,IAAaD,EAAYC,KAI5CvC,OAAOkE,OAAO3B,EAAQwB,MAAOA,GAC7B/D,OAAO4D,KAAKK,GAAYH,SAAQ,SAAUiB,GACxCxC,EAAQ4B,gBAAgBY,EAC1B,IACF,GACF,CACF,EASEC,SAAU,CAAC,kBCjFE,SAASC,EAAiBvD,GACvC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCHO,IAAI,EAAMC,KAAKC,IACX,EAAMD,KAAKE,IACXC,EAAQH,KAAKG,MCFT,SAASC,IACtB,IAAIC,EAASC,UAAUC,cAEvB,OAAc,MAAVF,GAAkBA,EAAOG,QAAUC,MAAMC,QAAQL,EAAOG,QACnDH,EAAOG,OAAOG,KAAI,SAAUC,GACjC,OAAOA,EAAKC,MAAQ,IAAMD,EAAKE,OACjC,IAAGC,KAAK,KAGHT,UAAUU,SACnB,CCTe,SAASC,IACtB,OAAQ,iCAAiCC,KAAKd,IAChD,CCCe,SAASe,EAAsB/D,EAASgE,EAAcC,QAC9C,IAAjBD,IACFA,GAAe,QAGO,IAApBC,IACFA,GAAkB,GAGpB,IAAIC,EAAalE,EAAQ+D,wBACrBI,EAAS,EACTC,EAAS,EAETJ,GAAgBrD,EAAcX,KAChCmE,EAASnE,EAAQqE,YAAc,GAAItB,EAAMmB,EAAWI,OAAStE,EAAQqE,aAAmB,EACxFD,EAASpE,EAAQuE,aAAe,GAAIxB,EAAMmB,EAAWM,QAAUxE,EAAQuE,cAAoB,GAG7F,IACIE,GADOhE,EAAUT,GAAWG,EAAUH,GAAWK,QAC3BoE,eAEtBC,GAAoBb,KAAsBI,EAC1CU,GAAKT,EAAW3F,MAAQmG,GAAoBD,EAAiBA,EAAeG,WAAa,IAAMT,EAC/FU,GAAKX,EAAW9B,KAAOsC,GAAoBD,EAAiBA,EAAeK,UAAY,IAAMV,EAC7FE,EAAQJ,EAAWI,MAAQH,EAC3BK,EAASN,EAAWM,OAASJ,EACjC,MAAO,CACLE,MAAOA,EACPE,OAAQA,EACRpC,IAAKyC,EACLvG,MAAOqG,EAAIL,EACXjG,OAAQwG,EAAIL,EAC
ZjG,KAAMoG,EACNA,EAAGA,EACHE,EAAGA,EAEP,CCrCe,SAASE,EAAc/E,GACpC,IAAIkE,EAAaH,EAAsB/D,GAGnCsE,EAAQtE,EAAQqE,YAChBG,EAASxE,EAAQuE,aAUrB,OARI3B,KAAKoC,IAAId,EAAWI,MAAQA,IAAU,IACxCA,EAAQJ,EAAWI,OAGjB1B,KAAKoC,IAAId,EAAWM,OAASA,IAAW,IAC1CA,EAASN,EAAWM,QAGf,CACLG,EAAG3E,EAAQ4E,WACXC,EAAG7E,EAAQ8E,UACXR,MAAOA,EACPE,OAAQA,EAEZ,CCvBe,SAASS,EAASC,EAAQC,GACvC,IAAIC,EAAWD,EAAME,aAAeF,EAAME,cAE1C,GAAIH,EAAOD,SAASE,GAClB,OAAO,EAEJ,GAAIC,GAAYvE,EAAauE,GAAW,CACzC,IAAIE,EAAOH,EAEX,EAAG,CACD,GAAIG,GAAQJ,EAAOK,WAAWD,GAC5B,OAAO,EAITA,EAAOA,EAAKE,YAAcF,EAAKG,IACjC,OAASH,EACX,CAGF,OAAO,CACT,CCrBe,SAAS,EAAiBtF,GACvC,OAAOG,EAAUH,GAAS0F,iBAAiB1F,EAC7C,CCFe,SAAS2F,EAAe3F,GACrC,MAAO,CAAC,QAAS,KAAM,MAAM4F,QAAQ7F,EAAYC,KAAa,CAChE,CCFe,SAAS6F,EAAmB7F,GAEzC,QAASS,EAAUT,GAAWA,EAAQO,cACtCP,EAAQ8F,WAAazF,OAAOyF,UAAUC,eACxC,CCFe,SAASC,EAAchG,GACpC,MAA6B,SAAzBD,EAAYC,GACPA,EAMPA,EAAQiG,cACRjG,EAAQwF,aACR3E,EAAab,GAAWA,EAAQyF,KAAO,OAEvCI,EAAmB7F,EAGvB,CCVA,SAASkG,EAAoBlG,GAC3B,OAAKW,EAAcX,IACoB,UAAvC,EAAiBA,GAASiC,SAInBjC,EAAQmG,aAHN,IAIX,CAwCe,SAASC,EAAgBpG,GAItC,IAHA,IAAIK,EAASF,EAAUH,GACnBmG,EAAeD,EAAoBlG,GAEhCmG,GAAgBR,EAAeQ,IAA6D,WAA5C,EAAiBA,GAAclE,UACpFkE,EAAeD,EAAoBC,GAGrC,OAAIA,IAA+C,SAA9BpG,EAAYoG,IAA0D,SAA9BpG,EAAYoG,IAAwE,WAA5C,EAAiBA,GAAclE,UAC3H5B,EAGF8F,GAhDT,SAA4BnG,GAC1B,IAAIqG,EAAY,WAAWvC,KAAKd,KAGhC,GAFW,WAAWc,KAAKd,MAEfrC,EAAcX,IAII,UAFX,EAAiBA,GAEnBiC,SACb,OAAO,KAIX,IAAIqE,EAAcN,EAAchG,GAMhC,IAJIa,EAAayF,KACfA,EAAcA,EAAYb,MAGrB9E,EAAc2F,IAAgB,CAAC,OAAQ,QAAQV,QAAQ7F,EAAYuG,IAAgB,GAAG,CAC3F,IAAIC,EAAM,EAAiBD,GAI3B,GAAsB,SAAlBC,EAAIC,WAA4C,SAApBD,EAAIE,aAA0C,UAAhBF,EAAIG,UAAiF,IAA1D,CAAC,YAAa,eAAed,QAAQW,EAAII,aAAsBN,GAAgC,WAAnBE,EAAII,YAA2BN,GAAaE,EAAIK,QAAyB,SAAfL,EAAIK,OACjO,OAAON,EAEPA,EAAcA,EAAYd,UAE9B,CAEA,OAAO,IACT,CAgByBqB,CAAmB7G,IAAYK,CACxD,CCpEe,SAASyG,EAAyB3H,GAC/C,MAAO,CAAC,MAAO,UAAUyG,QAAQzG,IAAc,EAAI,IAAM,GAC3D,CCDO,SAAS4H,EAAOjE,EAAK1E,EAAOyE,GACjC,OAAO,EAAQC,EAAK,EAAQ1E,EAAOyE,GACrC,CCFe,SAASmE,EAAmBC,GACzC,OAAOxJ,OAAOkE,OAAO,CAAC,ECDf,CACLS,IAAK,EACL9D,MAAO,EACPD,OA
AQ,EACRE,KAAM,GDHuC0I,EACjD,CEHe,SAASC,EAAgB9I,EAAOiD,GAC7C,OAAOA,EAAKpC,QAAO,SAAUkI,EAAS5J,GAEpC,OADA4J,EAAQ5J,GAAOa,EACR+I,CACT,GAAG,CAAC,EACN,CC4EA,SACEpG,KAAM,QACNC,SAAS,EACTC,MAAO,OACPC,GApEF,SAAeC,GACb,IAAIiG,EAEAhG,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KACZmB,EAAUf,EAAKe,QACfmF,EAAejG,EAAME,SAASgB,MAC9BgF,EAAgBlG,EAAMmG,cAAcD,cACpCE,EAAgB9E,EAAiBtB,EAAMjC,WACvCsI,EAAOX,EAAyBU,GAEhCE,EADa,CAACnJ,EAAMD,GAAOsH,QAAQ4B,IAAkB,EAClC,SAAW,QAElC,GAAKH,GAAiBC,EAAtB,CAIA,IAAIL,EAxBgB,SAAyBU,EAASvG,GAItD,OAAO4F,EAAsC,iBAH7CW,EAA6B,mBAAZA,EAAyBA,EAAQlK,OAAOkE,OAAO,CAAC,EAAGP,EAAMwG,MAAO,CAC/EzI,UAAWiC,EAAMjC,aACbwI,GACkDA,EAAUT,EAAgBS,EAASlJ,GAC7F,CAmBsBoJ,CAAgB3F,EAAQyF,QAASvG,GACjD0G,EAAY/C,EAAcsC,GAC1BU,EAAmB,MAATN,EAAe,EAAMlJ,EAC/ByJ,EAAmB,MAATP,EAAepJ,EAASC,EAClC2J,EAAU7G,EAAMwG,MAAM7I,UAAU2I,GAAOtG,EAAMwG,MAAM7I,UAAU0I,GAAQH,EAAcG,GAAQrG,EAAMwG,MAAM9I,OAAO4I,GAC9GQ,EAAYZ,EAAcG,GAAQrG,EAAMwG,MAAM7I,UAAU0I,GACxDU,EAAoB/B,EAAgBiB,GACpCe,EAAaD,EAA6B,MAATV,EAAeU,EAAkBE,cAAgB,EAAIF,EAAkBG,aAAe,EAAI,EAC3HC,EAAoBN,EAAU,EAAIC,EAAY,EAG9CpF,EAAMmE,EAAcc,GACpBlF,EAAMuF,EAAaN,EAAUJ,GAAOT,EAAce,GAClDQ,EAASJ,EAAa,EAAIN,EAAUJ,GAAO,EAAIa,EAC/CE,EAAS1B,EAAOjE,EAAK0F,EAAQ3F,GAE7B6F,EAAWjB,EACfrG,EAAMmG,cAAcxG,KAASqG,EAAwB,CAAC,GAAyBsB,GAAYD,EAAQrB,EAAsBuB,aAAeF,EAASD,EAAQpB,EAnBzJ,CAoBF,EAkCEtF,OAhCF,SAAgBC,GACd,IAAIX,EAAQW,EAAMX,MAEdwH,EADU7G,EAAMG,QACWlC,QAC3BqH,OAAoC,IAArBuB,EAA8B,sBAAwBA,EAErD,MAAhBvB,IAKwB,iBAAjBA,IACTA,EAAejG,EAAME,SAASxC,OAAO+J,cAAcxB,MAOhDpC,EAAS7D,EAAME,SAASxC,OAAQuI,KAIrCjG,EAAME,SAASgB,MAAQ+E,EACzB,EASE5E,SAAU,CAAC,iBACXqG,iBAAkB,CAAC,oBCxFN,SAASC,EAAa5J,GACnC,OAAOA,EAAUwD,MAAM,KAAK,EAC9B,CCOA,IAAIqG,GAAa,CACf5G,IAAK,OACL9D,MAAO,OACPD,OAAQ,OACRE,KAAM,QAeD,SAAS0K,GAAYlH,GAC1B,IAAImH,EAEApK,EAASiD,EAAMjD,OACfqK,EAAapH,EAAMoH,WACnBhK,EAAY4C,EAAM5C,UAClBiK,EAAYrH,EAAMqH,UAClBC,EAAUtH,EAAMsH,QAChBpH,EAAWF,EAAME,SACjBqH,EAAkBvH,EAAMuH,gBACxBC,EAAWxH,EAAMwH,SACjBC,EAAezH,EAAMyH,aACrBC,EAAU1H,EAAM0H,QAChBC,EAAaL,EAAQ1E,EACrBA,OAAmB,IAAf+E,EAAwB,EAAIA,EAChCC,EAAaN,EAAQxE,EA
CrBA,OAAmB,IAAf8E,EAAwB,EAAIA,EAEhCC,EAAgC,mBAAjBJ,EAA8BA,EAAa,CAC5D7E,EAAGA,EACHE,IACG,CACHF,EAAGA,EACHE,GAGFF,EAAIiF,EAAMjF,EACVE,EAAI+E,EAAM/E,EACV,IAAIgF,EAAOR,EAAQrL,eAAe,KAC9B8L,EAAOT,EAAQrL,eAAe,KAC9B+L,EAAQxL,EACRyL,EAAQ,EACRC,EAAM5J,OAEV,GAAIkJ,EAAU,CACZ,IAAIpD,EAAeC,EAAgBtH,GAC/BoL,EAAa,eACbC,EAAY,cAEZhE,IAAiBhG,EAAUrB,IAGmB,WAA5C,EAFJqH,EAAeN,EAAmB/G,IAECmD,UAAsC,aAAbA,IAC1DiI,EAAa,eACbC,EAAY,gBAOZhL,IAAc,IAAQA,IAAcZ,GAAQY,IAAcb,IAAU8K,IAAczK,KACpFqL,EAAQ3L,EAGRwG,IAFc4E,GAAWtD,IAAiB8D,GAAOA,EAAIxF,eAAiBwF,EAAIxF,eAAeD,OACzF2B,EAAa+D,IACEf,EAAW3E,OAC1BK,GAAKyE,EAAkB,GAAK,GAG1BnK,IAAcZ,IAASY,IAAc,GAAOA,IAAcd,GAAW+K,IAAczK,KACrFoL,EAAQzL,EAGRqG,IAFc8E,GAAWtD,IAAiB8D,GAAOA,EAAIxF,eAAiBwF,EAAIxF,eAAeH,MACzF6B,EAAagE,IACEhB,EAAW7E,MAC1BK,GAAK2E,EAAkB,GAAK,EAEhC,CAEA,IAgBMc,EAhBFC,EAAe5M,OAAOkE,OAAO,CAC/BM,SAAUA,GACTsH,GAAYP,IAEXsB,GAAyB,IAAjBd,EAlFd,SAA2BrI,EAAM8I,GAC/B,IAAItF,EAAIxD,EAAKwD,EACTE,EAAI1D,EAAK0D,EACT0F,EAAMN,EAAIO,kBAAoB,EAClC,MAAO,CACL7F,EAAG5B,EAAM4B,EAAI4F,GAAOA,GAAO,EAC3B1F,EAAG9B,EAAM8B,EAAI0F,GAAOA,GAAO,EAE/B,CA0EsCE,CAAkB,CACpD9F,EAAGA,EACHE,GACC1E,EAAUrB,IAAW,CACtB6F,EAAGA,EACHE,GAMF,OAHAF,EAAI2F,EAAM3F,EACVE,EAAIyF,EAAMzF,EAENyE,EAGK7L,OAAOkE,OAAO,CAAC,EAAG0I,IAAeD,EAAiB,CAAC,GAAkBJ,GAASF,EAAO,IAAM,GAAIM,EAAeL,GAASF,EAAO,IAAM,GAAIO,EAAe5D,WAAayD,EAAIO,kBAAoB,IAAM,EAAI,aAAe7F,EAAI,OAASE,EAAI,MAAQ,eAAiBF,EAAI,OAASE,EAAI,SAAUuF,IAG5R3M,OAAOkE,OAAO,CAAC,EAAG0I,IAAenB,EAAkB,CAAC,GAAmBc,GAASF,EAAOjF,EAAI,KAAO,GAAIqE,EAAgBa,GAASF,EAAOlF,EAAI,KAAO,GAAIuE,EAAgB1C,UAAY,GAAI0C,GAC9L,CA4CA,UACEnI,KAAM,gBACNC,SAAS,EACTC,MAAO,cACPC,GA9CF,SAAuBwJ,GACrB,IAAItJ,EAAQsJ,EAAMtJ,MACdc,EAAUwI,EAAMxI,QAChByI,EAAwBzI,EAAQoH,gBAChCA,OAA4C,IAA1BqB,GAA0CA,EAC5DC,EAAoB1I,EAAQqH,SAC5BA,OAAiC,IAAtBqB,GAAsCA,EACjDC,EAAwB3I,EAAQsH,aAChCA,OAAyC,IAA1BqB,GAA0CA,EACzDR,EAAe,CACjBlL,UAAWuD,EAAiBtB,EAAMjC,WAClCiK,UAAWL,EAAa3H,EAAMjC,WAC9BL,OAAQsC,EAAME,SAASxC,OACvBqK,WAAY/H,EAAMwG,MAAM9I,OACxBwK,gBAAiBA,EACjBG,QAAoC,UAA3BrI,EAAMc,QAAQC,UAGgB,MAArCf,EAAMmG,cAAcD,gBACtBlG,EAA
MK,OAAO3C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMK,OAAO3C,OAAQmK,GAAYxL,OAAOkE,OAAO,CAAC,EAAG0I,EAAc,CACvGhB,QAASjI,EAAMmG,cAAcD,cAC7BrF,SAAUb,EAAMc,QAAQC,SACxBoH,SAAUA,EACVC,aAAcA,OAIe,MAA7BpI,EAAMmG,cAAcjF,QACtBlB,EAAMK,OAAOa,MAAQ7E,OAAOkE,OAAO,CAAC,EAAGP,EAAMK,OAAOa,MAAO2G,GAAYxL,OAAOkE,OAAO,CAAC,EAAG0I,EAAc,CACrGhB,QAASjI,EAAMmG,cAAcjF,MAC7BL,SAAU,WACVsH,UAAU,EACVC,aAAcA,OAIlBpI,EAAMM,WAAW5C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMM,WAAW5C,OAAQ,CACnE,wBAAyBsC,EAAMjC,WAEnC,EAQE2L,KAAM,CAAC,GCrKT,IAAIC,GAAU,CACZA,SAAS,GAsCX,UACEhK,KAAM,iBACNC,SAAS,EACTC,MAAO,QACPC,GAAI,WAAe,EACnBY,OAxCF,SAAgBX,GACd,IAAIC,EAAQD,EAAKC,MACb4J,EAAW7J,EAAK6J,SAChB9I,EAAUf,EAAKe,QACf+I,EAAkB/I,EAAQgJ,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7CE,EAAkBjJ,EAAQkJ,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7C9K,EAASF,EAAUiB,EAAME,SAASxC,QAClCuM,EAAgB,GAAGjM,OAAOgC,EAAMiK,cAActM,UAAWqC,EAAMiK,cAAcvM,QAYjF,OAVIoM,GACFG,EAAc9J,SAAQ,SAAU+J,GAC9BA,EAAaC,iBAAiB,SAAUP,EAASQ,OAAQT,GAC3D,IAGEK,GACF/K,EAAOkL,iBAAiB,SAAUP,EAASQ,OAAQT,IAG9C,WACDG,GACFG,EAAc9J,SAAQ,SAAU+J,GAC9BA,EAAaG,oBAAoB,SAAUT,EAASQ,OAAQT,GAC9D,IAGEK,GACF/K,EAAOoL,oBAAoB,SAAUT,EAASQ,OAAQT,GAE1D,CACF,EASED,KAAM,CAAC,GC/CT,IAAIY,GAAO,CACTnN,KAAM,QACND,MAAO,OACPD,OAAQ,MACR+D,IAAK,UAEQ,SAASuJ,GAAqBxM,GAC3C,OAAOA,EAAUyM,QAAQ,0BAA0B,SAAUC,GAC3D,OAAOH,GAAKG,EACd,GACF,CCVA,IAAI,GAAO,CACTnN,MAAO,MACPC,IAAK,SAEQ,SAASmN,GAA8B3M,GACpD,OAAOA,EAAUyM,QAAQ,cAAc,SAAUC,GAC/C,OAAO,GAAKA,EACd,GACF,CCPe,SAASE,GAAgB3L,GACtC,IAAI6J,EAAM9J,EAAUC,GAGpB,MAAO,CACL4L,WAHe/B,EAAIgC,YAInBC,UAHcjC,EAAIkC,YAKtB,CCNe,SAASC,GAAoBpM,GAQ1C,OAAO+D,EAAsB8B,EAAmB7F,IAAUzB,KAAOwN,GAAgB/L,GAASgM,UAC5F,CCXe,SAASK,GAAerM,GAErC,IAAIsM,EAAoB,EAAiBtM,GACrCuM,EAAWD,EAAkBC,SAC7BC,EAAYF,EAAkBE,UAC9BC,EAAYH,EAAkBG,UAElC,MAAO,6BAA6B3I,KAAKyI,EAAWE,EAAYD,EAClE,CCLe,SAASE,GAAgBtM,GACtC,MAAI,CAAC,OAAQ,OAAQ,aAAawF,QAAQ7F,EAAYK,KAAU,EAEvDA,EAAKG,cAAcoM,KAGxBhM,EAAcP,IAASiM,GAAejM,GACjCA,EAGFsM,GAAgB1G,EAAc5F,GACvC,CCJe,SAASwM,GAAkB5M,EAAS6M,GACjD,IAAIC,OAES,IAATD,IACFA,EAAO,IAGT,IAAIvB,EAAeoB,GAAgB1M,GAC/B+M,EAASzB,KAAqE,O
AAlDwB,EAAwB9M,EAAQO,oBAAyB,EAASuM,EAAsBH,MACpH1C,EAAM9J,EAAUmL,GAChB0B,EAASD,EAAS,CAAC9C,GAAK7K,OAAO6K,EAAIxF,gBAAkB,GAAI4H,GAAef,GAAgBA,EAAe,IAAMA,EAC7G2B,EAAcJ,EAAKzN,OAAO4N,GAC9B,OAAOD,EAASE,EAChBA,EAAY7N,OAAOwN,GAAkB5G,EAAcgH,IACrD,CCzBe,SAASE,GAAiBC,GACvC,OAAO1P,OAAOkE,OAAO,CAAC,EAAGwL,EAAM,CAC7B5O,KAAM4O,EAAKxI,EACXvC,IAAK+K,EAAKtI,EACVvG,MAAO6O,EAAKxI,EAAIwI,EAAK7I,MACrBjG,OAAQ8O,EAAKtI,EAAIsI,EAAK3I,QAE1B,CCqBA,SAAS4I,GAA2BpN,EAASqN,EAAgBlL,GAC3D,OAAOkL,IAAmBxO,EAAWqO,GCzBxB,SAAyBlN,EAASmC,GAC/C,IAAI8H,EAAM9J,EAAUH,GAChBsN,EAAOzH,EAAmB7F,GAC1ByE,EAAiBwF,EAAIxF,eACrBH,EAAQgJ,EAAKhF,YACb9D,EAAS8I,EAAKjF,aACd1D,EAAI,EACJE,EAAI,EAER,GAAIJ,EAAgB,CAClBH,EAAQG,EAAeH,MACvBE,EAASC,EAAeD,OACxB,IAAI+I,EAAiB1J,KAEjB0J,IAAmBA,GAA+B,UAAbpL,KACvCwC,EAAIF,EAAeG,WACnBC,EAAIJ,EAAeK,UAEvB,CAEA,MAAO,CACLR,MAAOA,EACPE,OAAQA,EACRG,EAAGA,EAAIyH,GAAoBpM,GAC3B6E,EAAGA,EAEP,CDDwD2I,CAAgBxN,EAASmC,IAAa1B,EAAU4M,GAdxG,SAAoCrN,EAASmC,GAC3C,IAAIgL,EAAOpJ,EAAsB/D,GAAS,EAAoB,UAAbmC,GASjD,OARAgL,EAAK/K,IAAM+K,EAAK/K,IAAMpC,EAAQyN,UAC9BN,EAAK5O,KAAO4O,EAAK5O,KAAOyB,EAAQ0N,WAChCP,EAAK9O,OAAS8O,EAAK/K,IAAMpC,EAAQqI,aACjC8E,EAAK7O,MAAQ6O,EAAK5O,KAAOyB,EAAQsI,YACjC6E,EAAK7I,MAAQtE,EAAQsI,YACrB6E,EAAK3I,OAASxE,EAAQqI,aACtB8E,EAAKxI,EAAIwI,EAAK5O,KACd4O,EAAKtI,EAAIsI,EAAK/K,IACP+K,CACT,CAG0HQ,CAA2BN,EAAgBlL,GAAY+K,GEtBlK,SAAyBlN,GACtC,IAAI8M,EAEAQ,EAAOzH,EAAmB7F,GAC1B4N,EAAY7B,GAAgB/L,GAC5B2M,EAA0D,OAAlDG,EAAwB9M,EAAQO,oBAAyB,EAASuM,EAAsBH,KAChGrI,EAAQ,EAAIgJ,EAAKO,YAAaP,EAAKhF,YAAaqE,EAAOA,EAAKkB,YAAc,EAAGlB,EAAOA,EAAKrE,YAAc,GACvG9D,EAAS,EAAI8I,EAAKQ,aAAcR,EAAKjF,aAAcsE,EAAOA,EAAKmB,aAAe,EAAGnB,EAAOA,EAAKtE,aAAe,GAC5G1D,GAAKiJ,EAAU5B,WAAaI,GAAoBpM,GAChD6E,GAAK+I,EAAU1B,UAMnB,MAJiD,QAA7C,EAAiBS,GAAQW,GAAMS,YACjCpJ,GAAK,EAAI2I,EAAKhF,YAAaqE,EAAOA,EAAKrE,YAAc,GAAKhE,GAGrD,CACLA,MAAOA,EACPE,OAAQA,EACRG,EAAGA,EACHE,EAAGA,EAEP,CFCkMmJ,CAAgBnI,EAAmB7F,IACrO,CG1Be,SAASiO,GAAe9M,GACrC,IAOIkI,EAPAtK,EAAYoC,EAAKpC,UACjBiB,EAAUmB,EAAKnB,QACfb,EAAYgC,EAAKhC,UACjBqI,EAAgBrI,EAAYuD,EAAiBvD,GAAa,KAC1DiK,EAAYjK
,EAAY4J,EAAa5J,GAAa,KAClD+O,EAAUnP,EAAU4F,EAAI5F,EAAUuF,MAAQ,EAAItE,EAAQsE,MAAQ,EAC9D6J,EAAUpP,EAAU8F,EAAI9F,EAAUyF,OAAS,EAAIxE,EAAQwE,OAAS,EAGpE,OAAQgD,GACN,KAAK,EACH6B,EAAU,CACR1E,EAAGuJ,EACHrJ,EAAG9F,EAAU8F,EAAI7E,EAAQwE,QAE3B,MAEF,KAAKnG,EACHgL,EAAU,CACR1E,EAAGuJ,EACHrJ,EAAG9F,EAAU8F,EAAI9F,EAAUyF,QAE7B,MAEF,KAAKlG,EACH+K,EAAU,CACR1E,EAAG5F,EAAU4F,EAAI5F,EAAUuF,MAC3BO,EAAGsJ,GAEL,MAEF,KAAK5P,EACH8K,EAAU,CACR1E,EAAG5F,EAAU4F,EAAI3E,EAAQsE,MACzBO,EAAGsJ,GAEL,MAEF,QACE9E,EAAU,CACR1E,EAAG5F,EAAU4F,EACbE,EAAG9F,EAAU8F,GAInB,IAAIuJ,EAAW5G,EAAgBV,EAAyBU,GAAiB,KAEzE,GAAgB,MAAZ4G,EAAkB,CACpB,IAAI1G,EAAmB,MAAb0G,EAAmB,SAAW,QAExC,OAAQhF,GACN,KAAK1K,EACH2K,EAAQ+E,GAAY/E,EAAQ+E,IAAarP,EAAU2I,GAAO,EAAI1H,EAAQ0H,GAAO,GAC7E,MAEF,KAAK/I,EACH0K,EAAQ+E,GAAY/E,EAAQ+E,IAAarP,EAAU2I,GAAO,EAAI1H,EAAQ0H,GAAO,GAKnF,CAEA,OAAO2B,CACT,CC3De,SAASgF,GAAejN,EAAOc,QAC5B,IAAZA,IACFA,EAAU,CAAC,GAGb,IAAIoM,EAAWpM,EACXqM,EAAqBD,EAASnP,UAC9BA,OAAmC,IAAvBoP,EAAgCnN,EAAMjC,UAAYoP,EAC9DC,EAAoBF,EAASnM,SAC7BA,OAAiC,IAAtBqM,EAA+BpN,EAAMe,SAAWqM,EAC3DC,EAAoBH,EAASI,SAC7BA,OAAiC,IAAtBD,EAA+B7P,EAAkB6P,EAC5DE,EAAwBL,EAASM,aACjCA,OAAyC,IAA1BD,EAAmC9P,EAAW8P,EAC7DE,EAAwBP,EAASQ,eACjCA,OAA2C,IAA1BD,EAAmC/P,EAAS+P,EAC7DE,EAAuBT,EAASU,YAChCA,OAAuC,IAAzBD,GAA0CA,EACxDE,EAAmBX,EAAS3G,QAC5BA,OAA+B,IAArBsH,EAA8B,EAAIA,EAC5ChI,EAAgBD,EAAsC,iBAAZW,EAAuBA,EAAUT,EAAgBS,EAASlJ,IACpGyQ,EAAaJ,IAAmBhQ,EAASC,EAAYD,EACrDqK,EAAa/H,EAAMwG,MAAM9I,OACzBkB,EAAUoB,EAAME,SAAS0N,EAAcE,EAAaJ,GACpDK,EJkBS,SAAyBnP,EAAS0O,EAAUE,EAAczM,GACvE,IAAIiN,EAAmC,oBAAbV,EAlB5B,SAA4B1O,GAC1B,IAAIpB,EAAkBgO,GAAkB5G,EAAchG,IAElDqP,EADoB,CAAC,WAAY,SAASzJ,QAAQ,EAAiB5F,GAASiC,WAAa,GACnDtB,EAAcX,GAAWoG,EAAgBpG,GAAWA,EAE9F,OAAKS,EAAU4O,GAKRzQ,EAAgBgI,QAAO,SAAUyG,GACtC,OAAO5M,EAAU4M,IAAmBpI,EAASoI,EAAgBgC,IAAmD,SAAhCtP,EAAYsN,EAC9F,IANS,EAOX,CAK6DiC,CAAmBtP,GAAW,GAAGZ,OAAOsP,GAC/F9P,EAAkB,GAAGQ,OAAOgQ,EAAqB,CAACR,IAClDW,EAAsB3Q,EAAgB,GACtC4Q,EAAe5Q,EAAgBK,QAAO,SAAUwQ,EAASpC,GAC3D,IAAIF,EAAOC,GAA2BpN,EAASqN,EAAgBlL,GAK/D,OAJAsN,EAAQrN,IAAM,EAAI+K,EAAK/K,IA
AKqN,EAAQrN,KACpCqN,EAAQnR,MAAQ,EAAI6O,EAAK7O,MAAOmR,EAAQnR,OACxCmR,EAAQpR,OAAS,EAAI8O,EAAK9O,OAAQoR,EAAQpR,QAC1CoR,EAAQlR,KAAO,EAAI4O,EAAK5O,KAAMkR,EAAQlR,MAC/BkR,CACT,GAAGrC,GAA2BpN,EAASuP,EAAqBpN,IAK5D,OAJAqN,EAAalL,MAAQkL,EAAalR,MAAQkR,EAAajR,KACvDiR,EAAahL,OAASgL,EAAanR,OAASmR,EAAapN,IACzDoN,EAAa7K,EAAI6K,EAAajR,KAC9BiR,EAAa3K,EAAI2K,EAAapN,IACvBoN,CACT,CInC2BE,CAAgBjP,EAAUT,GAAWA,EAAUA,EAAQ2P,gBAAkB9J,EAAmBzE,EAAME,SAASxC,QAAS4P,EAAUE,EAAczM,GACjKyN,EAAsB7L,EAAsB3C,EAAME,SAASvC,WAC3DuI,EAAgB2G,GAAe,CACjClP,UAAW6Q,EACX5P,QAASmJ,EACThH,SAAU,WACVhD,UAAWA,IAET0Q,EAAmB3C,GAAiBzP,OAAOkE,OAAO,CAAC,EAAGwH,EAAY7B,IAClEwI,EAAoBhB,IAAmBhQ,EAAS+Q,EAAmBD,EAGnEG,EAAkB,CACpB3N,IAAK+M,EAAmB/M,IAAM0N,EAAkB1N,IAAM6E,EAAc7E,IACpE/D,OAAQyR,EAAkBzR,OAAS8Q,EAAmB9Q,OAAS4I,EAAc5I,OAC7EE,KAAM4Q,EAAmB5Q,KAAOuR,EAAkBvR,KAAO0I,EAAc1I,KACvED,MAAOwR,EAAkBxR,MAAQ6Q,EAAmB7Q,MAAQ2I,EAAc3I,OAExE0R,EAAa5O,EAAMmG,cAAckB,OAErC,GAAIqG,IAAmBhQ,GAAUkR,EAAY,CAC3C,IAAIvH,EAASuH,EAAW7Q,GACxB1B,OAAO4D,KAAK0O,GAAiBxO,SAAQ,SAAUhE,GAC7C,IAAI0S,EAAW,CAAC3R,EAAOD,GAAQuH,QAAQrI,IAAQ,EAAI,GAAK,EACpDkK,EAAO,CAAC,EAAKpJ,GAAQuH,QAAQrI,IAAQ,EAAI,IAAM,IACnDwS,EAAgBxS,IAAQkL,EAAOhB,GAAQwI,CACzC,GACF,CAEA,OAAOF,CACT,CCyEA,UACEhP,KAAM,OACNC,SAAS,EACTC,MAAO,OACPC,GA5HF,SAAcC,GACZ,IAAIC,EAAQD,EAAKC,MACbc,EAAUf,EAAKe,QACfnB,EAAOI,EAAKJ,KAEhB,IAAIK,EAAMmG,cAAcxG,GAAMmP,MAA9B,CAoCA,IAhCA,IAAIC,EAAoBjO,EAAQkM,SAC5BgC,OAAsC,IAAtBD,GAAsCA,EACtDE,EAAmBnO,EAAQoO,QAC3BC,OAAoC,IAArBF,GAAqCA,EACpDG,EAA8BtO,EAAQuO,mBACtC9I,EAAUzF,EAAQyF,QAClB+G,EAAWxM,EAAQwM,SACnBE,EAAe1M,EAAQ0M,aACvBI,EAAc9M,EAAQ8M,YACtB0B,EAAwBxO,EAAQyO,eAChCA,OAA2C,IAA1BD,GAA0CA,EAC3DE,EAAwB1O,EAAQ0O,sBAChCC,EAAqBzP,EAAMc,QAAQ/C,UACnCqI,EAAgB9E,EAAiBmO,GAEjCJ,EAAqBD,IADHhJ,IAAkBqJ,GACqCF,EAjC/E,SAAuCxR,GACrC,GAAIuD,EAAiBvD,KAAeX,EAClC,MAAO,GAGT,IAAIsS,EAAoBnF,GAAqBxM,GAC7C,MAAO,CAAC2M,GAA8B3M,GAAY2R,EAAmBhF,GAA8BgF,GACrG,CA0B6IC,CAA8BF,GAA3E,CAAClF,GAAqBkF,KAChHG,EAAa,CAACH,GAAoBzR,OAAOqR,GAAoBxR,QAAO,SAAUC,EAAKC,GACrF,OAAOD,EAAIE,OAAOsD,EAAiBvD,KAAeX,ECvCvC,SAA
8B4C,EAAOc,QAClC,IAAZA,IACFA,EAAU,CAAC,GAGb,IAAIoM,EAAWpM,EACX/C,EAAYmP,EAASnP,UACrBuP,EAAWJ,EAASI,SACpBE,EAAeN,EAASM,aACxBjH,EAAU2G,EAAS3G,QACnBgJ,EAAiBrC,EAASqC,eAC1BM,EAAwB3C,EAASsC,sBACjCA,OAAkD,IAA1BK,EAAmC,EAAgBA,EAC3E7H,EAAYL,EAAa5J,GACzB6R,EAAa5H,EAAYuH,EAAiB3R,EAAsBA,EAAoB4H,QAAO,SAAUzH,GACvG,OAAO4J,EAAa5J,KAAeiK,CACrC,IAAK3K,EACDyS,EAAoBF,EAAWpK,QAAO,SAAUzH,GAClD,OAAOyR,EAAsBhL,QAAQzG,IAAc,CACrD,IAEiC,IAA7B+R,EAAkBC,SACpBD,EAAoBF,GAItB,IAAII,EAAYF,EAAkBjS,QAAO,SAAUC,EAAKC,GAOtD,OANAD,EAAIC,GAAakP,GAAejN,EAAO,CACrCjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdjH,QAASA,IACRjF,EAAiBvD,IACbD,CACT,GAAG,CAAC,GACJ,OAAOzB,OAAO4D,KAAK+P,GAAWC,MAAK,SAAUC,EAAGC,GAC9C,OAAOH,EAAUE,GAAKF,EAAUG,EAClC,GACF,CDC6DC,CAAqBpQ,EAAO,CACnFjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdjH,QAASA,EACTgJ,eAAgBA,EAChBC,sBAAuBA,IACpBzR,EACP,GAAG,IACCsS,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzB4S,EAAY,IAAIC,IAChBC,GAAqB,EACrBC,EAAwBb,EAAW,GAE9Bc,EAAI,EAAGA,EAAId,EAAWG,OAAQW,IAAK,CAC1C,IAAI3S,EAAY6R,EAAWc,GAEvBC,EAAiBrP,EAAiBvD,GAElC6S,EAAmBjJ,EAAa5J,KAAeT,EAC/CuT,EAAa,CAAC,EAAK5T,GAAQuH,QAAQmM,IAAmB,EACtDrK,EAAMuK,EAAa,QAAU,SAC7B1F,EAAW8B,GAAejN,EAAO,CACnCjC,UAAWA,EACXuP,SAAUA,EACVE,aAAcA,EACdI,YAAaA,EACbrH,QAASA,IAEPuK,EAAoBD,EAAaD,EAAmB1T,EAAQC,EAAOyT,EAAmB3T,EAAS,EAE/FoT,EAAc/J,GAAOyB,EAAWzB,KAClCwK,EAAoBvG,GAAqBuG,IAG3C,IAAIC,EAAmBxG,GAAqBuG,GACxCE,EAAS,GAUb,GARIhC,GACFgC,EAAOC,KAAK9F,EAASwF,IAAmB,GAGtCxB,GACF6B,EAAOC,KAAK9F,EAAS2F,IAAsB,EAAG3F,EAAS4F,IAAqB,GAG1EC,EAAOE,OAAM,SAAUC,GACzB,OAAOA,CACT,IAAI,CACFV,EAAwB1S,EACxByS,GAAqB,EACrB,KACF,CAEAF,EAAUc,IAAIrT,EAAWiT,EAC3B,CAEA,GAAIR,EAqBF,IAnBA,IAEIa,EAAQ,SAAeC,GACzB,IAAIC,EAAmB3B,EAAW4B,MAAK,SAAUzT,GAC/C,IAAIiT,EAASV,EAAU9T,IAAIuB,GAE3B,GAAIiT,EACF,OAAOA,EAAOS,MAAM,EAAGH,GAAIJ,OAAM,SAAUC,GACzC,OAAOA,CACT,GAEJ,IAEA,GAAII,EAEF,OADAd,EAAwBc,EACjB,OAEX,EAESD,EAnBY/B,EAAiB,EAAI,EAmBZ+B,EAAK,GAGpB,UAFFD,EAAMC,GADmBA,KAOpCtR,EAAMjC,YAAc0S,IACtBzQ,EAAMmG,cAAcxG,GAAMmP,OAAQ,EAClC9O,EAAMjC,UAAY0S,EAClBzQ,EAAM0R,OAAQ,EA5GhB,CA8GF,EAQEhK,iBAAkB,
CAAC,UACnBgC,KAAM,CACJoF,OAAO,IE7IX,SAAS6C,GAAexG,EAAUY,EAAM6F,GAQtC,YAPyB,IAArBA,IACFA,EAAmB,CACjBrO,EAAG,EACHE,EAAG,IAIA,CACLzC,IAAKmK,EAASnK,IAAM+K,EAAK3I,OAASwO,EAAiBnO,EACnDvG,MAAOiO,EAASjO,MAAQ6O,EAAK7I,MAAQ0O,EAAiBrO,EACtDtG,OAAQkO,EAASlO,OAAS8O,EAAK3I,OAASwO,EAAiBnO,EACzDtG,KAAMgO,EAAShO,KAAO4O,EAAK7I,MAAQ0O,EAAiBrO,EAExD,CAEA,SAASsO,GAAsB1G,GAC7B,MAAO,CAAC,EAAKjO,EAAOD,EAAQE,GAAM2U,MAAK,SAAUC,GAC/C,OAAO5G,EAAS4G,IAAS,CAC3B,GACF,CA+BA,UACEpS,KAAM,OACNC,SAAS,EACTC,MAAO,OACP6H,iBAAkB,CAAC,mBACnB5H,GAlCF,SAAcC,GACZ,IAAIC,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KACZ0Q,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzBkU,EAAmB5R,EAAMmG,cAAc6L,gBACvCC,EAAoBhF,GAAejN,EAAO,CAC5C0N,eAAgB,cAEdwE,EAAoBjF,GAAejN,EAAO,CAC5C4N,aAAa,IAEXuE,EAA2BR,GAAeM,EAAmB5B,GAC7D+B,EAAsBT,GAAeO,EAAmBnK,EAAY6J,GACpES,EAAoBR,GAAsBM,GAC1CG,EAAmBT,GAAsBO,GAC7CpS,EAAMmG,cAAcxG,GAAQ,CAC1BwS,yBAA0BA,EAC1BC,oBAAqBA,EACrBC,kBAAmBA,EACnBC,iBAAkBA,GAEpBtS,EAAMM,WAAW5C,OAASrB,OAAOkE,OAAO,CAAC,EAAGP,EAAMM,WAAW5C,OAAQ,CACnE,+BAAgC2U,EAChC,sBAAuBC,GAE3B,GCJA,IACE3S,KAAM,SACNC,SAAS,EACTC,MAAO,OACPwB,SAAU,CAAC,iBACXvB,GA5BF,SAAgBa,GACd,IAAIX,EAAQW,EAAMX,MACdc,EAAUH,EAAMG,QAChBnB,EAAOgB,EAAMhB,KACb4S,EAAkBzR,EAAQuG,OAC1BA,OAA6B,IAApBkL,EAA6B,CAAC,EAAG,GAAKA,EAC/C7I,EAAO,EAAW7L,QAAO,SAAUC,EAAKC,GAE1C,OADAD,EAAIC,GA5BD,SAAiCA,EAAWyI,EAAOa,GACxD,IAAIjB,EAAgB9E,EAAiBvD,GACjCyU,EAAiB,CAACrV,EAAM,GAAKqH,QAAQ4B,IAAkB,GAAK,EAAI,EAEhErG,EAAyB,mBAAXsH,EAAwBA,EAAOhL,OAAOkE,OAAO,CAAC,EAAGiG,EAAO,CACxEzI,UAAWA,KACPsJ,EACFoL,EAAW1S,EAAK,GAChB2S,EAAW3S,EAAK,GAIpB,OAFA0S,EAAWA,GAAY,EACvBC,GAAYA,GAAY,GAAKF,EACtB,CAACrV,EAAMD,GAAOsH,QAAQ4B,IAAkB,EAAI,CACjD7C,EAAGmP,EACHjP,EAAGgP,GACD,CACFlP,EAAGkP,EACHhP,EAAGiP,EAEP,CASqBC,CAAwB5U,EAAWiC,EAAMwG,MAAOa,GAC1DvJ,CACT,GAAG,CAAC,GACA8U,EAAwBlJ,EAAK1J,EAAMjC,WACnCwF,EAAIqP,EAAsBrP,EAC1BE,EAAImP,EAAsBnP,EAEW,MAArCzD,EAAMmG,cAAcD,gBACtBlG,EAAMmG,cAAcD,cAAc3C,GAAKA,EACvCvD,EAAMmG,cAAcD,cAAczC,GAAKA,GAGzCzD,EAAMmG,cAAcxG,GAAQ+J,CAC9B,GC1BA,IACE/J,KAAM,gBACNC,SAAS,EACTC,MAAO,OACPC,GApBF,
SAAuBC,GACrB,IAAIC,EAAQD,EAAKC,MACbL,EAAOI,EAAKJ,KAKhBK,EAAMmG,cAAcxG,GAAQkN,GAAe,CACzClP,UAAWqC,EAAMwG,MAAM7I,UACvBiB,QAASoB,EAAMwG,MAAM9I,OACrBqD,SAAU,WACVhD,UAAWiC,EAAMjC,WAErB,EAQE2L,KAAM,CAAC,GCgHT,IACE/J,KAAM,kBACNC,SAAS,EACTC,MAAO,OACPC,GA/HF,SAAyBC,GACvB,IAAIC,EAAQD,EAAKC,MACbc,EAAUf,EAAKe,QACfnB,EAAOI,EAAKJ,KACZoP,EAAoBjO,EAAQkM,SAC5BgC,OAAsC,IAAtBD,GAAsCA,EACtDE,EAAmBnO,EAAQoO,QAC3BC,OAAoC,IAArBF,GAAsCA,EACrD3B,EAAWxM,EAAQwM,SACnBE,EAAe1M,EAAQ0M,aACvBI,EAAc9M,EAAQ8M,YACtBrH,EAAUzF,EAAQyF,QAClBsM,EAAkB/R,EAAQgS,OAC1BA,OAA6B,IAApBD,GAAoCA,EAC7CE,EAAwBjS,EAAQkS,aAChCA,OAAyC,IAA1BD,EAAmC,EAAIA,EACtD5H,EAAW8B,GAAejN,EAAO,CACnCsN,SAAUA,EACVE,aAAcA,EACdjH,QAASA,EACTqH,YAAaA,IAEXxH,EAAgB9E,EAAiBtB,EAAMjC,WACvCiK,EAAYL,EAAa3H,EAAMjC,WAC/BkV,GAAmBjL,EACnBgF,EAAWtH,EAAyBU,GACpC8I,ECrCY,MDqCSlC,ECrCH,IAAM,IDsCxB9G,EAAgBlG,EAAMmG,cAAcD,cACpCmK,EAAgBrQ,EAAMwG,MAAM7I,UAC5BoK,EAAa/H,EAAMwG,MAAM9I,OACzBwV,EAA4C,mBAAjBF,EAA8BA,EAAa3W,OAAOkE,OAAO,CAAC,EAAGP,EAAMwG,MAAO,CACvGzI,UAAWiC,EAAMjC,aACbiV,EACFG,EAA2D,iBAAtBD,EAAiC,CACxElG,SAAUkG,EACVhE,QAASgE,GACP7W,OAAOkE,OAAO,CAChByM,SAAU,EACVkC,QAAS,GACRgE,GACCE,EAAsBpT,EAAMmG,cAAckB,OAASrH,EAAMmG,cAAckB,OAAOrH,EAAMjC,WAAa,KACjG2L,EAAO,CACTnG,EAAG,EACHE,EAAG,GAGL,GAAKyC,EAAL,CAIA,GAAI8I,EAAe,CACjB,IAAIqE,EAEAC,EAAwB,MAAbtG,EAAmB,EAAM7P,EACpCoW,EAAuB,MAAbvG,EAAmB/P,EAASC,EACtCoJ,EAAmB,MAAb0G,EAAmB,SAAW,QACpC3F,EAASnB,EAAc8G,GACvBtL,EAAM2F,EAAS8D,EAASmI,GACxB7R,EAAM4F,EAAS8D,EAASoI,GACxBC,EAAWV,GAAU/K,EAAWzB,GAAO,EAAI,EAC3CmN,EAASzL,IAAc1K,EAAQ+S,EAAc/J,GAAOyB,EAAWzB,GAC/DoN,EAAS1L,IAAc1K,GAASyK,EAAWzB,IAAQ+J,EAAc/J,GAGjEL,EAAejG,EAAME,SAASgB,MAC9BwF,EAAYoM,GAAU7M,EAAetC,EAAcsC,GAAgB,CACrE/C,MAAO,EACPE,OAAQ,GAENuQ,GAAqB3T,EAAMmG,cAAc,oBAAsBnG,EAAMmG,cAAc,oBAAoBI,QxBhFtG,CACLvF,IAAK,EACL9D,MAAO,EACPD,OAAQ,EACRE,KAAM,GwB6EFyW,GAAkBD,GAAmBL,GACrCO,GAAkBF,GAAmBJ,GAMrCO,GAAWnO,EAAO,EAAG0K,EAAc/J,GAAMI,EAAUJ,IACnDyN,GAAYd,EAAkB5C,EAAc/J,GAAO,EAAIkN,EAAWM,GAAWF,GAAkBT,EAA4BnG,SAAWyG,EAASK,GAAWF,GAAkBT,EAA4BnG,SACxMgH,GAAYf,GAAmB5C,EAAc/J,
GAAO,EAAIkN,EAAWM,GAAWD,GAAkBV,EAA4BnG,SAAW0G,EAASI,GAAWD,GAAkBV,EAA4BnG,SACzMjG,GAAoB/G,EAAME,SAASgB,OAAS8D,EAAgBhF,EAAME,SAASgB,OAC3E+S,GAAelN,GAAiC,MAAbiG,EAAmBjG,GAAkBsF,WAAa,EAAItF,GAAkBuF,YAAc,EAAI,EAC7H4H,GAAwH,OAAjGb,EAA+C,MAAvBD,OAA8B,EAASA,EAAoBpG,IAAqBqG,EAAwB,EAEvJc,GAAY9M,EAAS2M,GAAYE,GACjCE,GAAkBzO,EAAOmN,EAAS,EAAQpR,EAF9B2F,EAAS0M,GAAYG,GAAsBD,IAEKvS,EAAK2F,EAAQyL,EAAS,EAAQrR,EAAK0S,IAAa1S,GAChHyE,EAAc8G,GAAYoH,GAC1B1K,EAAKsD,GAAYoH,GAAkB/M,CACrC,CAEA,GAAI8H,EAAc,CAChB,IAAIkF,GAEAC,GAAyB,MAAbtH,EAAmB,EAAM7P,EAErCoX,GAAwB,MAAbvH,EAAmB/P,EAASC,EAEvCsX,GAAUtO,EAAcgJ,GAExBuF,GAAmB,MAAZvF,EAAkB,SAAW,QAEpCwF,GAAOF,GAAUrJ,EAASmJ,IAE1BK,GAAOH,GAAUrJ,EAASoJ,IAE1BK,IAAuD,IAAxC,CAAC,EAAKzX,GAAMqH,QAAQ4B,GAEnCyO,GAAyH,OAAjGR,GAAgD,MAAvBjB,OAA8B,EAASA,EAAoBlE,IAAoBmF,GAAyB,EAEzJS,GAAaF,GAAeF,GAAOF,GAAUnE,EAAcoE,IAAQ1M,EAAW0M,IAAQI,GAAuB1B,EAA4BjE,QAEzI6F,GAAaH,GAAeJ,GAAUnE,EAAcoE,IAAQ1M,EAAW0M,IAAQI,GAAuB1B,EAA4BjE,QAAUyF,GAE5IK,GAAmBlC,GAAU8B,G1BzH9B,SAAwBlT,EAAK1E,EAAOyE,GACzC,IAAIwT,EAAItP,EAAOjE,EAAK1E,EAAOyE,GAC3B,OAAOwT,EAAIxT,EAAMA,EAAMwT,CACzB,C0BsHoDC,CAAeJ,GAAYN,GAASO,IAAcpP,EAAOmN,EAASgC,GAAaJ,GAAMF,GAAS1B,EAASiC,GAAaJ,IAEpKzO,EAAcgJ,GAAW8F,GACzBtL,EAAKwF,GAAW8F,GAAmBR,EACrC,CAEAxU,EAAMmG,cAAcxG,GAAQ+J,CAvE5B,CAwEF,EAQEhC,iBAAkB,CAAC,WE1HN,SAASyN,GAAiBC,EAAyBrQ,EAAcsD,QAC9D,IAAZA,IACFA,GAAU,GAGZ,ICnBoCrJ,ECJOJ,EFuBvCyW,EAA0B9V,EAAcwF,GACxCuQ,EAAuB/V,EAAcwF,IAf3C,SAAyBnG,GACvB,IAAImN,EAAOnN,EAAQ+D,wBACfI,EAASpB,EAAMoK,EAAK7I,OAAStE,EAAQqE,aAAe,EACpDD,EAASrB,EAAMoK,EAAK3I,QAAUxE,EAAQuE,cAAgB,EAC1D,OAAkB,IAAXJ,GAA2B,IAAXC,CACzB,CAU4DuS,CAAgBxQ,GACtEJ,EAAkBF,EAAmBM,GACrCgH,EAAOpJ,EAAsByS,EAAyBE,EAAsBjN,GAC5EyB,EAAS,CACXc,WAAY,EACZE,UAAW,GAET7C,EAAU,CACZ1E,EAAG,EACHE,EAAG,GAkBL,OAfI4R,IAA4BA,IAA4BhN,MACxB,SAA9B1J,EAAYoG,IAChBkG,GAAetG,MACbmF,GCnCgC9K,EDmCT+F,KClCdhG,EAAUC,IAAUO,EAAcP,GCJxC,CACL4L,YAFyChM,EDQbI,GCNR4L,WACpBE,UAAWlM,EAAQkM,WDGZH,GAAgB3L,IDoCnBO,EAAcwF,KAChBkD,EAAUtF,EAAsBoC,GAAc,IACtCxB,GAAKwB,EAAauH,WAC1BrE,EAAQxE,GAAKsB,EAAasH,WACjB1H,IACTsD
,EAAQ1E,EAAIyH,GAAoBrG,KAI7B,CACLpB,EAAGwI,EAAK5O,KAAO2M,EAAOc,WAAa3C,EAAQ1E,EAC3CE,EAAGsI,EAAK/K,IAAM8I,EAAOgB,UAAY7C,EAAQxE,EACzCP,MAAO6I,EAAK7I,MACZE,OAAQ2I,EAAK3I,OAEjB,CGvDA,SAASoS,GAAMC,GACb,IAAItT,EAAM,IAAIoO,IACVmF,EAAU,IAAIC,IACdC,EAAS,GAKb,SAAS3F,EAAK4F,GACZH,EAAQI,IAAID,EAASlW,MACN,GAAG3B,OAAO6X,EAASxU,UAAY,GAAIwU,EAASnO,kBAAoB,IACtEvH,SAAQ,SAAU4V,GACzB,IAAKL,EAAQM,IAAID,GAAM,CACrB,IAAIE,EAAc9T,EAAI3F,IAAIuZ,GAEtBE,GACFhG,EAAKgG,EAET,CACF,IACAL,EAAO3E,KAAK4E,EACd,CAQA,OAzBAJ,EAAUtV,SAAQ,SAAU0V,GAC1B1T,EAAIiP,IAAIyE,EAASlW,KAAMkW,EACzB,IAiBAJ,EAAUtV,SAAQ,SAAU0V,GACrBH,EAAQM,IAAIH,EAASlW,OAExBsQ,EAAK4F,EAET,IACOD,CACT,CCvBA,IAAIM,GAAkB,CACpBnY,UAAW,SACX0X,UAAW,GACX1U,SAAU,YAGZ,SAASoV,KACP,IAAK,IAAI1B,EAAO2B,UAAUrG,OAAQsG,EAAO,IAAIpU,MAAMwS,GAAO6B,EAAO,EAAGA,EAAO7B,EAAM6B,IAC/ED,EAAKC,GAAQF,UAAUE,GAGzB,OAAQD,EAAKvE,MAAK,SAAUlT,GAC1B,QAASA,GAAoD,mBAAlCA,EAAQ+D,sBACrC,GACF,CAEO,SAAS4T,GAAgBC,QACL,IAArBA,IACFA,EAAmB,CAAC,GAGtB,IAAIC,EAAoBD,EACpBE,EAAwBD,EAAkBE,iBAC1CA,OAA6C,IAA1BD,EAAmC,GAAKA,EAC3DE,EAAyBH,EAAkBI,eAC3CA,OAA4C,IAA3BD,EAAoCV,GAAkBU,EAC3E,OAAO,SAAsBjZ,EAAWD,EAAQoD,QAC9B,IAAZA,IACFA,EAAU+V,GAGZ,ICxC6B/W,EAC3BgX,EDuCE9W,EAAQ,CACVjC,UAAW,SACXgZ,iBAAkB,GAClBjW,QAASzE,OAAOkE,OAAO,CAAC,EAAG2V,GAAiBW,GAC5C1Q,cAAe,CAAC,EAChBjG,SAAU,CACRvC,UAAWA,EACXD,OAAQA,GAEV4C,WAAY,CAAC,EACbD,OAAQ,CAAC,GAEP2W,EAAmB,GACnBC,GAAc,EACdrN,EAAW,CACb5J,MAAOA,EACPkX,WAAY,SAAoBC,GAC9B,IAAIrW,EAAsC,mBAArBqW,EAAkCA,EAAiBnX,EAAMc,SAAWqW,EACzFC,IACApX,EAAMc,QAAUzE,OAAOkE,OAAO,CAAC,EAAGsW,EAAgB7W,EAAMc,QAASA,GACjEd,EAAMiK,cAAgB,CACpBtM,UAAW0B,EAAU1B,GAAa6N,GAAkB7N,GAAaA,EAAU4Q,eAAiB/C,GAAkB7N,EAAU4Q,gBAAkB,GAC1I7Q,OAAQ8N,GAAkB9N,IAI5B,IElE4B+X,EAC9B4B,EFiEMN,EDhCG,SAAwBtB,GAErC,IAAIsB,EAAmBvB,GAAMC,GAE7B,OAAO/W,EAAeb,QAAO,SAAUC,EAAK+B,GAC1C,OAAO/B,EAAIE,OAAO+Y,EAAiBvR,QAAO,SAAUqQ,GAClD,OAAOA,EAAShW,QAAUA,CAC5B,IACF,GAAG,GACL,CCuB+ByX,EElEK7B,EFkEsB,GAAGzX,OAAO2Y,EAAkB3W,EAAMc,QAAQ2U,WEjE9F4B,EAAS5B,EAAU5X,QAAO,SAAUwZ,EAAQE,GAC9C,IAAIC,EAAWH,EAAOE,EAAQ5X,MAK9B,OAJA0X,EAAOE,EAAQ5X,MAA
Q6X,EAAWnb,OAAOkE,OAAO,CAAC,EAAGiX,EAAUD,EAAS,CACrEzW,QAASzE,OAAOkE,OAAO,CAAC,EAAGiX,EAAS1W,QAASyW,EAAQzW,SACrD4I,KAAMrN,OAAOkE,OAAO,CAAC,EAAGiX,EAAS9N,KAAM6N,EAAQ7N,QAC5C6N,EACEF,CACT,GAAG,CAAC,GAEGhb,OAAO4D,KAAKoX,GAAQlV,KAAI,SAAUhG,GACvC,OAAOkb,EAAOlb,EAChB,MF4DM,OAJA6D,EAAM+W,iBAAmBA,EAAiBvR,QAAO,SAAUiS,GACzD,OAAOA,EAAE7X,OACX,IA+FFI,EAAM+W,iBAAiB5W,SAAQ,SAAUJ,GACvC,IAAIJ,EAAOI,EAAKJ,KACZ+X,EAAe3X,EAAKe,QACpBA,OAA2B,IAAjB4W,EAA0B,CAAC,EAAIA,EACzChX,EAASX,EAAKW,OAElB,GAAsB,mBAAXA,EAAuB,CAChC,IAAIiX,EAAYjX,EAAO,CACrBV,MAAOA,EACPL,KAAMA,EACNiK,SAAUA,EACV9I,QAASA,IAKXkW,EAAiB/F,KAAK0G,GAFT,WAAmB,EAGlC,CACF,IA/GS/N,EAASQ,QAClB,EAMAwN,YAAa,WACX,IAAIX,EAAJ,CAIA,IAAIY,EAAkB7X,EAAME,SACxBvC,EAAYka,EAAgBla,UAC5BD,EAASma,EAAgBna,OAG7B,GAAKyY,GAAiBxY,EAAWD,GAAjC,CAKAsC,EAAMwG,MAAQ,CACZ7I,UAAWwX,GAAiBxX,EAAWqH,EAAgBtH,GAAoC,UAA3BsC,EAAMc,QAAQC,UAC9ErD,OAAQiG,EAAcjG,IAOxBsC,EAAM0R,OAAQ,EACd1R,EAAMjC,UAAYiC,EAAMc,QAAQ/C,UAKhCiC,EAAM+W,iBAAiB5W,SAAQ,SAAU0V,GACvC,OAAO7V,EAAMmG,cAAc0P,EAASlW,MAAQtD,OAAOkE,OAAO,CAAC,EAAGsV,EAASnM,KACzE,IAEA,IAAK,IAAIoO,EAAQ,EAAGA,EAAQ9X,EAAM+W,iBAAiBhH,OAAQ+H,IACzD,IAAoB,IAAhB9X,EAAM0R,MAAV,CAMA,IAAIqG,EAAwB/X,EAAM+W,iBAAiBe,GAC/ChY,EAAKiY,EAAsBjY,GAC3BkY,EAAyBD,EAAsBjX,QAC/CoM,OAAsC,IAA3B8K,EAAoC,CAAC,EAAIA,EACpDrY,EAAOoY,EAAsBpY,KAEf,mBAAPG,IACTE,EAAQF,EAAG,CACTE,MAAOA,EACPc,QAASoM,EACTvN,KAAMA,EACNiK,SAAUA,KACN5J,EAdR,MAHEA,EAAM0R,OAAQ,EACdoG,GAAS,CAzBb,CATA,CAqDF,EAGA1N,QC1I2BtK,ED0IV,WACf,OAAO,IAAImY,SAAQ,SAAUC,GAC3BtO,EAASgO,cACTM,EAAQlY,EACV,GACF,EC7IG,WAUL,OATK8W,IACHA,EAAU,IAAImB,SAAQ,SAAUC,GAC9BD,QAAQC,UAAUC,MAAK,WACrBrB,OAAUsB,EACVF,EAAQpY,IACV,GACF,KAGKgX,CACT,GDmIIuB,QAAS,WACPjB,IACAH,GAAc,CAChB,GAGF,IAAKd,GAAiBxY,EAAWD,GAC/B,OAAOkM,EAmCT,SAASwN,IACPJ,EAAiB7W,SAAQ,SAAUL,GACjC,OAAOA,GACT,IACAkX,EAAmB,EACrB,CAEA,OAvCApN,EAASsN,WAAWpW,GAASqX,MAAK,SAAUnY,IACrCiX,GAAenW,EAAQwX,eAC1BxX,EAAQwX,cAActY,EAE1B,IAmCO4J,CACT,CACF,CACO,IAAI2O,GAA4BhC,KGzLnC,GAA4BA,GAAgB,CAC9CI,iBAFqB,CAAC6B,GAAgB,GAAe,GAAe,EAAa,GAAQ,GAAM,GAAiB,EAAO,MCJrH,GAA4BjC,GA
AgB,CAC9CI,iBAFqB,CAAC6B,GAAgB,GAAe,GAAe,KCatE,MAAMC,GAAa,IAAIlI,IACjBmI,GAAO,CACX,GAAAtH,CAAIxS,EAASzC,EAAKyN,GACX6O,GAAWzC,IAAIpX,IAClB6Z,GAAWrH,IAAIxS,EAAS,IAAI2R,KAE9B,MAAMoI,EAAcF,GAAWjc,IAAIoC,GAI9B+Z,EAAY3C,IAAI7Z,IAA6B,IAArBwc,EAAYC,KAKzCD,EAAYvH,IAAIjV,EAAKyN,GAHnBiP,QAAQC,MAAM,+EAA+E7W,MAAM8W,KAAKJ,EAAY1Y,QAAQ,MAIhI,EACAzD,IAAG,CAACoC,EAASzC,IACPsc,GAAWzC,IAAIpX,IACV6Z,GAAWjc,IAAIoC,GAASpC,IAAIL,IAE9B,KAET,MAAA6c,CAAOpa,EAASzC,GACd,IAAKsc,GAAWzC,IAAIpX,GAClB,OAEF,MAAM+Z,EAAcF,GAAWjc,IAAIoC,GACnC+Z,EAAYM,OAAO9c,GAGM,IAArBwc,EAAYC,MACdH,GAAWQ,OAAOra,EAEtB,GAYIsa,GAAiB,gBAOjBC,GAAgBC,IAChBA,GAAYna,OAAOoa,KAAOpa,OAAOoa,IAAIC,SAEvCF,EAAWA,EAAS5O,QAAQ,iBAAiB,CAAC+O,EAAOC,IAAO,IAAIH,IAAIC,OAAOE,QAEtEJ,GA4CHK,GAAuB7a,IAC3BA,EAAQ8a,cAAc,IAAIC,MAAMT,IAAgB,EAE5C,GAAYU,MACXA,GAA4B,iBAAXA,UAGO,IAAlBA,EAAOC,SAChBD,EAASA,EAAO,SAEgB,IAApBA,EAAOE,UAEjBC,GAAaH,GAEb,GAAUA,GACLA,EAAOC,OAASD,EAAO,GAAKA,EAEf,iBAAXA,GAAuBA,EAAO7J,OAAS,EACzCrL,SAAS+C,cAAc0R,GAAcS,IAEvC,KAEHI,GAAYpb,IAChB,IAAK,GAAUA,IAAgD,IAApCA,EAAQqb,iBAAiBlK,OAClD,OAAO,EAET,MAAMmK,EAAgF,YAA7D5V,iBAAiB1F,GAASub,iBAAiB,cAE9DC,EAAgBxb,EAAQyb,QAAQ,uBACtC,IAAKD,EACH,OAAOF,EAET,GAAIE,IAAkBxb,EAAS,CAC7B,MAAM0b,EAAU1b,EAAQyb,QAAQ,WAChC,GAAIC,GAAWA,EAAQlW,aAAegW,EACpC,OAAO,EAET,GAAgB,OAAZE,EACF,OAAO,CAEX,CACA,OAAOJ,CAAgB,EAEnBK,GAAa3b,IACZA,GAAWA,EAAQkb,WAAaU,KAAKC,gBAGtC7b,EAAQ8b,UAAU7W,SAAS,mBAGC,IAArBjF,EAAQ+b,SACV/b,EAAQ+b,SAEV/b,EAAQgc,aAAa,aAAoD,UAArChc,EAAQic,aAAa,aAE5DC,GAAiBlc,IACrB,IAAK8F,SAASC,gBAAgBoW,aAC5B,OAAO,KAIT,GAAmC,mBAAxBnc,EAAQqF,YAA4B,CAC7C,MAAM+W,EAAOpc,EAAQqF,cACrB,OAAO+W,aAAgBtb,WAAasb,EAAO,IAC7C,CACA,OAAIpc,aAAmBc,WACdd,EAIJA,EAAQwF,WAGN0W,GAAelc,EAAQwF,YAFrB,IAEgC,EAErC6W,GAAO,OAUPC,GAAStc,IACbA,EAAQuE,YAAY,EAGhBgY,GAAY,IACZlc,OAAOmc,SAAW1W,SAAS6G,KAAKqP,aAAa,qBACxC3b,OAAOmc,OAET,KAEHC,GAA4B,GAgB5BC,GAAQ,IAAuC,QAAjC5W,SAASC,gBAAgB4W,IACvCC,GAAqBC,IAhBAC,QAiBN,KACjB,MAAMC,EAAIR,KAEV,GAAIQ,EAAG,CACL,MAAMhc,EAAO8b,EAAOG,KACdC,EAAqBF,EAAE7b,GAAGH,GAChCgc,EAAE7b,GAAGH,GAAQ8b,EAAOK,gBACpBH,EAAE7b,GA
AGH,GAAMoc,YAAcN,EACzBE,EAAE7b,GAAGH,GAAMqc,WAAa,KACtBL,EAAE7b,GAAGH,GAAQkc,EACNJ,EAAOK,gBAElB,GA5B0B,YAAxBpX,SAASuX,YAENZ,GAA0BtL,QAC7BrL,SAASyF,iBAAiB,oBAAoB,KAC5C,IAAK,MAAMuR,KAAYL,GACrBK,GACF,IAGJL,GAA0BpK,KAAKyK,IAE/BA,GAkBA,EAEEQ,GAAU,CAACC,EAAkB9F,EAAO,GAAI+F,EAAeD,IACxB,mBAArBA,EAAkCA,KAAoB9F,GAAQ+F,EAExEC,GAAyB,CAACX,EAAUY,EAAmBC,GAAoB,KAC/E,IAAKA,EAEH,YADAL,GAAQR,GAGV,MACMc,EAhKiC5d,KACvC,IAAKA,EACH,OAAO,EAIT,IAAI,mBACF6d,EAAkB,gBAClBC,GACEzd,OAAOqF,iBAAiB1F,GAC5B,MAAM+d,EAA0BC,OAAOC,WAAWJ,GAC5CK,EAAuBF,OAAOC,WAAWH,GAG/C,OAAKC,GAA4BG,GAKjCL,EAAqBA,EAAmBlb,MAAM,KAAK,GACnDmb,EAAkBA,EAAgBnb,MAAM,KAAK,GAtDf,KAuDtBqb,OAAOC,WAAWJ,GAAsBG,OAAOC,WAAWH,KANzD,CAMoG,EA2IpFK,CAAiCT,GADlC,EAExB,IAAIU,GAAS,EACb,MAAMC,EAAU,EACdrR,aAEIA,IAAW0Q,IAGfU,GAAS,EACTV,EAAkBjS,oBAAoB6O,GAAgB+D,GACtDf,GAAQR,GAAS,EAEnBY,EAAkBnS,iBAAiB+O,GAAgB+D,GACnDC,YAAW,KACJF,GACHvD,GAAqB6C,EACvB,GACCE,EAAiB,EAYhBW,GAAuB,CAAC1R,EAAM2R,EAAeC,EAAeC,KAChE,MAAMC,EAAa9R,EAAKsE,OACxB,IAAI+H,EAAQrM,EAAKjH,QAAQ4Y,GAIzB,OAAe,IAAXtF,GACMuF,GAAiBC,EAAiB7R,EAAK8R,EAAa,GAAK9R,EAAK,IAExEqM,GAASuF,EAAgB,GAAK,EAC1BC,IACFxF,GAASA,EAAQyF,GAAcA,GAE1B9R,EAAKjK,KAAKC,IAAI,EAAGD,KAAKE,IAAIoW,EAAOyF,EAAa,KAAI,EAerDC,GAAiB,qBACjBC,GAAiB,OACjBC,GAAgB,SAChBC,GAAgB,CAAC,EACvB,IAAIC,GAAW,EACf,MAAMC,GAAe,CACnBC,WAAY,YACZC,WAAY,YAERC,GAAe,IAAIrI,IAAI,CAAC,QAAS,WAAY,UAAW,YAAa,cAAe,aAAc,iBAAkB,YAAa,WAAY,YAAa,cAAe,YAAa,UAAW,WAAY,QAAS,oBAAqB,aAAc,YAAa,WAAY,cAAe,cAAe,cAAe,YAAa,eAAgB,gBAAiB,eAAgB,gBAAiB,aAAc,QAAS,OAAQ,SAAU,QAAS,SAAU,SAAU,UAAW,WAAY,OAAQ,SAAU,eAAgB,SAAU,OAAQ,mBAAoB,mBAAoB,QAAS,QAAS,WAM/lB,SAASsI,GAAarf,EAASsf,GAC7B,OAAOA,GAAO,GAAGA,MAAQN,QAAgBhf,EAAQgf,UAAYA,IAC/D,CACA,SAASO,GAAiBvf,GACxB,MAAMsf,EAAMD,GAAarf,GAGzB,OAFAA,EAAQgf,SAAWM,EACnBP,GAAcO,GAAOP,GAAcO,IAAQ,CAAC,EACrCP,GAAcO,EACvB,CAiCA,SAASE,GAAYC,EAAQC,EAAUC,EAAqB,MAC1D,OAAOliB,OAAOmiB,OAAOH,GAAQ7M,MAAKiN,GAASA,EAAMH,WAAaA,GAAYG,EAAMF,qBAAuBA,GACzG,CACA,SAASG,GAAoBC,EAAmB1B,EAAS2B,GACvD,MAAMC,EAAiC,iBAAZ5B,EAErBqB,EAAWO,EAAcD,EAAqB3B,GAAW2B,EAC/D,IAAIE,EAAYC,GA
AaJ,GAI7B,OAHKX,GAAahI,IAAI8I,KACpBA,EAAYH,GAEP,CAACE,EAAaP,EAAUQ,EACjC,CACA,SAASE,GAAWpgB,EAAS+f,EAAmB1B,EAAS2B,EAAoBK,GAC3E,GAAiC,iBAAtBN,IAAmC/f,EAC5C,OAEF,IAAKigB,EAAaP,EAAUQ,GAAaJ,GAAoBC,EAAmB1B,EAAS2B,GAIzF,GAAID,KAAqBd,GAAc,CACrC,MAAMqB,EAAepf,GACZ,SAAU2e,GACf,IAAKA,EAAMU,eAAiBV,EAAMU,gBAAkBV,EAAMW,iBAAmBX,EAAMW,eAAevb,SAAS4a,EAAMU,eAC/G,OAAOrf,EAAGjD,KAAKwiB,KAAMZ,EAEzB,EAEFH,EAAWY,EAAaZ,EAC1B,CACA,MAAMD,EAASF,GAAiBvf,GAC1B0gB,EAAWjB,EAAOS,KAAeT,EAAOS,GAAa,CAAC,GACtDS,EAAmBnB,GAAYkB,EAAUhB,EAAUO,EAAc5B,EAAU,MACjF,GAAIsC,EAEF,YADAA,EAAiBN,OAASM,EAAiBN,QAAUA,GAGvD,MAAMf,EAAMD,GAAaK,EAAUK,EAAkBnU,QAAQgT,GAAgB,KACvE1d,EAAK+e,EA5Db,SAAoCjgB,EAASwa,EAAUtZ,GACrD,OAAO,SAASmd,EAAQwB,GACtB,MAAMe,EAAc5gB,EAAQ6gB,iBAAiBrG,GAC7C,IAAK,IAAI,OACPxN,GACE6S,EAAO7S,GAAUA,IAAWyT,KAAMzT,EAASA,EAAOxH,WACpD,IAAK,MAAMsb,KAAcF,EACvB,GAAIE,IAAe9T,EASnB,OANA+T,GAAWlB,EAAO,CAChBW,eAAgBxT,IAEdqR,EAAQgC,QACVW,GAAaC,IAAIjhB,EAAS6f,EAAMqB,KAAM1G,EAAUtZ,GAE3CA,EAAGigB,MAAMnU,EAAQ,CAAC6S,GAG/B,CACF,CAwC2BuB,CAA2BphB,EAASqe,EAASqB,GAvExE,SAA0B1f,EAASkB,GACjC,OAAO,SAASmd,EAAQwB,GAOtB,OANAkB,GAAWlB,EAAO,CAChBW,eAAgBxgB,IAEdqe,EAAQgC,QACVW,GAAaC,IAAIjhB,EAAS6f,EAAMqB,KAAMhgB,GAEjCA,EAAGigB,MAAMnhB,EAAS,CAAC6f,GAC5B,CACF,CA6DoFwB,CAAiBrhB,EAAS0f,GAC5Gxe,EAAGye,mBAAqBM,EAAc5B,EAAU,KAChDnd,EAAGwe,SAAWA,EACdxe,EAAGmf,OAASA,EACZnf,EAAG8d,SAAWM,EACdoB,EAASpB,GAAOpe,EAChBlB,EAAQuL,iBAAiB2U,EAAWhf,EAAI+e,EAC1C,CACA,SAASqB,GAActhB,EAASyf,EAAQS,EAAW7B,EAASsB,GAC1D,MAAMze,EAAKse,GAAYC,EAAOS,GAAY7B,EAASsB,GAC9Cze,IAGLlB,EAAQyL,oBAAoByU,EAAWhf,EAAIqgB,QAAQ5B,WAC5CF,EAAOS,GAAWhf,EAAG8d,UAC9B,CACA,SAASwC,GAAyBxhB,EAASyf,EAAQS,EAAWuB,GAC5D,MAAMC,EAAoBjC,EAAOS,IAAc,CAAC,EAChD,IAAK,MAAOyB,EAAY9B,KAAUpiB,OAAOmkB,QAAQF,GAC3CC,EAAWE,SAASJ,IACtBH,GAActhB,EAASyf,EAAQS,EAAWL,EAAMH,SAAUG,EAAMF,mBAGtE,CACA,SAASQ,GAAaN,GAGpB,OADAA,EAAQA,EAAMjU,QAAQiT,GAAgB,IAC/BI,GAAaY,IAAUA,CAChC,CACA,MAAMmB,GAAe,CACnB,EAAAc,CAAG9hB,EAAS6f,EAAOxB,EAAS2B,GAC1BI,GAAWpgB,EAAS6f,EAAOxB,EAAS2B,GAAoB,EAC1D,EACA,GAAA+B,CAAI/hB,EAAS6f,EAAOxB,EAA
S2B,GAC3BI,GAAWpgB,EAAS6f,EAAOxB,EAAS2B,GAAoB,EAC1D,EACA,GAAAiB,CAAIjhB,EAAS+f,EAAmB1B,EAAS2B,GACvC,GAAiC,iBAAtBD,IAAmC/f,EAC5C,OAEF,MAAOigB,EAAaP,EAAUQ,GAAaJ,GAAoBC,EAAmB1B,EAAS2B,GACrFgC,EAAc9B,IAAcH,EAC5BN,EAASF,GAAiBvf,GAC1B0hB,EAAoBjC,EAAOS,IAAc,CAAC,EAC1C+B,EAAclC,EAAkBmC,WAAW,KACjD,QAAwB,IAAbxC,EAAX,CAQA,GAAIuC,EACF,IAAK,MAAME,KAAgB1kB,OAAO4D,KAAKoe,GACrC+B,GAAyBxhB,EAASyf,EAAQ0C,EAAcpC,EAAkBlN,MAAM,IAGpF,IAAK,MAAOuP,EAAavC,KAAUpiB,OAAOmkB,QAAQF,GAAoB,CACpE,MAAMC,EAAaS,EAAYxW,QAAQkT,GAAe,IACjDkD,IAAejC,EAAkB8B,SAASF,IAC7CL,GAActhB,EAASyf,EAAQS,EAAWL,EAAMH,SAAUG,EAAMF,mBAEpE,CAXA,KAPA,CAEE,IAAKliB,OAAO4D,KAAKqgB,GAAmBvQ,OAClC,OAEFmQ,GAActhB,EAASyf,EAAQS,EAAWR,EAAUO,EAAc5B,EAAU,KAE9E,CAYF,EACA,OAAAgE,CAAQriB,EAAS6f,EAAOpI,GACtB,GAAqB,iBAAVoI,IAAuB7f,EAChC,OAAO,KAET,MAAM+c,EAAIR,KAGV,IAAI+F,EAAc,KACdC,GAAU,EACVC,GAAiB,EACjBC,GAAmB,EAJH5C,IADFM,GAAaN,IAMZ9C,IACjBuF,EAAcvF,EAAEhC,MAAM8E,EAAOpI,GAC7BsF,EAAE/c,GAASqiB,QAAQC,GACnBC,GAAWD,EAAYI,uBACvBF,GAAkBF,EAAYK,gCAC9BF,EAAmBH,EAAYM,sBAEjC,MAAMC,EAAM9B,GAAW,IAAIhG,MAAM8E,EAAO,CACtC0C,UACAO,YAAY,IACVrL,GAUJ,OATIgL,GACFI,EAAIE,iBAEFP,GACFxiB,EAAQ8a,cAAc+H,GAEpBA,EAAIJ,kBAAoBH,GAC1BA,EAAYS,iBAEPF,CACT,GAEF,SAAS9B,GAAWljB,EAAKmlB,EAAO,CAAC,GAC/B,IAAK,MAAOzlB,EAAKa,KAAUX,OAAOmkB,QAAQoB,GACxC,IACEnlB,EAAIN,GAAOa,CACb,CAAE,MAAO6kB,GACPxlB,OAAOC,eAAeG,EAAKN,EAAK,CAC9B2lB,cAAc,EACdtlB,IAAG,IACMQ,GAGb,CAEF,OAAOP,CACT,CASA,SAASslB,GAAc/kB,GACrB,GAAc,SAAVA,EACF,OAAO,EAET,GAAc,UAAVA,EACF,OAAO,EAET,GAAIA,IAAU4f,OAAO5f,GAAOkC,WAC1B,OAAO0d,OAAO5f,GAEhB,GAAc,KAAVA,GAA0B,SAAVA,EAClB,OAAO,KAET,GAAqB,iBAAVA,EACT,OAAOA,EAET,IACE,OAAOglB,KAAKC,MAAMC,mBAAmBllB,GACvC,CAAE,MAAO6kB,GACP,OAAO7kB,CACT,CACF,CACA,SAASmlB,GAAiBhmB,GACxB,OAAOA,EAAIqO,QAAQ,UAAU4X,GAAO,IAAIA,EAAItjB,iBAC9C,CACA,MAAMujB,GAAc,CAClB,gBAAAC,CAAiB1jB,EAASzC,EAAKa,GAC7B4B,EAAQ6B,aAAa,WAAW0hB,GAAiBhmB,KAAQa,EAC3D,EACA,mBAAAulB,CAAoB3jB,EAASzC,GAC3ByC,EAAQ4B,gBAAgB,WAAW2hB,GAAiBhmB,KACtD,EACA,iBAAAqmB,CAAkB5jB,GAChB,IAAKA,EACH,MAAO,CAAC,EAEV,MAAM0B,EAAa,CAAC,EACdmiB,EAASpm
B,OAAO4D,KAAKrB,EAAQ8jB,SAASld,QAAOrJ,GAAOA,EAAI2kB,WAAW,QAAU3kB,EAAI2kB,WAAW,cAClG,IAAK,MAAM3kB,KAAOsmB,EAAQ,CACxB,IAAIE,EAAUxmB,EAAIqO,QAAQ,MAAO,IACjCmY,EAAUA,EAAQC,OAAO,GAAG9jB,cAAgB6jB,EAAQlR,MAAM,EAAGkR,EAAQ5S,QACrEzP,EAAWqiB,GAAWZ,GAAcnjB,EAAQ8jB,QAAQvmB,GACtD,CACA,OAAOmE,CACT,EACAuiB,iBAAgB,CAACjkB,EAASzC,IACjB4lB,GAAcnjB,EAAQic,aAAa,WAAWsH,GAAiBhmB,QAgB1E,MAAM2mB,GAEJ,kBAAWC,GACT,MAAO,CAAC,CACV,CACA,sBAAWC,GACT,MAAO,CAAC,CACV,CACA,eAAWpH,GACT,MAAM,IAAIqH,MAAM,sEAClB,CACA,UAAAC,CAAWC,GAIT,OAHAA,EAAS9D,KAAK+D,gBAAgBD,GAC9BA,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CACA,iBAAAE,CAAkBF,GAChB,OAAOA,CACT,CACA,eAAAC,CAAgBD,EAAQvkB,GACtB,MAAM2kB,EAAa,GAAU3kB,GAAWyjB,GAAYQ,iBAAiBjkB,EAAS,UAAY,CAAC,EAE3F,MAAO,IACFygB,KAAKmE,YAAYT,WACM,iBAAfQ,EAA0BA,EAAa,CAAC,KAC/C,GAAU3kB,GAAWyjB,GAAYG,kBAAkB5jB,GAAW,CAAC,KAC7C,iBAAXukB,EAAsBA,EAAS,CAAC,EAE/C,CACA,gBAAAG,CAAiBH,EAAQM,EAAcpE,KAAKmE,YAAYR,aACtD,IAAK,MAAO7hB,EAAUuiB,KAAkBrnB,OAAOmkB,QAAQiD,GAAc,CACnE,MAAMzmB,EAAQmmB,EAAOhiB,GACfwiB,EAAY,GAAU3mB,GAAS,UAjiBrC4c,OADSA,EAkiB+C5c,GAhiBnD,GAAG4c,IAELvd,OAAOM,UAAUuC,SAASrC,KAAK+c,GAAQL,MAAM,eAAe,GAAGza,cA+hBlE,IAAK,IAAI8kB,OAAOF,GAAehhB,KAAKihB,GAClC,MAAM,IAAIE,UAAU,GAAGxE,KAAKmE,YAAY5H,KAAKkI,0BAA0B3iB,qBAA4BwiB,yBAAiCD,MAExI,CAtiBW9J,KAuiBb,EAqBF,MAAMmK,WAAsBjB,GAC1B,WAAAU,CAAY5kB,EAASukB,GACnBa,SACAplB,EAAUmb,GAAWnb,MAIrBygB,KAAK4E,SAAWrlB,EAChBygB,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/BzK,GAAKtH,IAAIiO,KAAK4E,SAAU5E,KAAKmE,YAAYW,SAAU9E,MACrD,CAGA,OAAA+E,GACE1L,GAAKM,OAAOqG,KAAK4E,SAAU5E,KAAKmE,YAAYW,UAC5CvE,GAAaC,IAAIR,KAAK4E,SAAU5E,KAAKmE,YAAYa,WACjD,IAAK,MAAMC,KAAgBjoB,OAAOkoB,oBAAoBlF,MACpDA,KAAKiF,GAAgB,IAEzB,CACA,cAAAE,CAAe9I,EAAU9c,EAAS6lB,GAAa,GAC7CpI,GAAuBX,EAAU9c,EAAS6lB,EAC5C,CACA,UAAAvB,CAAWC,GAIT,OAHAA,EAAS9D,KAAK+D,gBAAgBD,EAAQ9D,KAAK4E,UAC3Cd,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CAGA,kBAAOuB,CAAY9lB,GACjB,OAAO8Z,GAAKlc,IAAIud,GAAWnb,GAAUygB,KAAK8E,SAC5C,CACA,0BAAOQ,CAAoB/lB,EAASukB,EAAS,CAAC,GAC5C,OAAO9D,KAAKqF,YAAY9lB,IAAY,IAAIygB,KA
AKzgB,EAA2B,iBAAXukB,EAAsBA,EAAS,KAC9F,CACA,kBAAWyB,GACT,MA5CY,OA6Cd,CACA,mBAAWT,GACT,MAAO,MAAM9E,KAAKzD,MACpB,CACA,oBAAWyI,GACT,MAAO,IAAIhF,KAAK8E,UAClB,CACA,gBAAOU,CAAUllB,GACf,MAAO,GAAGA,IAAO0f,KAAKgF,WACxB,EAUF,MAAMS,GAAclmB,IAClB,IAAIwa,EAAWxa,EAAQic,aAAa,kBACpC,IAAKzB,GAAyB,MAAbA,EAAkB,CACjC,IAAI2L,EAAgBnmB,EAAQic,aAAa,QAMzC,IAAKkK,IAAkBA,EAActE,SAAS,OAASsE,EAAcjE,WAAW,KAC9E,OAAO,KAILiE,EAActE,SAAS,OAASsE,EAAcjE,WAAW,OAC3DiE,EAAgB,IAAIA,EAAcxjB,MAAM,KAAK,MAE/C6X,EAAW2L,GAAmC,MAAlBA,EAAwB5L,GAAc4L,EAAcC,QAAU,IAC5F,CACA,OAAO5L,CAAQ,EAEX6L,GAAiB,CACrBzT,KAAI,CAAC4H,EAAUxa,EAAU8F,SAASC,kBACzB,GAAG3G,UAAUsB,QAAQ3C,UAAU8iB,iBAAiB5iB,KAAK+B,EAASwa,IAEvE8L,QAAO,CAAC9L,EAAUxa,EAAU8F,SAASC,kBAC5BrF,QAAQ3C,UAAU8K,cAAc5K,KAAK+B,EAASwa,GAEvD+L,SAAQ,CAACvmB,EAASwa,IACT,GAAGpb,UAAUY,EAAQumB,UAAU3f,QAAOzB,GAASA,EAAMqhB,QAAQhM,KAEtE,OAAAiM,CAAQzmB,EAASwa,GACf,MAAMiM,EAAU,GAChB,IAAIC,EAAW1mB,EAAQwF,WAAWiW,QAAQjB,GAC1C,KAAOkM,GACLD,EAAQpU,KAAKqU,GACbA,EAAWA,EAASlhB,WAAWiW,QAAQjB,GAEzC,OAAOiM,CACT,EACA,IAAAE,CAAK3mB,EAASwa,GACZ,IAAIoM,EAAW5mB,EAAQ6mB,uBACvB,KAAOD,GAAU,CACf,GAAIA,EAASJ,QAAQhM,GACnB,MAAO,CAACoM,GAEVA,EAAWA,EAASC,sBACtB,CACA,MAAO,EACT,EAEA,IAAAvhB,CAAKtF,EAASwa,GACZ,IAAIlV,EAAOtF,EAAQ8mB,mBACnB,KAAOxhB,GAAM,CACX,GAAIA,EAAKkhB,QAAQhM,GACf,MAAO,CAAClV,GAEVA,EAAOA,EAAKwhB,kBACd,CACA,MAAO,EACT,EACA,iBAAAC,CAAkB/mB,GAChB,MAAMgnB,EAAa,CAAC,IAAK,SAAU,QAAS,WAAY,SAAU,UAAW,aAAc,4BAA4BzjB,KAAIiX,GAAY,GAAGA,2BAAiC7W,KAAK,KAChL,OAAO8c,KAAK7N,KAAKoU,EAAYhnB,GAAS4G,QAAOqgB,IAAOtL,GAAWsL,IAAO7L,GAAU6L,IAClF,EACA,sBAAAC,CAAuBlnB,GACrB,MAAMwa,EAAW0L,GAAYlmB,GAC7B,OAAIwa,GACK6L,GAAeC,QAAQ9L,GAAYA,EAErC,IACT,EACA,sBAAA2M,CAAuBnnB,GACrB,MAAMwa,EAAW0L,GAAYlmB,GAC7B,OAAOwa,EAAW6L,GAAeC,QAAQ9L,GAAY,IACvD,EACA,+BAAA4M,CAAgCpnB,GAC9B,MAAMwa,EAAW0L,GAAYlmB,GAC7B,OAAOwa,EAAW6L,GAAezT,KAAK4H,GAAY,EACpD,GAUI6M,GAAuB,CAACC,EAAWC,EAAS,UAChD,MAAMC,EAAa,gBAAgBF,EAAU7B,YACvC1kB,EAAOumB,EAAUtK,KACvBgE,GAAac,GAAGhc,SAAU0hB,EAAY,qBAAqBzmB,OAAU,SAAU8e,GAI7E,GAHI,CAAC,IAAK,QAAQgC,SAASpB,KAAKgH,UAC9B5H,EAA
MkD,iBAEJpH,GAAW8E,MACb,OAEF,MAAMzT,EAASqZ,GAAec,uBAAuB1G,OAASA,KAAKhF,QAAQ,IAAI1a,KAC9DumB,EAAUvB,oBAAoB/Y,GAGtCua,IACX,GAAE,EAiBEG,GAAc,YACdC,GAAc,QAAQD,KACtBE,GAAe,SAASF,KAQ9B,MAAMG,WAAc1C,GAElB,eAAWnI,GACT,MAfW,OAgBb,CAGA,KAAA8K,GAEE,GADmB9G,GAAaqB,QAAQ5B,KAAK4E,SAAUsC,IACxClF,iBACb,OAEFhC,KAAK4E,SAASvJ,UAAU1B,OAlBF,QAmBtB,MAAMyL,EAAapF,KAAK4E,SAASvJ,UAAU7W,SApBrB,QAqBtBwb,KAAKmF,gBAAe,IAAMnF,KAAKsH,mBAAmBtH,KAAK4E,SAAUQ,EACnE,CAGA,eAAAkC,GACEtH,KAAK4E,SAASjL,SACd4G,GAAaqB,QAAQ5B,KAAK4E,SAAUuC,IACpCnH,KAAK+E,SACP,CAGA,sBAAOtI,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAO+c,GAAM9B,oBAAoBtF,MACvC,GAAsB,iBAAX8D,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KAJb,CAKF,GACF,EAOF4G,GAAqBQ,GAAO,SAM5BjL,GAAmBiL,IAcnB,MAKMI,GAAyB,4BAO/B,MAAMC,WAAe/C,GAEnB,eAAWnI,GACT,MAfW,QAgBb,CAGA,MAAAmL,GAEE1H,KAAK4E,SAASxjB,aAAa,eAAgB4e,KAAK4E,SAASvJ,UAAUqM,OAjB3C,UAkB1B,CAGA,sBAAOjL,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAOod,GAAOnC,oBAAoBtF,MACzB,WAAX8D,GACFzZ,EAAKyZ,IAET,GACF,EAOFvD,GAAac,GAAGhc,SAjCe,2BAiCmBmiB,IAAwBpI,IACxEA,EAAMkD,iBACN,MAAMqF,EAASvI,EAAM7S,OAAOyO,QAAQwM,IACvBC,GAAOnC,oBAAoBqC,GACnCD,QAAQ,IAOfvL,GAAmBsL,IAcnB,MACMG,GAAc,YACdC,GAAmB,aAAaD,KAChCE,GAAkB,YAAYF,KAC9BG,GAAiB,WAAWH,KAC5BI,GAAoB,cAAcJ,KAClCK,GAAkB,YAAYL,KAK9BM,GAAY,CAChBC,YAAa,KACbC,aAAc,KACdC,cAAe,MAEXC,GAAgB,CACpBH,YAAa,kBACbC,aAAc,kBACdC,cAAe,mBAOjB,MAAME,WAAc9E,GAClB,WAAAU,CAAY5kB,EAASukB,GACnBa,QACA3E,KAAK4E,SAAWrlB,EACXA,GAAYgpB,GAAMC,gBAGvBxI,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/B9D,KAAKyI,QAAU,EACfzI,KAAK0I,sBAAwB5H,QAAQlhB,OAAO+oB,cAC5C3I,KAAK4I,cACP,CAGA,kBAAWlF,GACT,OAAOwE,EACT,CACA,sBAAWvE,GACT,OAAO2E,EACT,CACA,eAAW/L,GACT,MA/CW,OAgDb,CAGA,OAAAwI,GACExE,GAAaC,IAAIR,KAAK4E,SAAUgD,GAClC,CAGA,MAAAiB,CAAOzJ,GACAY,KAAK0I,sBAIN1I,KAAK8I,wBAAwB1J,KAC/BY,KAAKyI,QAAUrJ,EAAM2J,SAJrB/I,KAAKyI,QAAUrJ,EAAM4J,QAAQ,GAAGD,OAMpC,CACA,IAAAE,CAAK7J,GACCY,KAAK8I,wBAAwB1J,KAC/BY,KAAKyI,QAAUrJ,EAAM2J,QAAU/I,KAAKyI,SAEtCzI,KAAKkJ,eACLrM,
GAAQmD,KAAK6E,QAAQsD,YACvB,CACA,KAAAgB,CAAM/J,GACJY,KAAKyI,QAAUrJ,EAAM4J,SAAW5J,EAAM4J,QAAQtY,OAAS,EAAI,EAAI0O,EAAM4J,QAAQ,GAAGD,QAAU/I,KAAKyI,OACjG,CACA,YAAAS,GACE,MAAME,EAAYjnB,KAAKoC,IAAIyb,KAAKyI,SAChC,GAAIW,GAnEgB,GAoElB,OAEF,MAAM9b,EAAY8b,EAAYpJ,KAAKyI,QACnCzI,KAAKyI,QAAU,EACVnb,GAGLuP,GAAQvP,EAAY,EAAI0S,KAAK6E,QAAQwD,cAAgBrI,KAAK6E,QAAQuD,aACpE,CACA,WAAAQ,GACM5I,KAAK0I,uBACPnI,GAAac,GAAGrB,KAAK4E,SAAUoD,IAAmB5I,GAASY,KAAK6I,OAAOzJ,KACvEmB,GAAac,GAAGrB,KAAK4E,SAAUqD,IAAiB7I,GAASY,KAAKiJ,KAAK7J,KACnEY,KAAK4E,SAASvJ,UAAU5E,IAlFG,mBAoF3B8J,GAAac,GAAGrB,KAAK4E,SAAUiD,IAAkBzI,GAASY,KAAK6I,OAAOzJ,KACtEmB,GAAac,GAAGrB,KAAK4E,SAAUkD,IAAiB1I,GAASY,KAAKmJ,MAAM/J,KACpEmB,GAAac,GAAGrB,KAAK4E,SAAUmD,IAAgB3I,GAASY,KAAKiJ,KAAK7J,KAEtE,CACA,uBAAA0J,CAAwB1J,GACtB,OAAOY,KAAK0I,wBA3FS,QA2FiBtJ,EAAMiK,aA5FrB,UA4FyDjK,EAAMiK,YACxF,CAGA,kBAAOb,GACL,MAAO,iBAAkBnjB,SAASC,iBAAmB7C,UAAU6mB,eAAiB,CAClF,EAeF,MAEMC,GAAc,eACdC,GAAiB,YAKjBC,GAAa,OACbC,GAAa,OACbC,GAAiB,OACjBC,GAAkB,QAClBC,GAAc,QAAQN,KACtBO,GAAa,OAAOP,KACpBQ,GAAkB,UAAUR,KAC5BS,GAAqB,aAAaT,KAClCU,GAAqB,aAAaV,KAClCW,GAAmB,YAAYX,KAC/BY,GAAwB,OAAOZ,KAAcC,KAC7CY,GAAyB,QAAQb,KAAcC,KAC/Ca,GAAsB,WACtBC,GAAsB,SAMtBC,GAAkB,UAClBC,GAAgB,iBAChBC,GAAuBF,GAAkBC,GAKzCE,GAAmB,CACvB,UAAoBd,GACpB,WAAqBD,IAEjBgB,GAAY,CAChBC,SAAU,IACVC,UAAU,EACVC,MAAO,QACPC,MAAM,EACNC,OAAO,EACPC,MAAM,GAEFC,GAAgB,CACpBN,SAAU,mBAEVC,SAAU,UACVC,MAAO,mBACPC,KAAM,mBACNC,MAAO,UACPC,KAAM,WAOR,MAAME,WAAiBzG,GACrB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKoL,UAAY,KACjBpL,KAAKqL,eAAiB,KACtBrL,KAAKsL,YAAa,EAClBtL,KAAKuL,aAAe,KACpBvL,KAAKwL,aAAe,KACpBxL,KAAKyL,mBAAqB7F,GAAeC,QArCjB,uBAqC8C7F,KAAK4E,UAC3E5E,KAAK0L,qBACD1L,KAAK6E,QAAQkG,OAASV,IACxBrK,KAAK2L,OAET,CAGA,kBAAWjI,GACT,OAAOiH,EACT,CACA,sBAAWhH,GACT,OAAOuH,EACT,CACA,eAAW3O,GACT,MAnFW,UAoFb,CAGA,IAAA1X,GACEmb,KAAK4L,OAAOnC,GACd,CACA,eAAAoC,IAIOxmB,SAASymB,QAAUnR,GAAUqF,KAAK4E,WACrC5E,KAAKnb,MAET,CACA,IAAAqhB,GACElG,KAAK4L,OAAOlC,GACd,CACA,KAAAoB,GACM9K,KAAKsL,YACPlR,GAAqB4F,KAAK4E,UAE5B5E,KAAK+L,gBACP,C
ACA,KAAAJ,GACE3L,KAAK+L,iBACL/L,KAAKgM,kBACLhM,KAAKoL,UAAYa,aAAY,IAAMjM,KAAK6L,mBAAmB7L,KAAK6E,QAAQ+F,SAC1E,CACA,iBAAAsB,GACOlM,KAAK6E,QAAQkG,OAGd/K,KAAKsL,WACP/K,GAAae,IAAItB,KAAK4E,SAAUkF,IAAY,IAAM9J,KAAK2L,UAGzD3L,KAAK2L,QACP,CACA,EAAAQ,CAAG1T,GACD,MAAM2T,EAAQpM,KAAKqM,YACnB,GAAI5T,EAAQ2T,EAAM1b,OAAS,GAAK+H,EAAQ,EACtC,OAEF,GAAIuH,KAAKsL,WAEP,YADA/K,GAAae,IAAItB,KAAK4E,SAAUkF,IAAY,IAAM9J,KAAKmM,GAAG1T,KAG5D,MAAM6T,EAActM,KAAKuM,cAAcvM,KAAKwM,cAC5C,GAAIF,IAAgB7T,EAClB,OAEF,MAAMtC,EAAQsC,EAAQ6T,EAAc7C,GAAaC,GACjD1J,KAAK4L,OAAOzV,EAAOiW,EAAM3T,GAC3B,CACA,OAAAsM,GACM/E,KAAKwL,cACPxL,KAAKwL,aAAazG,UAEpBJ,MAAMI,SACR,CAGA,iBAAAf,CAAkBF,GAEhB,OADAA,EAAO2I,gBAAkB3I,EAAO8G,SACzB9G,CACT,CACA,kBAAA4H,GACM1L,KAAK6E,QAAQgG,UACftK,GAAac,GAAGrB,KAAK4E,SAAUmF,IAAiB3K,GAASY,KAAK0M,SAAStN,KAE9C,UAAvBY,KAAK6E,QAAQiG,QACfvK,GAAac,GAAGrB,KAAK4E,SAAUoF,IAAoB,IAAMhK,KAAK8K,UAC9DvK,GAAac,GAAGrB,KAAK4E,SAAUqF,IAAoB,IAAMjK,KAAKkM,uBAE5DlM,KAAK6E,QAAQmG,OAASzC,GAAMC,eAC9BxI,KAAK2M,yBAET,CACA,uBAAAA,GACE,IAAK,MAAMC,KAAOhH,GAAezT,KArIX,qBAqImC6N,KAAK4E,UAC5DrE,GAAac,GAAGuL,EAAK1C,IAAkB9K,GAASA,EAAMkD,mBAExD,MAmBMuK,EAAc,CAClBzE,aAAc,IAAMpI,KAAK4L,OAAO5L,KAAK8M,kBAAkBnD,KACvDtB,cAAe,IAAMrI,KAAK4L,OAAO5L,KAAK8M,kBAAkBlD,KACxDzB,YAtBkB,KACS,UAAvBnI,KAAK6E,QAAQiG,QAYjB9K,KAAK8K,QACD9K,KAAKuL,cACPwB,aAAa/M,KAAKuL,cAEpBvL,KAAKuL,aAAe1N,YAAW,IAAMmC,KAAKkM,qBAjLjB,IAiL+DlM,KAAK6E,QAAQ+F,UAAS,GAOhH5K,KAAKwL,aAAe,IAAIjD,GAAMvI,KAAK4E,SAAUiI,EAC/C,CACA,QAAAH,CAAStN,GACP,GAAI,kBAAkB/b,KAAK+b,EAAM7S,OAAOya,SACtC,OAEF,MAAM1Z,EAAYod,GAAiBtL,EAAMtiB,KACrCwQ,IACF8R,EAAMkD,iBACNtC,KAAK4L,OAAO5L,KAAK8M,kBAAkBxf,IAEvC,CACA,aAAAif,CAAchtB,GACZ,OAAOygB,KAAKqM,YAAYlnB,QAAQ5F,EAClC,CACA,0BAAAytB,CAA2BvU,GACzB,IAAKuH,KAAKyL,mBACR,OAEF,MAAMwB,EAAkBrH,GAAeC,QAAQ0E,GAAiBvK,KAAKyL,oBACrEwB,EAAgB5R,UAAU1B,OAAO2Q,IACjC2C,EAAgB9rB,gBAAgB,gBAChC,MAAM+rB,EAAqBtH,GAAeC,QAAQ,sBAAsBpN,MAAWuH,KAAKyL,oBACpFyB,IACFA,EAAmB7R,UAAU5E,IAAI6T,IACjC4C,EAAmB9rB,aAAa,eAAgB,QAEpD,CACA,eAAA4qB,GACE,MAAMzsB,EAAUygB,KAAKqL,gBAAkBrL,KAAKwM,aAC5C
,IAAKjtB,EACH,OAEF,MAAM4tB,EAAkB5P,OAAO6P,SAAS7tB,EAAQic,aAAa,oBAAqB,IAClFwE,KAAK6E,QAAQ+F,SAAWuC,GAAmBnN,KAAK6E,QAAQ4H,eAC1D,CACA,MAAAb,CAAOzV,EAAO5W,EAAU,MACtB,GAAIygB,KAAKsL,WACP,OAEF,MAAMvN,EAAgBiC,KAAKwM,aACrBa,EAASlX,IAAUsT,GACnB6D,EAAc/tB,GAAWue,GAAqBkC,KAAKqM,YAAatO,EAAesP,EAAQrN,KAAK6E,QAAQoG,MAC1G,GAAIqC,IAAgBvP,EAClB,OAEF,MAAMwP,EAAmBvN,KAAKuM,cAAce,GACtCE,EAAehI,GACZjF,GAAaqB,QAAQ5B,KAAK4E,SAAUY,EAAW,CACpD1F,cAAewN,EACfhgB,UAAW0S,KAAKyN,kBAAkBtX,GAClCuD,KAAMsG,KAAKuM,cAAcxO,GACzBoO,GAAIoB,IAIR,GADmBC,EAAa3D,IACjB7H,iBACb,OAEF,IAAKjE,IAAkBuP,EAGrB,OAEF,MAAMI,EAAY5M,QAAQd,KAAKoL,WAC/BpL,KAAK8K,QACL9K,KAAKsL,YAAa,EAClBtL,KAAKgN,2BAA2BO,GAChCvN,KAAKqL,eAAiBiC,EACtB,MAAMK,EAAuBN,EA3OR,sBADF,oBA6ObO,EAAiBP,EA3OH,qBACA,qBA2OpBC,EAAYjS,UAAU5E,IAAImX,GAC1B/R,GAAOyR,GACPvP,EAAc1C,UAAU5E,IAAIkX,GAC5BL,EAAYjS,UAAU5E,IAAIkX,GAQ1B3N,KAAKmF,gBAPoB,KACvBmI,EAAYjS,UAAU1B,OAAOgU,EAAsBC,GACnDN,EAAYjS,UAAU5E,IAAI6T,IAC1BvM,EAAc1C,UAAU1B,OAAO2Q,GAAqBsD,EAAgBD,GACpE3N,KAAKsL,YAAa,EAClBkC,EAAa1D,GAAW,GAEY/L,EAAeiC,KAAK6N,eACtDH,GACF1N,KAAK2L,OAET,CACA,WAAAkC,GACE,OAAO7N,KAAK4E,SAASvJ,UAAU7W,SAhQV,QAiQvB,CACA,UAAAgoB,GACE,OAAO5G,GAAeC,QAAQ4E,GAAsBzK,KAAK4E,SAC3D,CACA,SAAAyH,GACE,OAAOzG,GAAezT,KAAKqY,GAAexK,KAAK4E,SACjD,CACA,cAAAmH,GACM/L,KAAKoL,YACP0C,cAAc9N,KAAKoL,WACnBpL,KAAKoL,UAAY,KAErB,CACA,iBAAA0B,CAAkBxf,GAChB,OAAI2O,KACK3O,IAAcqc,GAAiBD,GAAaD,GAE9Cnc,IAAcqc,GAAiBF,GAAaC,EACrD,CACA,iBAAA+D,CAAkBtX,GAChB,OAAI8F,KACK9F,IAAUuT,GAAaC,GAAiBC,GAE1CzT,IAAUuT,GAAaE,GAAkBD,EAClD,CAGA,sBAAOlN,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAO8gB,GAAS7F,oBAAoBtF,KAAM8D,GAChD,GAAsB,iBAAXA,GAIX,GAAsB,iBAAXA,EAAqB,CAC9B,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IACP,OAREzZ,EAAK8hB,GAAGrI,EASZ,GACF,EAOFvD,GAAac,GAAGhc,SAAU+kB,GAvSE,uCAuS2C,SAAUhL,GAC/E,MAAM7S,EAASqZ,GAAec,uBAAuB1G,MACrD,IAAKzT,IAAWA,EAAO8O,UAAU7W,SAAS6lB,IACxC,OAEFjL,EAAMkD,iBACN,MAAMyL,EAAW5C,GAAS7F,oBAAoB/Y,GACxCyhB,EAAahO,KAAKxE,aAAa,oBACrC,OAAIwS,GACFD,EAAS5
B,GAAG6B,QACZD,EAAS7B,qBAGyC,SAAhDlJ,GAAYQ,iBAAiBxD,KAAM,UACrC+N,EAASlpB,YACTkpB,EAAS7B,sBAGX6B,EAAS7H,YACT6H,EAAS7B,oBACX,IACA3L,GAAac,GAAGzhB,OAAQuqB,IAAuB,KAC7C,MAAM8D,EAAYrI,GAAezT,KA5TR,6BA6TzB,IAAK,MAAM4b,KAAYE,EACrB9C,GAAS7F,oBAAoByI,EAC/B,IAOF5R,GAAmBgP,IAcnB,MAEM+C,GAAc,eAEdC,GAAe,OAAOD,KACtBE,GAAgB,QAAQF,KACxBG,GAAe,OAAOH,KACtBI,GAAiB,SAASJ,KAC1BK,GAAyB,QAAQL,cACjCM,GAAoB,OACpBC,GAAsB,WACtBC,GAAwB,aAExBC,GAA6B,WAAWF,OAAwBA,KAKhEG,GAAyB,8BACzBC,GAAY,CAChBpqB,OAAQ,KACRijB,QAAQ,GAEJoH,GAAgB,CACpBrqB,OAAQ,iBACRijB,OAAQ,WAOV,MAAMqH,WAAiBrK,GACrB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKgP,kBAAmB,EACxBhP,KAAKiP,cAAgB,GACrB,MAAMC,EAAatJ,GAAezT,KAAKyc,IACvC,IAAK,MAAMO,KAAQD,EAAY,CAC7B,MAAMnV,EAAW6L,GAAea,uBAAuB0I,GACjDC,EAAgBxJ,GAAezT,KAAK4H,GAAU5T,QAAOkpB,GAAgBA,IAAiBrP,KAAK4E,WAChF,OAAb7K,GAAqBqV,EAAc1e,QACrCsP,KAAKiP,cAAcrd,KAAKud,EAE5B,CACAnP,KAAKsP,sBACAtP,KAAK6E,QAAQpgB,QAChBub,KAAKuP,0BAA0BvP,KAAKiP,cAAejP,KAAKwP,YAEtDxP,KAAK6E,QAAQ6C,QACf1H,KAAK0H,QAET,CAGA,kBAAWhE,GACT,OAAOmL,EACT,CACA,sBAAWlL,GACT,OAAOmL,EACT,CACA,eAAWvS,GACT,MA9DW,UA+Db,CAGA,MAAAmL,GACM1H,KAAKwP,WACPxP,KAAKyP,OAELzP,KAAK0P,MAET,CACA,IAAAA,GACE,GAAI1P,KAAKgP,kBAAoBhP,KAAKwP,WAChC,OAEF,IAAIG,EAAiB,GAQrB,GALI3P,KAAK6E,QAAQpgB,SACfkrB,EAAiB3P,KAAK4P,uBAhEH,wCAgE4CzpB,QAAO5G,GAAWA,IAAYygB,KAAK4E,WAAU9hB,KAAIvD,GAAWwvB,GAASzJ,oBAAoB/lB,EAAS,CAC/JmoB,QAAQ,OAGRiI,EAAejf,QAAUif,EAAe,GAAGX,iBAC7C,OAGF,GADmBzO,GAAaqB,QAAQ5B,KAAK4E,SAAUuJ,IACxCnM,iBACb,OAEF,IAAK,MAAM6N,KAAkBF,EAC3BE,EAAeJ,OAEjB,MAAMK,EAAY9P,KAAK+P,gBACvB/P,KAAK4E,SAASvJ,UAAU1B,OAAO8U,IAC/BzO,KAAK4E,SAASvJ,UAAU5E,IAAIiY,IAC5B1O,KAAK4E,SAAS7jB,MAAM+uB,GAAa,EACjC9P,KAAKuP,0BAA0BvP,KAAKiP,eAAe,GACnDjP,KAAKgP,kBAAmB,EACxB,MAQMgB,EAAa,SADUF,EAAU,GAAGrL,cAAgBqL,EAAU1d,MAAM,KAE1E4N,KAAKmF,gBATY,KACfnF,KAAKgP,kBAAmB,EACxBhP,KAAK4E,SAASvJ,UAAU1B,OAAO+U,IAC/B1O,KAAK4E,SAASvJ,UAAU5E,IAAIgY,GAAqBD,IACjDxO,KAAK4E,SAAS7jB,MAAM+uB,GAAa,GACjCvP,GAAaqB,QAAQ5B,KAAK4E,SAAUwJ,GAAc,GAItBpO,KAAK4E,UAAU,GAC7C5E,KAAK4E,SAAS7jB,MAAM+uB,GAAa,G
AAG9P,KAAK4E,SAASoL,MACpD,CACA,IAAAP,GACE,GAAIzP,KAAKgP,mBAAqBhP,KAAKwP,WACjC,OAGF,GADmBjP,GAAaqB,QAAQ5B,KAAK4E,SAAUyJ,IACxCrM,iBACb,OAEF,MAAM8N,EAAY9P,KAAK+P,gBACvB/P,KAAK4E,SAAS7jB,MAAM+uB,GAAa,GAAG9P,KAAK4E,SAASthB,wBAAwBwsB,OAC1EjU,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAIiY,IAC5B1O,KAAK4E,SAASvJ,UAAU1B,OAAO8U,GAAqBD,IACpD,IAAK,MAAM5M,KAAW5B,KAAKiP,cAAe,CACxC,MAAM1vB,EAAUqmB,GAAec,uBAAuB9E,GAClDriB,IAAYygB,KAAKwP,SAASjwB,IAC5BygB,KAAKuP,0BAA0B,CAAC3N,IAAU,EAE9C,CACA5B,KAAKgP,kBAAmB,EAOxBhP,KAAK4E,SAAS7jB,MAAM+uB,GAAa,GACjC9P,KAAKmF,gBAPY,KACfnF,KAAKgP,kBAAmB,EACxBhP,KAAK4E,SAASvJ,UAAU1B,OAAO+U,IAC/B1O,KAAK4E,SAASvJ,UAAU5E,IAAIgY,IAC5BlO,GAAaqB,QAAQ5B,KAAK4E,SAAU0J,GAAe,GAGvBtO,KAAK4E,UAAU,EAC/C,CACA,QAAA4K,CAASjwB,EAAUygB,KAAK4E,UACtB,OAAOrlB,EAAQ8b,UAAU7W,SAASgqB,GACpC,CAGA,iBAAAxK,CAAkBF,GAGhB,OAFAA,EAAO4D,OAAS5G,QAAQgD,EAAO4D,QAC/B5D,EAAOrf,OAASiW,GAAWoJ,EAAOrf,QAC3Bqf,CACT,CACA,aAAAiM,GACE,OAAO/P,KAAK4E,SAASvJ,UAAU7W,SA3IL,uBAChB,QACC,QA0Ib,CACA,mBAAA8qB,GACE,IAAKtP,KAAK6E,QAAQpgB,OAChB,OAEF,MAAMqhB,EAAW9F,KAAK4P,uBAAuBhB,IAC7C,IAAK,MAAMrvB,KAAWumB,EAAU,CAC9B,MAAMmK,EAAWrK,GAAec,uBAAuBnnB,GACnD0wB,GACFjQ,KAAKuP,0BAA0B,CAAChwB,GAAUygB,KAAKwP,SAASS,GAE5D,CACF,CACA,sBAAAL,CAAuB7V,GACrB,MAAM+L,EAAWF,GAAezT,KAAKwc,GAA4B3O,KAAK6E,QAAQpgB,QAE9E,OAAOmhB,GAAezT,KAAK4H,EAAUiG,KAAK6E,QAAQpgB,QAAQ0B,QAAO5G,IAAYumB,EAAS1E,SAAS7hB,IACjG,CACA,yBAAAgwB,CAA0BW,EAAcC,GACtC,GAAKD,EAAaxf,OAGlB,IAAK,MAAMnR,KAAW2wB,EACpB3wB,EAAQ8b,UAAUqM,OArKK,aAqKyByI,GAChD5wB,EAAQ6B,aAAa,gBAAiB+uB,EAE1C,CAGA,sBAAO1T,CAAgBqH,GACrB,MAAMe,EAAU,CAAC,EAIjB,MAHsB,iBAAXf,GAAuB,YAAYzgB,KAAKygB,KACjDe,EAAQ6C,QAAS,GAEZ1H,KAAKuH,MAAK,WACf,MAAMld,EAAO0kB,GAASzJ,oBAAoBtF,KAAM6E,GAChD,GAAsB,iBAAXf,EAAqB,CAC9B,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IACP,CACF,GACF,EAOFvD,GAAac,GAAGhc,SAAUkpB,GAAwBK,IAAwB,SAAUxP,IAErD,MAAzBA,EAAM7S,OAAOya,SAAmB5H,EAAMW,gBAAmD,MAAjCX,EAAMW,eAAeiH,UAC/E5H,EAAMkD,iBAER,IAAK,MAAM/iB,KAAWqmB,GAAee,gCAAgC3G,MACnE+O,GAASzJ,oBAAoB/lB,EAAS,CACpCmoB,QAAQ,IA
CPA,QAEP,IAMAvL,GAAmB4S,IAcnB,MAAMqB,GAAS,WAETC,GAAc,eACdC,GAAiB,YAGjBC,GAAiB,UACjBC,GAAmB,YAGnBC,GAAe,OAAOJ,KACtBK,GAAiB,SAASL,KAC1BM,GAAe,OAAON,KACtBO,GAAgB,QAAQP,KACxBQ,GAAyB,QAAQR,KAAcC,KAC/CQ,GAAyB,UAAUT,KAAcC,KACjDS,GAAuB,QAAQV,KAAcC,KAC7CU,GAAoB,OAMpBC,GAAyB,4DACzBC,GAA6B,GAAGD,MAA0BD,KAC1DG,GAAgB,iBAIhBC,GAAgBnV,KAAU,UAAY,YACtCoV,GAAmBpV,KAAU,YAAc,UAC3CqV,GAAmBrV,KAAU,aAAe,eAC5CsV,GAAsBtV,KAAU,eAAiB,aACjDuV,GAAkBvV,KAAU,aAAe,cAC3CwV,GAAiBxV,KAAU,cAAgB,aAG3CyV,GAAY,CAChBC,WAAW,EACX1jB,SAAU,kBACV2jB,QAAS,UACT5pB,OAAQ,CAAC,EAAG,GACZ6pB,aAAc,KACdvzB,UAAW,UAEPwzB,GAAgB,CACpBH,UAAW,mBACX1jB,SAAU,mBACV2jB,QAAS,SACT5pB,OAAQ,0BACR6pB,aAAc,yBACdvzB,UAAW,2BAOb,MAAMyzB,WAAiBrN,GACrB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKgS,QAAU,KACfhS,KAAKiS,QAAUjS,KAAK4E,SAAS7f,WAE7Bib,KAAKkS,MAAQtM,GAAe/gB,KAAKmb,KAAK4E,SAAUuM,IAAe,IAAMvL,GAAeM,KAAKlG,KAAK4E,SAAUuM,IAAe,IAAMvL,GAAeC,QAAQsL,GAAenR,KAAKiS,SACxKjS,KAAKmS,UAAYnS,KAAKoS,eACxB,CAGA,kBAAW1O,GACT,OAAOgO,EACT,CACA,sBAAW/N,GACT,OAAOmO,EACT,CACA,eAAWvV,GACT,OAAO6T,EACT,CAGA,MAAA1I,GACE,OAAO1H,KAAKwP,WAAaxP,KAAKyP,OAASzP,KAAK0P,MAC9C,CACA,IAAAA,GACE,GAAIxU,GAAW8E,KAAK4E,WAAa5E,KAAKwP,WACpC,OAEF,MAAM1P,EAAgB,CACpBA,cAAeE,KAAK4E,UAGtB,IADkBrE,GAAaqB,QAAQ5B,KAAK4E,SAAU+L,GAAc7Q,GACtDkC,iBAAd,CASA,GANAhC,KAAKqS,gBAMD,iBAAkBhtB,SAASC,kBAAoB0a,KAAKiS,QAAQjX,QAzExC,eA0EtB,IAAK,MAAMzb,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK4Z,UAC/CvF,GAAac,GAAG9hB,EAAS,YAAaqc,IAG1CoE,KAAK4E,SAAS0N,QACdtS,KAAK4E,SAASxjB,aAAa,iBAAiB,GAC5C4e,KAAKkS,MAAM7W,UAAU5E,IAAIua,IACzBhR,KAAK4E,SAASvJ,UAAU5E,IAAIua,IAC5BzQ,GAAaqB,QAAQ5B,KAAK4E,SAAUgM,GAAe9Q,EAhBnD,CAiBF,CACA,IAAA2P,GACE,GAAIvU,GAAW8E,KAAK4E,YAAc5E,KAAKwP,WACrC,OAEF,MAAM1P,EAAgB,CACpBA,cAAeE,KAAK4E,UAEtB5E,KAAKuS,cAAczS,EACrB,CACA,OAAAiF,GACM/E,KAAKgS,SACPhS,KAAKgS,QAAQhZ,UAEf2L,MAAMI,SACR,CACA,MAAAha,GACEiV,KAAKmS,UAAYnS,KAAKoS,gBAClBpS,KAAKgS,SACPhS,KAAKgS,QAAQjnB,QAEjB,CAGA,aAAAwnB,CAAczS,GAEZ,IADkBS,GAAaqB,QAAQ5B,KAAK4E,SAAU6L,GAAc3Q,GACtDkC,iBAAd,CAMA,GAAI,iBAAkB3c,SAASC,gBAC7B,IAAK,MAAM/F
,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK4Z,UAC/CvF,GAAaC,IAAIjhB,EAAS,YAAaqc,IAGvCoE,KAAKgS,SACPhS,KAAKgS,QAAQhZ,UAEfgH,KAAKkS,MAAM7W,UAAU1B,OAAOqX,IAC5BhR,KAAK4E,SAASvJ,UAAU1B,OAAOqX,IAC/BhR,KAAK4E,SAASxjB,aAAa,gBAAiB,SAC5C4hB,GAAYE,oBAAoBlD,KAAKkS,MAAO,UAC5C3R,GAAaqB,QAAQ5B,KAAK4E,SAAU8L,GAAgB5Q,EAhBpD,CAiBF,CACA,UAAA+D,CAAWC,GAET,GAAgC,iBADhCA,EAASa,MAAMd,WAAWC,IACRxlB,YAA2B,GAAUwlB,EAAOxlB,YAAgE,mBAA3CwlB,EAAOxlB,UAAUgF,sBAElG,MAAM,IAAIkhB,UAAU,GAAG4L,GAAO3L,+GAEhC,OAAOX,CACT,CACA,aAAAuO,GACE,QAAsB,IAAX,EACT,MAAM,IAAI7N,UAAU,gEAEtB,IAAIgO,EAAmBxS,KAAK4E,SACG,WAA3B5E,KAAK6E,QAAQvmB,UACfk0B,EAAmBxS,KAAKiS,QACf,GAAUjS,KAAK6E,QAAQvmB,WAChCk0B,EAAmB9X,GAAWsF,KAAK6E,QAAQvmB,WACA,iBAA3B0hB,KAAK6E,QAAQvmB,YAC7Bk0B,EAAmBxS,KAAK6E,QAAQvmB,WAElC,MAAMuzB,EAAe7R,KAAKyS,mBAC1BzS,KAAKgS,QAAU,GAAoBQ,EAAkBxS,KAAKkS,MAAOL,EACnE,CACA,QAAArC,GACE,OAAOxP,KAAKkS,MAAM7W,UAAU7W,SAASwsB,GACvC,CACA,aAAA0B,GACE,MAAMC,EAAiB3S,KAAKiS,QAC5B,GAAIU,EAAetX,UAAU7W,SArKN,WAsKrB,OAAOgtB,GAET,GAAImB,EAAetX,UAAU7W,SAvKJ,aAwKvB,OAAOitB,GAET,GAAIkB,EAAetX,UAAU7W,SAzKA,iBA0K3B,MA5JsB,MA8JxB,GAAImuB,EAAetX,UAAU7W,SA3KE,mBA4K7B,MA9JyB,SAkK3B,MAAMouB,EAAkF,QAA1E3tB,iBAAiB+a,KAAKkS,OAAOpX,iBAAiB,iBAAiB6K,OAC7E,OAAIgN,EAAetX,UAAU7W,SArLP,UAsLbouB,EAAQvB,GAAmBD,GAE7BwB,EAAQrB,GAAsBD,EACvC,CACA,aAAAc,GACE,OAAkD,OAA3CpS,KAAK4E,SAAS5J,QAnLD,UAoLtB,CACA,UAAA6X,GACE,MAAM,OACJ7qB,GACEgY,KAAK6E,QACT,MAAsB,iBAAX7c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAAS4f,OAAO6P,SAASzvB,EAAO,MAEzC,mBAAXqK,EACF8qB,GAAc9qB,EAAO8qB,EAAY9S,KAAK4E,UAExC5c,CACT,CACA,gBAAAyqB,GACE,MAAMM,EAAwB,CAC5Br0B,UAAWshB,KAAK0S,gBAChBtc,UAAW,CAAC,CACV9V,KAAM,kBACNmB,QAAS,CACPwM,SAAU+R,KAAK6E,QAAQ5W,WAExB,CACD3N,KAAM,SACNmB,QAAS,CACPuG,OAAQgY,KAAK6S,iBAanB,OAPI7S,KAAKmS,WAAsC,WAAzBnS,KAAK6E,QAAQ+M,WACjC5O,GAAYC,iBAAiBjD,KAAKkS,MAAO,SAAU,UACnDa,EAAsB3c,UAAY,CAAC,CACjC9V,KAAM,cACNC,SAAS,KAGN,IACFwyB,KACAlW,GAAQmD,KAAK6E,QAAQgN,aAAc,CAACkB,IAE3C,CACA,eAAAC,EAAgB,IACdl2B,EAAG,OACHyP,IAEA,MAAM6f,EAAQxG,GAAezT,KAhOF,8DAgO+B6N,KAAKkS,OAAO/rB,QAAO5G,GAAWob,GAAUpb,KAC7F6sB,E
AAM1b,QAMXoN,GAAqBsO,EAAO7f,EAAQzP,IAAQ0zB,IAAmBpE,EAAMhL,SAAS7U,IAAS+lB,OACzF,CAGA,sBAAO7V,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAO0nB,GAASzM,oBAAoBtF,KAAM8D,GAChD,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,CACA,iBAAOmP,CAAW7T,GAChB,GA5QuB,IA4QnBA,EAAMuI,QAAgD,UAAfvI,EAAMqB,MA/QnC,QA+QuDrB,EAAMtiB,IACzE,OAEF,MAAMo2B,EAActN,GAAezT,KAAK+e,IACxC,IAAK,MAAMxJ,KAAUwL,EAAa,CAChC,MAAMC,EAAUpB,GAAS1M,YAAYqC,GACrC,IAAKyL,IAAyC,IAA9BA,EAAQtO,QAAQ8M,UAC9B,SAEF,MAAMyB,EAAehU,EAAMgU,eACrBC,EAAeD,EAAahS,SAAS+R,EAAQjB,OACnD,GAAIkB,EAAahS,SAAS+R,EAAQvO,WAA2C,WAA9BuO,EAAQtO,QAAQ8M,YAA2B0B,GAA8C,YAA9BF,EAAQtO,QAAQ8M,WAA2B0B,EACnJ,SAIF,GAAIF,EAAQjB,MAAM1tB,SAAS4a,EAAM7S,UAA2B,UAAf6S,EAAMqB,MA/RvC,QA+R2DrB,EAAMtiB,KAAqB,qCAAqCuG,KAAK+b,EAAM7S,OAAOya,UACvJ,SAEF,MAAMlH,EAAgB,CACpBA,cAAeqT,EAAQvO,UAEN,UAAfxF,EAAMqB,OACRX,EAAciH,WAAa3H,GAE7B+T,EAAQZ,cAAczS,EACxB,CACF,CACA,4BAAOwT,CAAsBlU,GAI3B,MAAMmU,EAAU,kBAAkBlwB,KAAK+b,EAAM7S,OAAOya,SAC9CwM,EAjTW,WAiTKpU,EAAMtiB,IACtB22B,EAAkB,CAAClD,GAAgBC,IAAkBpP,SAAShC,EAAMtiB,KAC1E,IAAK22B,IAAoBD,EACvB,OAEF,GAAID,IAAYC,EACd,OAEFpU,EAAMkD,iBAGN,MAAMoR,EAAkB1T,KAAK+F,QAAQkL,IAA0BjR,KAAO4F,GAAeM,KAAKlG,KAAMiR,IAAwB,IAAMrL,GAAe/gB,KAAKmb,KAAMiR,IAAwB,IAAMrL,GAAeC,QAAQoL,GAAwB7R,EAAMW,eAAehb,YACpPwF,EAAWwnB,GAASzM,oBAAoBoO,GAC9C,GAAID,EAIF,OAHArU,EAAMuU,kBACNppB,EAASmlB,YACTnlB,EAASyoB,gBAAgB5T,GAGvB7U,EAASilB,aAEXpQ,EAAMuU,kBACNppB,EAASklB,OACTiE,EAAgBpB,QAEpB,EAOF/R,GAAac,GAAGhc,SAAUyrB,GAAwBG,GAAwBc,GAASuB,uBACnF/S,GAAac,GAAGhc,SAAUyrB,GAAwBK,GAAeY,GAASuB,uBAC1E/S,GAAac,GAAGhc,SAAUwrB,GAAwBkB,GAASkB,YAC3D1S,GAAac,GAAGhc,SAAU0rB,GAAsBgB,GAASkB,YACzD1S,GAAac,GAAGhc,SAAUwrB,GAAwBI,IAAwB,SAAU7R,GAClFA,EAAMkD,iBACNyP,GAASzM,oBAAoBtF,MAAM0H,QACrC,IAMAvL,GAAmB4V,IAcnB,MAAM6B,GAAS,WAETC,GAAoB,OACpBC,GAAkB,gBAAgBF,KAClCG,GAAY,CAChBC,UAAW,iBACXC,cAAe,KACf7O,YAAY,EACZzK,WAAW,EAEXuZ,YAAa,QAGTC,GAAgB,CACpBH,UAAW,SACXC,cAAe,kBACf7O,WAAY,UACZzK,UAAW,UACXuZ,YAAa,oBAOf,MAAME,WAAiB3Q,GACrB,WAAAU,CAAYL
,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/B9D,KAAKqU,aAAc,EACnBrU,KAAK4E,SAAW,IAClB,CAGA,kBAAWlB,GACT,OAAOqQ,EACT,CACA,sBAAWpQ,GACT,OAAOwQ,EACT,CACA,eAAW5X,GACT,OAAOqX,EACT,CAGA,IAAAlE,CAAKrT,GACH,IAAK2D,KAAK6E,QAAQlK,UAEhB,YADAkC,GAAQR,GAGV2D,KAAKsU,UACL,MAAM/0B,EAAUygB,KAAKuU,cACjBvU,KAAK6E,QAAQO,YACfvJ,GAAOtc,GAETA,EAAQ8b,UAAU5E,IAAIod,IACtB7T,KAAKwU,mBAAkB,KACrB3X,GAAQR,EAAS,GAErB,CACA,IAAAoT,CAAKpT,GACE2D,KAAK6E,QAAQlK,WAIlBqF,KAAKuU,cAAclZ,UAAU1B,OAAOka,IACpC7T,KAAKwU,mBAAkB,KACrBxU,KAAK+E,UACLlI,GAAQR,EAAS,KANjBQ,GAAQR,EAQZ,CACA,OAAA0I,GACO/E,KAAKqU,cAGV9T,GAAaC,IAAIR,KAAK4E,SAAUkP,IAChC9T,KAAK4E,SAASjL,SACdqG,KAAKqU,aAAc,EACrB,CAGA,WAAAE,GACE,IAAKvU,KAAK4E,SAAU,CAClB,MAAM6P,EAAWpvB,SAASqvB,cAAc,OACxCD,EAAST,UAAYhU,KAAK6E,QAAQmP,UAC9BhU,KAAK6E,QAAQO,YACfqP,EAASpZ,UAAU5E,IArFD,QAuFpBuJ,KAAK4E,SAAW6P,CAClB,CACA,OAAOzU,KAAK4E,QACd,CACA,iBAAAZ,CAAkBF,GAGhB,OADAA,EAAOoQ,YAAcxZ,GAAWoJ,EAAOoQ,aAChCpQ,CACT,CACA,OAAAwQ,GACE,GAAItU,KAAKqU,YACP,OAEF,MAAM90B,EAAUygB,KAAKuU,cACrBvU,KAAK6E,QAAQqP,YAAYS,OAAOp1B,GAChCghB,GAAac,GAAG9hB,EAASu0B,IAAiB,KACxCjX,GAAQmD,KAAK6E,QAAQoP,cAAc,IAErCjU,KAAKqU,aAAc,CACrB,CACA,iBAAAG,CAAkBnY,GAChBW,GAAuBX,EAAU2D,KAAKuU,cAAevU,KAAK6E,QAAQO,WACpE,EAeF,MAEMwP,GAAc,gBACdC,GAAkB,UAAUD,KAC5BE,GAAoB,cAAcF,KAGlCG,GAAmB,WACnBC,GAAY,CAChBC,WAAW,EACXC,YAAa,MAGTC,GAAgB,CACpBF,UAAW,UACXC,YAAa,WAOf,MAAME,WAAkB3R,GACtB,WAAAU,CAAYL,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,GAC/B9D,KAAKqV,WAAY,EACjBrV,KAAKsV,qBAAuB,IAC9B,CAGA,kBAAW5R,GACT,OAAOsR,EACT,CACA,sBAAWrR,GACT,OAAOwR,EACT,CACA,eAAW5Y,GACT,MAtCW,WAuCb,CAGA,QAAAgZ,GACMvV,KAAKqV,YAGLrV,KAAK6E,QAAQoQ,WACfjV,KAAK6E,QAAQqQ,YAAY5C,QAE3B/R,GAAaC,IAAInb,SAAUuvB,IAC3BrU,GAAac,GAAGhc,SAAUwvB,IAAiBzV,GAASY,KAAKwV,eAAepW,KACxEmB,GAAac,GAAGhc,SAAUyvB,IAAmB1V,GAASY,KAAKyV,eAAerW,KAC1EY,KAAKqV,WAAY,EACnB,CACA,UAAAK,GACO1V,KAAKqV,YAGVrV,KAAKqV,WAAY,EACjB9U,GAAaC,IAAInb,SAAUuvB,IAC7B,CAGA,cAAAY,CAAepW,GACb,MAAM,YACJ8V,GACElV,KAAK6E,QACT,GAAIzF,EAAM7S,SAAWlH,UAAY+Z,EAAM7S,SAAW2oB,GAAeA,EAAY1wB,SAAS4a,EAAM7S,QAC1F,OA
EF,MAAM1L,EAAW+kB,GAAeU,kBAAkB4O,GAC1B,IAApBr0B,EAAS6P,OACXwkB,EAAY5C,QACHtS,KAAKsV,uBAAyBP,GACvCl0B,EAASA,EAAS6P,OAAS,GAAG4hB,QAE9BzxB,EAAS,GAAGyxB,OAEhB,CACA,cAAAmD,CAAerW,GA1ED,QA2ERA,EAAMtiB,MAGVkjB,KAAKsV,qBAAuBlW,EAAMuW,SAAWZ,GA7EzB,UA8EtB,EAeF,MAAMa,GAAyB,oDACzBC,GAA0B,cAC1BC,GAAmB,gBACnBC,GAAkB,eAMxB,MAAMC,GACJ,WAAA7R,GACEnE,KAAK4E,SAAWvf,SAAS6G,IAC3B,CAGA,QAAA+pB,GAEE,MAAMC,EAAgB7wB,SAASC,gBAAgBuC,YAC/C,OAAO1F,KAAKoC,IAAI3E,OAAOu2B,WAAaD,EACtC,CACA,IAAAzG,GACE,MAAM5rB,EAAQmc,KAAKiW,WACnBjW,KAAKoW,mBAELpW,KAAKqW,sBAAsBrW,KAAK4E,SAAUkR,IAAkBQ,GAAmBA,EAAkBzyB,IAEjGmc,KAAKqW,sBAAsBT,GAAwBE,IAAkBQ,GAAmBA,EAAkBzyB,IAC1Gmc,KAAKqW,sBAAsBR,GAAyBE,IAAiBO,GAAmBA,EAAkBzyB,GAC5G,CACA,KAAAwO,GACE2N,KAAKuW,wBAAwBvW,KAAK4E,SAAU,YAC5C5E,KAAKuW,wBAAwBvW,KAAK4E,SAAUkR,IAC5C9V,KAAKuW,wBAAwBX,GAAwBE,IACrD9V,KAAKuW,wBAAwBV,GAAyBE,GACxD,CACA,aAAAS,GACE,OAAOxW,KAAKiW,WAAa,CAC3B,CAGA,gBAAAG,GACEpW,KAAKyW,sBAAsBzW,KAAK4E,SAAU,YAC1C5E,KAAK4E,SAAS7jB,MAAM+K,SAAW,QACjC,CACA,qBAAAuqB,CAAsBtc,EAAU2c,EAAera,GAC7C,MAAMsa,EAAiB3W,KAAKiW,WAS5BjW,KAAK4W,2BAA2B7c,GARHxa,IAC3B,GAAIA,IAAYygB,KAAK4E,UAAYhlB,OAAOu2B,WAAa52B,EAAQsI,YAAc8uB,EACzE,OAEF3W,KAAKyW,sBAAsBl3B,EAASm3B,GACpC,MAAMJ,EAAkB12B,OAAOqF,iBAAiB1F,GAASub,iBAAiB4b,GAC1En3B,EAAQwB,MAAM81B,YAAYH,EAAe,GAAGra,EAASkB,OAAOC,WAAW8Y,QAAsB,GAGjG,CACA,qBAAAG,CAAsBl3B,EAASm3B,GAC7B,MAAMI,EAAcv3B,EAAQwB,MAAM+Z,iBAAiB4b,GAC/CI,GACF9T,GAAYC,iBAAiB1jB,EAASm3B,EAAeI,EAEzD,CACA,uBAAAP,CAAwBxc,EAAU2c,GAWhC1W,KAAK4W,2BAA2B7c,GAVHxa,IAC3B,MAAM5B,EAAQqlB,GAAYQ,iBAAiBjkB,EAASm3B,GAEtC,OAAV/4B,GAIJqlB,GAAYE,oBAAoB3jB,EAASm3B,GACzCn3B,EAAQwB,MAAM81B,YAAYH,EAAe/4B,IAJvC4B,EAAQwB,MAAMg2B,eAAeL,EAIgB,GAGnD,CACA,0BAAAE,CAA2B7c,EAAUid,GACnC,GAAI,GAAUjd,GACZid,EAASjd,QAGX,IAAK,MAAMkd,KAAOrR,GAAezT,KAAK4H,EAAUiG,KAAK4E,UACnDoS,EAASC,EAEb,EAeF,MAEMC,GAAc,YAGdC,GAAe,OAAOD,KACtBE,GAAyB,gBAAgBF,KACzCG,GAAiB,SAASH,KAC1BI,GAAe,OAAOJ,KACtBK,GAAgB,QAAQL,KACxBM,GAAiB,SAASN,KAC1BO,GAAsB,gBAAgBP,KACtCQ,GAA0B,oBAAoBR,KAC9CS,GAA0B,kBAAkBT,KAC5CU,GAAyB,QAAQV,cACjCW,GAAkB,aAElBC
,GAAoB,OACpBC,GAAoB,eAKpBC,GAAY,CAChBvD,UAAU,EACVnC,OAAO,EACPzH,UAAU,GAENoN,GAAgB,CACpBxD,SAAU,mBACVnC,MAAO,UACPzH,SAAU,WAOZ,MAAMqN,WAAcxT,GAClB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKmY,QAAUvS,GAAeC,QArBV,gBAqBmC7F,KAAK4E,UAC5D5E,KAAKoY,UAAYpY,KAAKqY,sBACtBrY,KAAKsY,WAAatY,KAAKuY,uBACvBvY,KAAKwP,UAAW,EAChBxP,KAAKgP,kBAAmB,EACxBhP,KAAKwY,WAAa,IAAIxC,GACtBhW,KAAK0L,oBACP,CAGA,kBAAWhI,GACT,OAAOsU,EACT,CACA,sBAAWrU,GACT,OAAOsU,EACT,CACA,eAAW1b,GACT,MA1DW,OA2Db,CAGA,MAAAmL,CAAO5H,GACL,OAAOE,KAAKwP,SAAWxP,KAAKyP,OAASzP,KAAK0P,KAAK5P,EACjD,CACA,IAAA4P,CAAK5P,GACCE,KAAKwP,UAAYxP,KAAKgP,kBAGRzO,GAAaqB,QAAQ5B,KAAK4E,SAAU0S,GAAc,CAClExX,kBAEYkC,mBAGdhC,KAAKwP,UAAW,EAChBxP,KAAKgP,kBAAmB,EACxBhP,KAAKwY,WAAW/I,OAChBpqB,SAAS6G,KAAKmP,UAAU5E,IAAIohB,IAC5B7X,KAAKyY,gBACLzY,KAAKoY,UAAU1I,MAAK,IAAM1P,KAAK0Y,aAAa5Y,KAC9C,CACA,IAAA2P,GACOzP,KAAKwP,WAAYxP,KAAKgP,mBAGTzO,GAAaqB,QAAQ5B,KAAK4E,SAAUuS,IACxCnV,mBAGdhC,KAAKwP,UAAW,EAChBxP,KAAKgP,kBAAmB,EACxBhP,KAAKsY,WAAW5C,aAChB1V,KAAK4E,SAASvJ,UAAU1B,OAAOme,IAC/B9X,KAAKmF,gBAAe,IAAMnF,KAAK2Y,cAAc3Y,KAAK4E,SAAU5E,KAAK6N,gBACnE,CACA,OAAA9I,GACExE,GAAaC,IAAI5gB,OAAQs3B,IACzB3W,GAAaC,IAAIR,KAAKmY,QAASjB,IAC/BlX,KAAKoY,UAAUrT,UACf/E,KAAKsY,WAAW5C,aAChB/Q,MAAMI,SACR,CACA,YAAA6T,GACE5Y,KAAKyY,eACP,CAGA,mBAAAJ,GACE,OAAO,IAAIjE,GAAS,CAClBzZ,UAAWmG,QAAQd,KAAK6E,QAAQ4P,UAEhCrP,WAAYpF,KAAK6N,eAErB,CACA,oBAAA0K,GACE,OAAO,IAAInD,GAAU,CACnBF,YAAalV,KAAK4E,UAEtB,CACA,YAAA8T,CAAa5Y,GAENza,SAAS6G,KAAK1H,SAASwb,KAAK4E,WAC/Bvf,SAAS6G,KAAKyoB,OAAO3U,KAAK4E,UAE5B5E,KAAK4E,SAAS7jB,MAAM6wB,QAAU,QAC9B5R,KAAK4E,SAASzjB,gBAAgB,eAC9B6e,KAAK4E,SAASxjB,aAAa,cAAc,GACzC4e,KAAK4E,SAASxjB,aAAa,OAAQ,UACnC4e,KAAK4E,SAASnZ,UAAY,EAC1B,MAAMotB,EAAYjT,GAAeC,QA7GT,cA6GsC7F,KAAKmY,SAC/DU,IACFA,EAAUptB,UAAY,GAExBoQ,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAIqhB,IAU5B9X,KAAKmF,gBATsB,KACrBnF,KAAK6E,QAAQyN,OACftS,KAAKsY,WAAW/C,WAElBvV,KAAKgP,kBAAmB,EACxBzO,GAAaqB,QAAQ5B,KAAK4E,SAAU2S,GAAe,CACjDzX,iBACA,GAEoCE,KAAKmY,QAASnY,KAAK6N,cAC7D,CACA,kBAAAnC,GACEnL,GAAac,GAA
GrB,KAAK4E,SAAU+S,IAAyBvY,IAhJvC,WAiJXA,EAAMtiB,MAGNkjB,KAAK6E,QAAQgG,SACf7K,KAAKyP,OAGPzP,KAAK8Y,6BAA4B,IAEnCvY,GAAac,GAAGzhB,OAAQ43B,IAAgB,KAClCxX,KAAKwP,WAAaxP,KAAKgP,kBACzBhP,KAAKyY,eACP,IAEFlY,GAAac,GAAGrB,KAAK4E,SAAU8S,IAAyBtY,IAEtDmB,GAAae,IAAItB,KAAK4E,SAAU6S,IAAqBsB,IAC/C/Y,KAAK4E,WAAaxF,EAAM7S,QAAUyT,KAAK4E,WAAamU,EAAOxsB,SAGjC,WAA1ByT,KAAK6E,QAAQ4P,SAIbzU,KAAK6E,QAAQ4P,UACfzU,KAAKyP,OAJLzP,KAAK8Y,6BAKP,GACA,GAEN,CACA,UAAAH,GACE3Y,KAAK4E,SAAS7jB,MAAM6wB,QAAU,OAC9B5R,KAAK4E,SAASxjB,aAAa,eAAe,GAC1C4e,KAAK4E,SAASzjB,gBAAgB,cAC9B6e,KAAK4E,SAASzjB,gBAAgB,QAC9B6e,KAAKgP,kBAAmB,EACxBhP,KAAKoY,UAAU3I,MAAK,KAClBpqB,SAAS6G,KAAKmP,UAAU1B,OAAOke,IAC/B7X,KAAKgZ,oBACLhZ,KAAKwY,WAAWnmB,QAChBkO,GAAaqB,QAAQ5B,KAAK4E,SAAUyS,GAAe,GAEvD,CACA,WAAAxJ,GACE,OAAO7N,KAAK4E,SAASvJ,UAAU7W,SAjLT,OAkLxB,CACA,0BAAAs0B,GAEE,GADkBvY,GAAaqB,QAAQ5B,KAAK4E,SAAUwS,IACxCpV,iBACZ,OAEF,MAAMiX,EAAqBjZ,KAAK4E,SAASvX,aAAehI,SAASC,gBAAgBsC,aAC3EsxB,EAAmBlZ,KAAK4E,SAAS7jB,MAAMiL,UAEpB,WAArBktB,GAAiClZ,KAAK4E,SAASvJ,UAAU7W,SAASuzB,MAGjEkB,IACHjZ,KAAK4E,SAAS7jB,MAAMiL,UAAY,UAElCgU,KAAK4E,SAASvJ,UAAU5E,IAAIshB,IAC5B/X,KAAKmF,gBAAe,KAClBnF,KAAK4E,SAASvJ,UAAU1B,OAAOoe,IAC/B/X,KAAKmF,gBAAe,KAClBnF,KAAK4E,SAAS7jB,MAAMiL,UAAYktB,CAAgB,GAC/ClZ,KAAKmY,QAAQ,GACfnY,KAAKmY,SACRnY,KAAK4E,SAAS0N,QAChB,CAMA,aAAAmG,GACE,MAAMQ,EAAqBjZ,KAAK4E,SAASvX,aAAehI,SAASC,gBAAgBsC,aAC3E+uB,EAAiB3W,KAAKwY,WAAWvC,WACjCkD,EAAoBxC,EAAiB,EAC3C,GAAIwC,IAAsBF,EAAoB,CAC5C,MAAMn3B,EAAWma,KAAU,cAAgB,eAC3C+D,KAAK4E,SAAS7jB,MAAMe,GAAY,GAAG60B,KACrC,CACA,IAAKwC,GAAqBF,EAAoB,CAC5C,MAAMn3B,EAAWma,KAAU,eAAiB,cAC5C+D,KAAK4E,SAAS7jB,MAAMe,GAAY,GAAG60B,KACrC,CACF,CACA,iBAAAqC,GACEhZ,KAAK4E,SAAS7jB,MAAMq4B,YAAc,GAClCpZ,KAAK4E,SAAS7jB,MAAMs4B,aAAe,EACrC,CAGA,sBAAO5c,CAAgBqH,EAAQhE,GAC7B,OAAOE,KAAKuH,MAAK,WACf,MAAMld,EAAO6tB,GAAM5S,oBAAoBtF,KAAM8D,GAC7C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQhE,EAJb,CAKF,GACF,EAOFS,GAAac,GAAGhc,SAAUuyB,GA9OK,4BA8O2C,SAAUxY,GAClF,MAAM7S,EAASqZ,GAAec,uBAAuB1G,MACjD,C
AAC,IAAK,QAAQoB,SAASpB,KAAKgH,UAC9B5H,EAAMkD,iBAER/B,GAAae,IAAI/U,EAAQ+qB,IAAcgC,IACjCA,EAAUtX,kBAIdzB,GAAae,IAAI/U,EAAQ8qB,IAAgB,KACnC1c,GAAUqF,OACZA,KAAKsS,OACP,GACA,IAIJ,MAAMiH,EAAc3T,GAAeC,QAnQb,eAoQlB0T,GACFrB,GAAM7S,YAAYkU,GAAa9J,OAEpByI,GAAM5S,oBAAoB/Y,GAClCmb,OAAO1H,KACd,IACA4G,GAAqBsR,IAMrB/b,GAAmB+b,IAcnB,MAEMsB,GAAc,gBACdC,GAAiB,YACjBC,GAAwB,OAAOF,KAAcC,KAE7CE,GAAoB,OACpBC,GAAuB,UACvBC,GAAoB,SAEpBC,GAAgB,kBAChBC,GAAe,OAAOP,KACtBQ,GAAgB,QAAQR,KACxBS,GAAe,OAAOT,KACtBU,GAAuB,gBAAgBV,KACvCW,GAAiB,SAASX,KAC1BY,GAAe,SAASZ,KACxBa,GAAyB,QAAQb,KAAcC,KAC/Ca,GAAwB,kBAAkBd,KAE1Ce,GAAY,CAChB9F,UAAU,EACV5J,UAAU,EACVpgB,QAAQ,GAEJ+vB,GAAgB,CACpB/F,SAAU,mBACV5J,SAAU,UACVpgB,OAAQ,WAOV,MAAMgwB,WAAkB/V,GACtB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKwP,UAAW,EAChBxP,KAAKoY,UAAYpY,KAAKqY,sBACtBrY,KAAKsY,WAAatY,KAAKuY,uBACvBvY,KAAK0L,oBACP,CAGA,kBAAWhI,GACT,OAAO6W,EACT,CACA,sBAAW5W,GACT,OAAO6W,EACT,CACA,eAAWje,GACT,MApDW,WAqDb,CAGA,MAAAmL,CAAO5H,GACL,OAAOE,KAAKwP,SAAWxP,KAAKyP,OAASzP,KAAK0P,KAAK5P,EACjD,CACA,IAAA4P,CAAK5P,GACCE,KAAKwP,UAGSjP,GAAaqB,QAAQ5B,KAAK4E,SAAUmV,GAAc,CAClEja,kBAEYkC,mBAGdhC,KAAKwP,UAAW,EAChBxP,KAAKoY,UAAU1I,OACV1P,KAAK6E,QAAQpa,SAChB,IAAIurB,IAAkBvG,OAExBzP,KAAK4E,SAASxjB,aAAa,cAAc,GACzC4e,KAAK4E,SAASxjB,aAAa,OAAQ,UACnC4e,KAAK4E,SAASvJ,UAAU5E,IAAImjB,IAW5B5Z,KAAKmF,gBAVoB,KAClBnF,KAAK6E,QAAQpa,SAAUuV,KAAK6E,QAAQ4P,UACvCzU,KAAKsY,WAAW/C,WAElBvV,KAAK4E,SAASvJ,UAAU5E,IAAIkjB,IAC5B3Z,KAAK4E,SAASvJ,UAAU1B,OAAOigB,IAC/BrZ,GAAaqB,QAAQ5B,KAAK4E,SAAUoV,GAAe,CACjDla,iBACA,GAEkCE,KAAK4E,UAAU,GACvD,CACA,IAAA6K,GACOzP,KAAKwP,WAGQjP,GAAaqB,QAAQ5B,KAAK4E,SAAUqV,IACxCjY,mBAGdhC,KAAKsY,WAAW5C,aAChB1V,KAAK4E,SAAS8V,OACd1a,KAAKwP,UAAW,EAChBxP,KAAK4E,SAASvJ,UAAU5E,IAAIojB,IAC5B7Z,KAAKoY,UAAU3I,OAUfzP,KAAKmF,gBAToB,KACvBnF,KAAK4E,SAASvJ,UAAU1B,OAAOggB,GAAmBE,IAClD7Z,KAAK4E,SAASzjB,gBAAgB,cAC9B6e,KAAK4E,SAASzjB,gBAAgB,QACzB6e,KAAK6E,QAAQpa,SAChB,IAAIurB,IAAkB3jB,QAExBkO,GAAaqB,QAAQ5B,KAAK4E,SAAUuV,GAAe,GAEfna,KAAK4E,UAAU,IACvD,CACA,OAAAG,GACE/E,KAAKoY,UAAUrT,UAC
f/E,KAAKsY,WAAW5C,aAChB/Q,MAAMI,SACR,CAGA,mBAAAsT,GACE,MASM1d,EAAYmG,QAAQd,KAAK6E,QAAQ4P,UACvC,OAAO,IAAIL,GAAS,CAClBJ,UA3HsB,qBA4HtBrZ,YACAyK,YAAY,EACZ8O,YAAalU,KAAK4E,SAAS7f,WAC3BkvB,cAAetZ,EAfK,KACU,WAA1BqF,KAAK6E,QAAQ4P,SAIjBzU,KAAKyP,OAHHlP,GAAaqB,QAAQ5B,KAAK4E,SAAUsV,GAG3B,EAUgC,MAE/C,CACA,oBAAA3B,GACE,OAAO,IAAInD,GAAU,CACnBF,YAAalV,KAAK4E,UAEtB,CACA,kBAAA8G,GACEnL,GAAac,GAAGrB,KAAK4E,SAAU0V,IAAuBlb,IA5IvC,WA6ITA,EAAMtiB,MAGNkjB,KAAK6E,QAAQgG,SACf7K,KAAKyP,OAGPlP,GAAaqB,QAAQ5B,KAAK4E,SAAUsV,IAAqB,GAE7D,CAGA,sBAAOzd,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAOowB,GAAUnV,oBAAoBtF,KAAM8D,GACjD,GAAsB,iBAAXA,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KAJb,CAKF,GACF,EAOFO,GAAac,GAAGhc,SAAUg1B,GA7JK,gCA6J2C,SAAUjb,GAClF,MAAM7S,EAASqZ,GAAec,uBAAuB1G,MAIrD,GAHI,CAAC,IAAK,QAAQoB,SAASpB,KAAKgH,UAC9B5H,EAAMkD,iBAEJpH,GAAW8E,MACb,OAEFO,GAAae,IAAI/U,EAAQ4tB,IAAgB,KAEnCxf,GAAUqF,OACZA,KAAKsS,OACP,IAIF,MAAMiH,EAAc3T,GAAeC,QAAQiU,IACvCP,GAAeA,IAAgBhtB,GACjCkuB,GAAUpV,YAAYkU,GAAa9J,OAExBgL,GAAUnV,oBAAoB/Y,GACtCmb,OAAO1H,KACd,IACAO,GAAac,GAAGzhB,OAAQ85B,IAAuB,KAC7C,IAAK,MAAM3f,KAAY6L,GAAezT,KAAK2nB,IACzCW,GAAUnV,oBAAoBvL,GAAU2V,MAC1C,IAEFnP,GAAac,GAAGzhB,OAAQw6B,IAAc,KACpC,IAAK,MAAM76B,KAAWqmB,GAAezT,KAAK,gDACG,UAAvClN,iBAAiB1F,GAASiC,UAC5Bi5B,GAAUnV,oBAAoB/lB,GAASkwB,MAE3C,IAEF7I,GAAqB6T,IAMrBte,GAAmBse,IAUnB,MACME,GAAmB,CAEvB,IAAK,CAAC,QAAS,MAAO,KAAM,OAAQ,OAHP,kBAI7B9pB,EAAG,CAAC,SAAU,OAAQ,QAAS,OAC/B+pB,KAAM,GACN9pB,EAAG,GACH+pB,GAAI,GACJC,IAAK,GACLC,KAAM,GACNC,IAAK,GACLC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJC,GAAI,GACJnqB,EAAG,GACHub,IAAK,CAAC,MAAO,SAAU,MAAO,QAAS,QAAS,UAChD6O,GAAI,GACJC,GAAI,GACJC,EAAG,GACHC,IAAK,GACLC,EAAG,GACHC,MAAO,GACPC,KAAM,GACNC,IAAK,GACLC,IAAK,GACLC,OAAQ,GACRC,EAAG,GACHC,GAAI,IAIAC,GAAgB,IAAI/lB,IAAI,CAAC,aAAc,OAAQ,OAAQ,WAAY,WAAY,SAAU,MAAO,eAShGgmB,GAAmB,0DACnBC,GAAmB,CAACx6B,EAAWy6B,KACnC,MAAMC,EAAgB16B,EAAUvC,SAASC,cACzC,OAAI+8B,EAAqBp
b,SAASqb,IAC5BJ,GAAc1lB,IAAI8lB,IACb3b,QAAQwb,GAAiBj5B,KAAKtB,EAAU26B,YAM5CF,EAAqBr2B,QAAOw2B,GAAkBA,aAA0BpY,SAAQ9R,MAAKmqB,GAASA,EAAMv5B,KAAKo5B,IAAe,EA0C3HI,GAAY,CAChBC,UAAWnC,GACXoC,QAAS,CAAC,EAEVC,WAAY,GACZnwB,MAAM,EACNowB,UAAU,EACVC,WAAY,KACZC,SAAU,eAENC,GAAgB,CACpBN,UAAW,SACXC,QAAS,SACTC,WAAY,oBACZnwB,KAAM,UACNowB,SAAU,UACVC,WAAY,kBACZC,SAAU,UAENE,GAAqB,CACzBC,MAAO,iCACPvjB,SAAU,oBAOZ,MAAMwjB,WAAwB9Z,GAC5B,WAAAU,CAAYL,GACVa,QACA3E,KAAK6E,QAAU7E,KAAK6D,WAAWC,EACjC,CAGA,kBAAWJ,GACT,OAAOmZ,EACT,CACA,sBAAWlZ,GACT,OAAOyZ,EACT,CACA,eAAW7gB,GACT,MA3CW,iBA4Cb,CAGA,UAAAihB,GACE,OAAOxgC,OAAOmiB,OAAOa,KAAK6E,QAAQkY,SAASj6B,KAAIghB,GAAU9D,KAAKyd,yBAAyB3Z,KAAS3d,OAAO2a,QACzG,CACA,UAAA4c,GACE,OAAO1d,KAAKwd,aAAa9sB,OAAS,CACpC,CACA,aAAAitB,CAAcZ,GAMZ,OALA/c,KAAK4d,cAAcb,GACnB/c,KAAK6E,QAAQkY,QAAU,IAClB/c,KAAK6E,QAAQkY,WACbA,GAEE/c,IACT,CACA,MAAA6d,GACE,MAAMC,EAAkBz4B,SAASqvB,cAAc,OAC/CoJ,EAAgBC,UAAY/d,KAAKge,eAAehe,KAAK6E,QAAQsY,UAC7D,IAAK,MAAOpjB,EAAUkkB,KAASjhC,OAAOmkB,QAAQnB,KAAK6E,QAAQkY,SACzD/c,KAAKke,YAAYJ,EAAiBG,EAAMlkB,GAE1C,MAAMojB,EAAWW,EAAgBhY,SAAS,GACpCkX,EAAahd,KAAKyd,yBAAyBzd,KAAK6E,QAAQmY,YAI9D,OAHIA,GACFG,EAAS9hB,UAAU5E,OAAOumB,EAAW96B,MAAM,MAEtCi7B,CACT,CAGA,gBAAAlZ,CAAiBH,GACfa,MAAMV,iBAAiBH,GACvB9D,KAAK4d,cAAc9Z,EAAOiZ,QAC5B,CACA,aAAAa,CAAcO,GACZ,IAAK,MAAOpkB,EAAUgjB,KAAY//B,OAAOmkB,QAAQgd,GAC/CxZ,MAAMV,iBAAiB,CACrBlK,WACAujB,MAAOP,GACNM,GAEP,CACA,WAAAa,CAAYf,EAAUJ,EAAShjB,GAC7B,MAAMqkB,EAAkBxY,GAAeC,QAAQ9L,EAAUojB,GACpDiB,KAGLrB,EAAU/c,KAAKyd,yBAAyBV,IAKpC,GAAUA,GACZ/c,KAAKqe,sBAAsB3jB,GAAWqiB,GAAUqB,GAG9Cpe,KAAK6E,QAAQhY,KACfuxB,EAAgBL,UAAY/d,KAAKge,eAAejB,GAGlDqB,EAAgBE,YAAcvB,EAX5BqB,EAAgBzkB,SAYpB,CACA,cAAAqkB,CAAeG,GACb,OAAOne,KAAK6E,QAAQoY,SApJxB,SAAsBsB,EAAYzB,EAAW0B,GAC3C,IAAKD,EAAW7tB,OACd,OAAO6tB,EAET,GAAIC,GAAgD,mBAArBA,EAC7B,OAAOA,EAAiBD,GAE1B,MACME,GADY,IAAI7+B,OAAO8+B,WACKC,gBAAgBJ,EAAY,aACxD19B,EAAW,GAAGlC,UAAU8/B,EAAgBvyB,KAAKkU,iBAAiB,MACpE,IAAK,MAAM7gB,KAAWsB,EAAU,CAC9B,MAAM+9B,EAAcr/B,EAAQC,SAASC,cACrC,IAAKzC,OAAO4D,KAAKk8B,GAAW1b,SAASwd
,GAAc,CACjDr/B,EAAQoa,SACR,QACF,CACA,MAAMklB,EAAgB,GAAGlgC,UAAUY,EAAQ0B,YACrC69B,EAAoB,GAAGngC,OAAOm+B,EAAU,MAAQ,GAAIA,EAAU8B,IAAgB,IACpF,IAAK,MAAM78B,KAAa88B,EACjBtC,GAAiBx6B,EAAW+8B,IAC/Bv/B,EAAQ4B,gBAAgBY,EAAUvC,SAGxC,CACA,OAAOi/B,EAAgBvyB,KAAK6xB,SAC9B,CA2HmCgB,CAAaZ,EAAKne,KAAK6E,QAAQiY,UAAW9c,KAAK6E,QAAQqY,YAAciB,CACtG,CACA,wBAAAV,CAAyBU,GACvB,OAAOthB,GAAQshB,EAAK,CAACne,MACvB,CACA,qBAAAqe,CAAsB9+B,EAAS6+B,GAC7B,GAAIpe,KAAK6E,QAAQhY,KAGf,OAFAuxB,EAAgBL,UAAY,QAC5BK,EAAgBzJ,OAAOp1B,GAGzB6+B,EAAgBE,YAAc/+B,EAAQ++B,WACxC,EAeF,MACMU,GAAwB,IAAI1oB,IAAI,CAAC,WAAY,YAAa,eAC1D2oB,GAAoB,OAEpBC,GAAoB,OAEpBC,GAAiB,SACjBC,GAAmB,gBACnBC,GAAgB,QAChBC,GAAgB,QAahBC,GAAgB,CACpBC,KAAM,OACNC,IAAK,MACLC,MAAOzjB,KAAU,OAAS,QAC1B0jB,OAAQ,SACRC,KAAM3jB,KAAU,QAAU,QAEtB4jB,GAAY,CAChB/C,UAAWnC,GACXmF,WAAW,EACX7xB,SAAU,kBACV8xB,WAAW,EACXC,YAAa,GACbC,MAAO,EACPjwB,mBAAoB,CAAC,MAAO,QAAS,SAAU,QAC/CnD,MAAM,EACN7E,OAAQ,CAAC,EAAG,GACZtJ,UAAW,MACXmzB,aAAc,KACdoL,UAAU,EACVC,WAAY,KACZnjB,UAAU,EACVojB,SAAU,+GACV+C,MAAO,GACPte,QAAS,eAELue,GAAgB,CACpBrD,UAAW,SACXgD,UAAW,UACX7xB,SAAU,mBACV8xB,UAAW,2BACXC,YAAa,oBACbC,MAAO,kBACPjwB,mBAAoB,QACpBnD,KAAM,UACN7E,OAAQ,0BACRtJ,UAAW,oBACXmzB,aAAc,yBACdoL,SAAU,UACVC,WAAY,kBACZnjB,SAAU,mBACVojB,SAAU,SACV+C,MAAO,4BACPte,QAAS,UAOX,MAAMwe,WAAgB1b,GACpB,WAAAP,CAAY5kB,EAASukB,GACnB,QAAsB,IAAX,EACT,MAAM,IAAIU,UAAU,+DAEtBG,MAAMplB,EAASukB,GAGf9D,KAAKqgB,YAAa,EAClBrgB,KAAKsgB,SAAW,EAChBtgB,KAAKugB,WAAa,KAClBvgB,KAAKwgB,eAAiB,CAAC,EACvBxgB,KAAKgS,QAAU,KACfhS,KAAKygB,iBAAmB,KACxBzgB,KAAK0gB,YAAc,KAGnB1gB,KAAK2gB,IAAM,KACX3gB,KAAK4gB,gBACA5gB,KAAK6E,QAAQ9K,UAChBiG,KAAK6gB,WAET,CAGA,kBAAWnd,GACT,OAAOmc,EACT,CACA,sBAAWlc,GACT,OAAOwc,EACT,CACA,eAAW5jB,GACT,MAxGW,SAyGb,CAGA,MAAAukB,GACE9gB,KAAKqgB,YAAa,CACpB,CACA,OAAAU,GACE/gB,KAAKqgB,YAAa,CACpB,CACA,aAAAW,GACEhhB,KAAKqgB,YAAcrgB,KAAKqgB,UAC1B,CACA,MAAA3Y,GACO1H,KAAKqgB,aAGVrgB,KAAKwgB,eAAeS,OAASjhB,KAAKwgB,eAAeS,MAC7CjhB,KAAKwP,WACPxP,KAAKkhB,SAGPlhB,KAAKmhB,SACP,CACA,OAAApc,GACEgI,aAAa/M,KAAKsgB,UAClB/f,GAAaC,IAAIR,KAAK4E,SAAS
5J,QAAQmkB,IAAiBC,GAAkBpf,KAAKohB,mBAC3EphB,KAAK4E,SAASpJ,aAAa,2BAC7BwE,KAAK4E,SAASxjB,aAAa,QAAS4e,KAAK4E,SAASpJ,aAAa,2BAEjEwE,KAAKqhB,iBACL1c,MAAMI,SACR,CACA,IAAA2K,GACE,GAAoC,SAAhC1P,KAAK4E,SAAS7jB,MAAM6wB,QACtB,MAAM,IAAIhO,MAAM,uCAElB,IAAM5D,KAAKshB,mBAAoBthB,KAAKqgB,WAClC,OAEF,MAAM/G,EAAY/Y,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAlItD,SAoIX+b,GADa9lB,GAAeuE,KAAK4E,WACL5E,KAAK4E,SAAS9kB,cAAcwF,iBAAiBd,SAASwb,KAAK4E,UAC7F,GAAI0U,EAAUtX,mBAAqBuf,EACjC,OAIFvhB,KAAKqhB,iBACL,MAAMV,EAAM3gB,KAAKwhB,iBACjBxhB,KAAK4E,SAASxjB,aAAa,mBAAoBu/B,EAAInlB,aAAa,OAChE,MAAM,UACJukB,GACE/f,KAAK6E,QAYT,GAXK7E,KAAK4E,SAAS9kB,cAAcwF,gBAAgBd,SAASwb,KAAK2gB,OAC7DZ,EAAUpL,OAAOgM,GACjBpgB,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAhJpC,cAkJnBxF,KAAKgS,QAAUhS,KAAKqS,cAAcsO,GAClCA,EAAItlB,UAAU5E,IAAIyoB,IAMd,iBAAkB75B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK4Z,UAC/CvF,GAAac,GAAG9hB,EAAS,YAAaqc,IAU1CoE,KAAKmF,gBAPY,KACf5E,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAhKrC,WAiKQ,IAApBxF,KAAKugB,YACPvgB,KAAKkhB,SAEPlhB,KAAKugB,YAAa,CAAK,GAEKvgB,KAAK2gB,IAAK3gB,KAAK6N,cAC/C,CACA,IAAA4B,GACE,GAAKzP,KAAKwP,aAGQjP,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UA/KtD,SAgLHxD,iBAAd,CAQA,GALYhC,KAAKwhB,iBACbnmB,UAAU1B,OAAOulB,IAIjB,iBAAkB75B,SAASC,gBAC7B,IAAK,MAAM/F,IAAW,GAAGZ,UAAU0G,SAAS6G,KAAK4Z,UAC/CvF,GAAaC,IAAIjhB,EAAS,YAAaqc,IAG3CoE,KAAKwgB,eAA4B,OAAI,EACrCxgB,KAAKwgB,eAAelB,KAAiB,EACrCtf,KAAKwgB,eAAenB,KAAiB,EACrCrf,KAAKugB,WAAa,KAYlBvgB,KAAKmF,gBAVY,KACXnF,KAAKyhB,yBAGJzhB,KAAKugB,YACRvgB,KAAKqhB,iBAEPrhB,KAAK4E,SAASzjB,gBAAgB,oBAC9Bof,GAAaqB,QAAQ5B,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAzMpC,WAyM8D,GAEnDxF,KAAK2gB,IAAK3gB,KAAK6N,cA1B7C,CA2BF,CACA,MAAA9iB,GACMiV,KAAKgS,SACPhS,KAAKgS,QAAQjnB,QAEjB,CAGA,cAAAu2B,GACE,OAAOxgB,QAAQd,KAAK0hB,YACtB,CACA,cAAAF,GAIE,OAHKxhB,KAAK2gB,MACR3gB,KAAK2gB,IAAM3gB,KAAK2hB,kBAAkB3hB,KAAK0gB,aAAe1gB,KAAK4hB,2BAEtD5hB,KAAK2gB,GACd,CACA,iBAAAgB,CAAkB5E,GAChB,MAAM4D,EAAM3gB,KAAK6hB,oBAAoB9E,GAASc,SAG9C,IAAK8C,EACH,OAAO,KAETA,EAAItlB,UAAU1B,OAAOslB,GAAmBC,IAExCyB
,EAAItlB,UAAU5E,IAAI,MAAMuJ,KAAKmE,YAAY5H,aACzC,MAAMulB,EAvuGKC,KACb,GACEA,GAAU5/B,KAAK6/B,MA/BH,IA+BS7/B,KAAK8/B,gBACnB58B,SAAS68B,eAAeH,IACjC,OAAOA,CAAM,EAmuGGI,CAAOniB,KAAKmE,YAAY5H,MAAM1c,WAK5C,OAJA8gC,EAAIv/B,aAAa,KAAM0gC,GACnB9hB,KAAK6N,eACP8S,EAAItlB,UAAU5E,IAAIwoB,IAEb0B,CACT,CACA,UAAAyB,CAAWrF,GACT/c,KAAK0gB,YAAc3D,EACf/c,KAAKwP,aACPxP,KAAKqhB,iBACLrhB,KAAK0P,OAET,CACA,mBAAAmS,CAAoB9E,GAYlB,OAXI/c,KAAKygB,iBACPzgB,KAAKygB,iBAAiB9C,cAAcZ,GAEpC/c,KAAKygB,iBAAmB,IAAIlD,GAAgB,IACvCvd,KAAK6E,QAGRkY,UACAC,WAAYhd,KAAKyd,yBAAyBzd,KAAK6E,QAAQmb,eAGpDhgB,KAAKygB,gBACd,CACA,sBAAAmB,GACE,MAAO,CACL,iBAA0B5hB,KAAK0hB,YAEnC,CACA,SAAAA,GACE,OAAO1hB,KAAKyd,yBAAyBzd,KAAK6E,QAAQqb,QAAUlgB,KAAK4E,SAASpJ,aAAa,yBACzF,CAGA,4BAAA6mB,CAA6BjjB,GAC3B,OAAOY,KAAKmE,YAAYmB,oBAAoBlG,EAAMW,eAAgBC,KAAKsiB,qBACzE,CACA,WAAAzU,GACE,OAAO7N,KAAK6E,QAAQib,WAAa9f,KAAK2gB,KAAO3gB,KAAK2gB,IAAItlB,UAAU7W,SAASy6B,GAC3E,CACA,QAAAzP,GACE,OAAOxP,KAAK2gB,KAAO3gB,KAAK2gB,IAAItlB,UAAU7W,SAAS06B,GACjD,CACA,aAAA7M,CAAcsO,GACZ,MAAMjiC,EAAYme,GAAQmD,KAAK6E,QAAQnmB,UAAW,CAACshB,KAAM2gB,EAAK3gB,KAAK4E,WAC7D2d,EAAahD,GAAc7gC,EAAU+lB,eAC3C,OAAO,GAAoBzE,KAAK4E,SAAU+b,EAAK3gB,KAAKyS,iBAAiB8P,GACvE,CACA,UAAA1P,GACE,MAAM,OACJ7qB,GACEgY,KAAK6E,QACT,MAAsB,iBAAX7c,EACFA,EAAO9F,MAAM,KAAKY,KAAInF,GAAS4f,OAAO6P,SAASzvB,EAAO,MAEzC,mBAAXqK,EACF8qB,GAAc9qB,EAAO8qB,EAAY9S,KAAK4E,UAExC5c,CACT,CACA,wBAAAy1B,CAAyBU,GACvB,OAAOthB,GAAQshB,EAAK,CAACne,KAAK4E,UAC5B,CACA,gBAAA6N,CAAiB8P,GACf,MAAMxP,EAAwB,CAC5Br0B,UAAW6jC,EACXnsB,UAAW,CAAC,CACV9V,KAAM,OACNmB,QAAS,CACPuO,mBAAoBgQ,KAAK6E,QAAQ7U,qBAElC,CACD1P,KAAM,SACNmB,QAAS,CACPuG,OAAQgY,KAAK6S,eAEd,CACDvyB,KAAM,kBACNmB,QAAS,CACPwM,SAAU+R,KAAK6E,QAAQ5W,WAExB,CACD3N,KAAM,QACNmB,QAAS,CACPlC,QAAS,IAAIygB,KAAKmE,YAAY5H,eAE/B,CACDjc,KAAM,kBACNC,SAAS,EACTC,MAAO,aACPC,GAAI4J,IAGF2V,KAAKwhB,iBAAiBpgC,aAAa,wBAAyBiJ,EAAK1J,MAAMjC,UAAU,KAIvF,MAAO,IACFq0B,KACAlW,GAAQmD,KAAK6E,QAAQgN,aAAc,CAACkB,IAE3C,CACA,aAAA6N,GACE,MAAM4B,EAAWxiB,KAAK6E,QAAQjD,QAAQ1f,MAAM,KAC5C,IAAK,MAAM0f,KAAW4gB,EACpB,GAAgB,UAAZ
5gB,EACFrB,GAAac,GAAGrB,KAAK4E,SAAU5E,KAAKmE,YAAYqB,UAjVlC,SAiV4DxF,KAAK6E,QAAQ9K,UAAUqF,IAC/EY,KAAKqiB,6BAA6BjjB,GAC1CsI,QAAQ,SAEb,GA3VU,WA2VN9F,EAA4B,CACrC,MAAM6gB,EAAU7gB,IAAYyd,GAAgBrf,KAAKmE,YAAYqB,UAnV5C,cAmV0ExF,KAAKmE,YAAYqB,UArV5F,WAsVVkd,EAAW9gB,IAAYyd,GAAgBrf,KAAKmE,YAAYqB,UAnV7C,cAmV2ExF,KAAKmE,YAAYqB,UArV5F,YAsVjBjF,GAAac,GAAGrB,KAAK4E,SAAU6d,EAASziB,KAAK6E,QAAQ9K,UAAUqF,IAC7D,MAAM+T,EAAUnT,KAAKqiB,6BAA6BjjB,GAClD+T,EAAQqN,eAA8B,YAAfphB,EAAMqB,KAAqB6e,GAAgBD,KAAiB,EACnFlM,EAAQgO,QAAQ,IAElB5gB,GAAac,GAAGrB,KAAK4E,SAAU8d,EAAU1iB,KAAK6E,QAAQ9K,UAAUqF,IAC9D,MAAM+T,EAAUnT,KAAKqiB,6BAA6BjjB,GAClD+T,EAAQqN,eAA8B,aAAfphB,EAAMqB,KAAsB6e,GAAgBD,IAAiBlM,EAAQvO,SAASpgB,SAAS4a,EAAMU,eACpHqT,EAAQ+N,QAAQ,GAEpB,CAEFlhB,KAAKohB,kBAAoB,KACnBphB,KAAK4E,UACP5E,KAAKyP,MACP,EAEFlP,GAAac,GAAGrB,KAAK4E,SAAS5J,QAAQmkB,IAAiBC,GAAkBpf,KAAKohB,kBAChF,CACA,SAAAP,GACE,MAAMX,EAAQlgB,KAAK4E,SAASpJ,aAAa,SACpC0kB,IAGAlgB,KAAK4E,SAASpJ,aAAa,eAAkBwE,KAAK4E,SAAS0Z,YAAY3Y,QAC1E3F,KAAK4E,SAASxjB,aAAa,aAAc8+B,GAE3ClgB,KAAK4E,SAASxjB,aAAa,yBAA0B8+B,GACrDlgB,KAAK4E,SAASzjB,gBAAgB,SAChC,CACA,MAAAggC,GACMnhB,KAAKwP,YAAcxP,KAAKugB,WAC1BvgB,KAAKugB,YAAa,GAGpBvgB,KAAKugB,YAAa,EAClBvgB,KAAK2iB,aAAY,KACX3iB,KAAKugB,YACPvgB,KAAK0P,MACP,GACC1P,KAAK6E,QAAQob,MAAMvQ,MACxB,CACA,MAAAwR,GACMlhB,KAAKyhB,yBAGTzhB,KAAKugB,YAAa,EAClBvgB,KAAK2iB,aAAY,KACV3iB,KAAKugB,YACRvgB,KAAKyP,MACP,GACCzP,KAAK6E,QAAQob,MAAMxQ,MACxB,CACA,WAAAkT,CAAY/kB,EAASglB,GACnB7V,aAAa/M,KAAKsgB,UAClBtgB,KAAKsgB,SAAWziB,WAAWD,EAASglB,EACtC,CACA,oBAAAnB,GACE,OAAOzkC,OAAOmiB,OAAOa,KAAKwgB,gBAAgBpf,UAAS,EACrD,CACA,UAAAyC,CAAWC,GACT,MAAM+e,EAAiB7f,GAAYG,kBAAkBnD,KAAK4E,UAC1D,IAAK,MAAMke,KAAiB9lC,OAAO4D,KAAKiiC,GAClC7D,GAAsBroB,IAAImsB,WACrBD,EAAeC,GAU1B,OAPAhf,EAAS,IACJ+e,KACmB,iBAAX/e,GAAuBA,EAASA,EAAS,CAAC,GAEvDA,EAAS9D,KAAK+D,gBAAgBD,GAC9BA,EAAS9D,KAAKgE,kBAAkBF,GAChC9D,KAAKiE,iBAAiBH,GACfA,CACT,CACA,iBAAAE,CAAkBF,GAchB,OAbAA,EAAOic,WAAiC,IAArBjc,EAAOic,UAAsB16B,SAAS6G,KAAOwO,GAAWoJ,EAAOic,WACtD,iBAAjBjc,EAAOmc,QAChBnc,EAAOmc,MAAQ,CACbvQ,KA
AM5L,EAAOmc,MACbxQ,KAAM3L,EAAOmc,QAGW,iBAAjBnc,EAAOoc,QAChBpc,EAAOoc,MAAQpc,EAAOoc,MAAMrgC,YAEA,iBAAnBikB,EAAOiZ,UAChBjZ,EAAOiZ,QAAUjZ,EAAOiZ,QAAQl9B,YAE3BikB,CACT,CACA,kBAAAwe,GACE,MAAMxe,EAAS,CAAC,EAChB,IAAK,MAAOhnB,EAAKa,KAAUX,OAAOmkB,QAAQnB,KAAK6E,SACzC7E,KAAKmE,YAAYT,QAAQ5mB,KAASa,IACpCmmB,EAAOhnB,GAAOa,GASlB,OANAmmB,EAAO/J,UAAW,EAClB+J,EAAOlC,QAAU,SAKVkC,CACT,CACA,cAAAud,GACMrhB,KAAKgS,UACPhS,KAAKgS,QAAQhZ,UACbgH,KAAKgS,QAAU,MAEbhS,KAAK2gB,MACP3gB,KAAK2gB,IAAIhnB,SACTqG,KAAK2gB,IAAM,KAEf,CAGA,sBAAOlkB,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAO+1B,GAAQ9a,oBAAoBtF,KAAM8D,GAC/C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOF3H,GAAmBikB,IAcnB,MAGM2C,GAAY,IACb3C,GAAQ1c,QACXqZ,QAAS,GACT/0B,OAAQ,CAAC,EAAG,GACZtJ,UAAW,QACXy+B,SAAU,8IACVvb,QAAS,SAELohB,GAAgB,IACjB5C,GAAQzc,YACXoZ,QAAS,kCAOX,MAAMkG,WAAgB7C,GAEpB,kBAAW1c,GACT,OAAOqf,EACT,CACA,sBAAWpf,GACT,OAAOqf,EACT,CACA,eAAWzmB,GACT,MA7BW,SA8Bb,CAGA,cAAA+kB,GACE,OAAOthB,KAAK0hB,aAAe1hB,KAAKkjB,aAClC,CAGA,sBAAAtB,GACE,MAAO,CACL,kBAAkB5hB,KAAK0hB,YACvB,gBAAoB1hB,KAAKkjB,cAE7B,CACA,WAAAA,GACE,OAAOljB,KAAKyd,yBAAyBzd,KAAK6E,QAAQkY,QACpD,CAGA,sBAAOtgB,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAO44B,GAAQ3d,oBAAoBtF,KAAM8D,GAC/C,GAAsB,iBAAXA,EAAX,CAGA,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOF3H,GAAmB8mB,IAcnB,MAEME,GAAc,gBAEdC,GAAiB,WAAWD,KAC5BE,GAAc,QAAQF,KACtBG,GAAwB,OAAOH,cAE/BI,GAAsB,SAEtBC,GAAwB,SAExBC,GAAqB,YAGrBC,GAAsB,GAAGD,mBAA+CA,uBAGxEE,GAAY,CAChB37B,OAAQ,KAER47B,WAAY,eACZC,cAAc,EACdt3B,OAAQ,KACRu3B,UAAW,CAAC,GAAK,GAAK,IAElBC,GAAgB,CACpB/7B,OAAQ,gBAER47B,WAAY,SACZC,aAAc,UACdt3B,OAAQ,UACRu3B,UAAW,SAOb,MAAME,WAAkBtf,GACtB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GAGf9D,KAAKikB,aAAe,IAAI/yB,IACxB8O,KAAKkkB,oBAAsB,IAAIhzB,IAC/B8O,KAAKmkB,aAA6D,YAA9Cl/B,iBAAiB+a,KAAK4E,UAAU5Y,UAA0B,KAAOgU,KAAK4E,SAC1F5E,KAAKokB,cAAgB,KACrBpkB,KAAKqkB,UAAY,KACjBrkB,KAAKskB,oBAAsB,CACzBC,gBAAiB,EACjBC,gBAAiB,GAEnBxkB,KAAK
ykB,SACP,CAGA,kBAAW/gB,GACT,OAAOigB,EACT,CACA,sBAAWhgB,GACT,OAAOogB,EACT,CACA,eAAWxnB,GACT,MAhEW,WAiEb,CAGA,OAAAkoB,GACEzkB,KAAK0kB,mCACL1kB,KAAK2kB,2BACD3kB,KAAKqkB,UACPrkB,KAAKqkB,UAAUO,aAEf5kB,KAAKqkB,UAAYrkB,KAAK6kB,kBAExB,IAAK,MAAMC,KAAW9kB,KAAKkkB,oBAAoB/kB,SAC7Ca,KAAKqkB,UAAUU,QAAQD,EAE3B,CACA,OAAA/f,GACE/E,KAAKqkB,UAAUO,aACfjgB,MAAMI,SACR,CAGA,iBAAAf,CAAkBF,GAShB,OAPAA,EAAOvX,OAASmO,GAAWoJ,EAAOvX,SAAWlH,SAAS6G,KAGtD4X,EAAO8f,WAAa9f,EAAO9b,OAAS,GAAG8b,EAAO9b,oBAAsB8b,EAAO8f,WAC3C,iBAArB9f,EAAOggB,YAChBhgB,EAAOggB,UAAYhgB,EAAOggB,UAAU5hC,MAAM,KAAKY,KAAInF,GAAS4f,OAAOC,WAAW7f,MAEzEmmB,CACT,CACA,wBAAA6gB,GACO3kB,KAAK6E,QAAQgf,eAKlBtjB,GAAaC,IAAIR,KAAK6E,QAAQtY,OAAQ82B,IACtC9iB,GAAac,GAAGrB,KAAK6E,QAAQtY,OAAQ82B,GAAaG,IAAuBpkB,IACvE,MAAM4lB,EAAoBhlB,KAAKkkB,oBAAoB/mC,IAAIiiB,EAAM7S,OAAOtB,MACpE,GAAI+5B,EAAmB,CACrB5lB,EAAMkD,iBACN,MAAM3G,EAAOqE,KAAKmkB,cAAgBvkC,OAC5BmE,EAASihC,EAAkB3gC,UAAY2b,KAAK4E,SAASvgB,UAC3D,GAAIsX,EAAKspB,SAKP,YAJAtpB,EAAKspB,SAAS,CACZtjC,IAAKoC,EACLmhC,SAAU,WAMdvpB,EAAKlQ,UAAY1H,CACnB,KAEJ,CACA,eAAA8gC,GACE,MAAMpjC,EAAU,CACdka,KAAMqE,KAAKmkB,aACXL,UAAW9jB,KAAK6E,QAAQif,UACxBF,WAAY5jB,KAAK6E,QAAQ+e,YAE3B,OAAO,IAAIuB,sBAAqBhkB,GAAWnB,KAAKolB,kBAAkBjkB,IAAU1f,EAC9E,CAGA,iBAAA2jC,CAAkBjkB,GAChB,MAAMkkB,EAAgB/H,GAAStd,KAAKikB,aAAa9mC,IAAI,IAAImgC,EAAM/wB,OAAO4N,MAChEob,EAAW+H,IACftd,KAAKskB,oBAAoBC,gBAAkBjH,EAAM/wB,OAAOlI,UACxD2b,KAAKslB,SAASD,EAAc/H,GAAO,EAE/BkH,GAAmBxkB,KAAKmkB,cAAgB9+B,SAASC,iBAAiBmG,UAClE85B,EAAkBf,GAAmBxkB,KAAKskB,oBAAoBE,gBACpExkB,KAAKskB,oBAAoBE,gBAAkBA,EAC3C,IAAK,MAAMlH,KAASnc,EAAS,CAC3B,IAAKmc,EAAMkI,eAAgB,CACzBxlB,KAAKokB,cAAgB,KACrBpkB,KAAKylB,kBAAkBJ,EAAc/H,IACrC,QACF,CACA,MAAMoI,EAA2BpI,EAAM/wB,OAAOlI,WAAa2b,KAAKskB,oBAAoBC,gBAEpF,GAAIgB,GAAmBG,GAGrB,GAFAnQ,EAAS+H,IAEJkH,EACH,YAMCe,GAAoBG,GACvBnQ,EAAS+H,EAEb,CACF,CACA,gCAAAoH,GACE1kB,KAAKikB,aAAe,IAAI/yB,IACxB8O,KAAKkkB,oBAAsB,IAAIhzB,IAC/B,MAAMy0B,EAAc/f,GAAezT,KAAKqxB,GAAuBxjB,KAAK6E,QAAQtY,QAC5E,IAAK,MAAMq5B,KAAUD,EAAa,CAEhC,IAAKC,EAAO36B,MAAQiQ,GAAW0qB,GAC7B,SAE
F,MAAMZ,EAAoBpf,GAAeC,QAAQggB,UAAUD,EAAO36B,MAAO+U,KAAK4E,UAG1EjK,GAAUqqB,KACZhlB,KAAKikB,aAAalyB,IAAI8zB,UAAUD,EAAO36B,MAAO26B,GAC9C5lB,KAAKkkB,oBAAoBnyB,IAAI6zB,EAAO36B,KAAM+5B,GAE9C,CACF,CACA,QAAAM,CAAS/4B,GACHyT,KAAKokB,gBAAkB73B,IAG3ByT,KAAKylB,kBAAkBzlB,KAAK6E,QAAQtY,QACpCyT,KAAKokB,cAAgB73B,EACrBA,EAAO8O,UAAU5E,IAAI8sB,IACrBvjB,KAAK8lB,iBAAiBv5B,GACtBgU,GAAaqB,QAAQ5B,KAAK4E,SAAUwe,GAAgB,CAClDtjB,cAAevT,IAEnB,CACA,gBAAAu5B,CAAiBv5B,GAEf,GAAIA,EAAO8O,UAAU7W,SA9LQ,iBA+L3BohB,GAAeC,QArLc,mBAqLsBtZ,EAAOyO,QAtLtC,cAsLkEK,UAAU5E,IAAI8sB,SAGtG,IAAK,MAAMwC,KAAangB,GAAeI,QAAQzZ,EA9LnB,qBAiM1B,IAAK,MAAMxJ,KAAQ6iB,GAAeM,KAAK6f,EAAWrC,IAChD3gC,EAAKsY,UAAU5E,IAAI8sB,GAGzB,CACA,iBAAAkC,CAAkBhhC,GAChBA,EAAO4W,UAAU1B,OAAO4pB,IACxB,MAAMyC,EAAcpgB,GAAezT,KAAK,GAAGqxB,MAAyBD,KAAuB9+B,GAC3F,IAAK,MAAM9E,KAAQqmC,EACjBrmC,EAAK0b,UAAU1B,OAAO4pB,GAE1B,CAGA,sBAAO9mB,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAO25B,GAAU1e,oBAAoBtF,KAAM8D,GACjD,GAAsB,iBAAXA,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOFvD,GAAac,GAAGzhB,OAAQ0jC,IAAuB,KAC7C,IAAK,MAAM2C,KAAOrgB,GAAezT,KApOT,0BAqOtB6xB,GAAU1e,oBAAoB2gB,EAChC,IAOF9pB,GAAmB6nB,IAcnB,MAEMkC,GAAc,UACdC,GAAe,OAAOD,KACtBE,GAAiB,SAASF,KAC1BG,GAAe,OAAOH,KACtBI,GAAgB,QAAQJ,KACxBK,GAAuB,QAAQL,KAC/BM,GAAgB,UAAUN,KAC1BO,GAAsB,OAAOP,KAC7BQ,GAAiB,YACjBC,GAAkB,aAClBC,GAAe,UACfC,GAAiB,YACjBC,GAAW,OACXC,GAAU,MACVC,GAAoB,SACpBC,GAAoB,OACpBC,GAAoB,OAEpBC,GAA2B,mBAE3BC,GAA+B,QAAQD,MAIvCE,GAAuB,2EACvBC,GAAsB,YAFOF,uBAAiDA,mBAA6CA,OAE/EC,KAC5CE,GAA8B,IAAIP,8BAA6CA,+BAA8CA,4BAMnI,MAAMQ,WAAY9iB,GAChB,WAAAP,CAAY5kB,GACVolB,MAAMplB,GACNygB,KAAKiS,QAAUjS,KAAK4E,SAAS5J,QAdN,uCAelBgF,KAAKiS,UAOVjS,KAAKynB,sBAAsBznB,KAAKiS,QAASjS,KAAK0nB,gBAC9CnnB,GAAac,GAAGrB,KAAK4E,SAAU4hB,IAAepnB,GAASY,KAAK0M,SAAStN,KACvE,CAGA,eAAW7C,GACT,MAnDW,KAoDb,CAGA,IAAAmT,GAEE,MAAMiY,EAAY3nB,KAAK4E,SACvB,GAAI5E,KAAK4nB,cAAcD,GACrB,OAIF,MAAME,EAAS7nB,KAAK8nB,iBACdC,EAAYF,EAAStnB,GAAaqB,QAAQimB,EAAQ1B,GAAc,CAC
pErmB,cAAe6nB,IACZ,KACapnB,GAAaqB,QAAQ+lB,EAAWtB,GAAc,CAC9DvmB,cAAe+nB,IAEH7lB,kBAAoB+lB,GAAaA,EAAU/lB,mBAGzDhC,KAAKgoB,YAAYH,EAAQF,GACzB3nB,KAAKioB,UAAUN,EAAWE,GAC5B,CAGA,SAAAI,CAAU1oC,EAAS2oC,GACZ3oC,IAGLA,EAAQ8b,UAAU5E,IAAIuwB,IACtBhnB,KAAKioB,UAAUriB,GAAec,uBAAuBnnB,IAcrDygB,KAAKmF,gBAZY,KACsB,QAAjC5lB,EAAQic,aAAa,SAIzBjc,EAAQ4B,gBAAgB,YACxB5B,EAAQ6B,aAAa,iBAAiB,GACtC4e,KAAKmoB,gBAAgB5oC,GAAS,GAC9BghB,GAAaqB,QAAQriB,EAAS+mC,GAAe,CAC3CxmB,cAAeooB,KAPf3oC,EAAQ8b,UAAU5E,IAAIywB,GAQtB,GAE0B3nC,EAASA,EAAQ8b,UAAU7W,SAASyiC,KACpE,CACA,WAAAe,CAAYzoC,EAAS2oC,GACd3oC,IAGLA,EAAQ8b,UAAU1B,OAAOqtB,IACzBznC,EAAQm7B,OACR1a,KAAKgoB,YAAYpiB,GAAec,uBAAuBnnB,IAcvDygB,KAAKmF,gBAZY,KACsB,QAAjC5lB,EAAQic,aAAa,SAIzBjc,EAAQ6B,aAAa,iBAAiB,GACtC7B,EAAQ6B,aAAa,WAAY,MACjC4e,KAAKmoB,gBAAgB5oC,GAAS,GAC9BghB,GAAaqB,QAAQriB,EAAS6mC,GAAgB,CAC5CtmB,cAAeooB,KAPf3oC,EAAQ8b,UAAU1B,OAAOutB,GAQzB,GAE0B3nC,EAASA,EAAQ8b,UAAU7W,SAASyiC,KACpE,CACA,QAAAva,CAAStN,GACP,IAAK,CAACsnB,GAAgBC,GAAiBC,GAAcC,GAAgBC,GAAUC,IAAS3lB,SAAShC,EAAMtiB,KACrG,OAEFsiB,EAAMuU,kBACNvU,EAAMkD,iBACN,MAAMwD,EAAW9F,KAAK0nB,eAAevhC,QAAO5G,IAAY2b,GAAW3b,KACnE,IAAI6oC,EACJ,GAAI,CAACtB,GAAUC,IAAS3lB,SAAShC,EAAMtiB,KACrCsrC,EAAoBtiB,EAAS1G,EAAMtiB,MAAQgqC,GAAW,EAAIhhB,EAASpV,OAAS,OACvE,CACL,MAAM2c,EAAS,CAACsZ,GAAiBE,IAAgBzlB,SAAShC,EAAMtiB,KAChEsrC,EAAoBtqB,GAAqBgI,EAAU1G,EAAM7S,OAAQ8gB,GAAQ,EAC3E,CACI+a,IACFA,EAAkB9V,MAAM,CACtB+V,eAAe,IAEjBb,GAAIliB,oBAAoB8iB,GAAmB1Y,OAE/C,CACA,YAAAgY,GAEE,OAAO9hB,GAAezT,KAAKm1B,GAAqBtnB,KAAKiS,QACvD,CACA,cAAA6V,GACE,OAAO9nB,KAAK0nB,eAAev1B,MAAKzN,GAASsb,KAAK4nB,cAAcljC,MAAW,IACzE,CACA,qBAAA+iC,CAAsBhjC,EAAQqhB,GAC5B9F,KAAKsoB,yBAAyB7jC,EAAQ,OAAQ,WAC9C,IAAK,MAAMC,KAASohB,EAClB9F,KAAKuoB,6BAA6B7jC,EAEtC,CACA,4BAAA6jC,CAA6B7jC,GAC3BA,EAAQsb,KAAKwoB,iBAAiB9jC,GAC9B,MAAM+jC,EAAWzoB,KAAK4nB,cAAcljC,GAC9BgkC,EAAY1oB,KAAK2oB,iBAAiBjkC,GACxCA,EAAMtD,aAAa,gBAAiBqnC,GAChCC,IAAchkC,GAChBsb,KAAKsoB,yBAAyBI,EAAW,OAAQ,gBAE9CD,GACH/jC,EAAMtD,aAAa,WAAY,MAEjC4e,KAAKsoB,yBAAyB5jC,EAAO,OAAQ,OAG7Csb,KAAK4oB,mCAAmClkC,EAC
1C,CACA,kCAAAkkC,CAAmClkC,GACjC,MAAM6H,EAASqZ,GAAec,uBAAuBhiB,GAChD6H,IAGLyT,KAAKsoB,yBAAyB/7B,EAAQ,OAAQ,YAC1C7H,EAAMyV,IACR6F,KAAKsoB,yBAAyB/7B,EAAQ,kBAAmB,GAAG7H,EAAMyV,MAEtE,CACA,eAAAguB,CAAgB5oC,EAASspC,GACvB,MAAMH,EAAY1oB,KAAK2oB,iBAAiBppC,GACxC,IAAKmpC,EAAUrtB,UAAU7W,SApKN,YAqKjB,OAEF,MAAMkjB,EAAS,CAAC3N,EAAUia,KACxB,MAAMz0B,EAAUqmB,GAAeC,QAAQ9L,EAAU2uB,GAC7CnpC,GACFA,EAAQ8b,UAAUqM,OAAOsM,EAAW6U,EACtC,EAEFnhB,EAAOyf,GAA0BH,IACjCtf,EA5K2B,iBA4KIwf,IAC/BwB,EAAUtnC,aAAa,gBAAiBynC,EAC1C,CACA,wBAAAP,CAAyB/oC,EAASwC,EAAWpE,GACtC4B,EAAQgc,aAAaxZ,IACxBxC,EAAQ6B,aAAaW,EAAWpE,EAEpC,CACA,aAAAiqC,CAAczY,GACZ,OAAOA,EAAK9T,UAAU7W,SAASwiC,GACjC,CAGA,gBAAAwB,CAAiBrZ,GACf,OAAOA,EAAKpJ,QAAQuhB,IAAuBnY,EAAOvJ,GAAeC,QAAQyhB,GAAqBnY,EAChG,CAGA,gBAAAwZ,CAAiBxZ,GACf,OAAOA,EAAKnU,QA5LO,gCA4LoBmU,CACzC,CAGA,sBAAO1S,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAOm9B,GAAIliB,oBAAoBtF,MACrC,GAAsB,iBAAX8D,EAAX,CAGA,QAAqB/K,IAAjB1O,EAAKyZ,IAAyBA,EAAOrC,WAAW,MAAmB,gBAAXqC,EAC1D,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,IAJL,CAKF,GACF,EAOFvD,GAAac,GAAGhc,SAAUkhC,GAAsBc,IAAsB,SAAUjoB,GAC1E,CAAC,IAAK,QAAQgC,SAASpB,KAAKgH,UAC9B5H,EAAMkD,iBAEJpH,GAAW8E,OAGfwnB,GAAIliB,oBAAoBtF,MAAM0P,MAChC,IAKAnP,GAAac,GAAGzhB,OAAQ6mC,IAAqB,KAC3C,IAAK,MAAMlnC,KAAWqmB,GAAezT,KAAKo1B,IACxCC,GAAIliB,oBAAoB/lB,EAC1B,IAMF4c,GAAmBqrB,IAcnB,MAEMxiB,GAAY,YACZ8jB,GAAkB,YAAY9jB,KAC9B+jB,GAAiB,WAAW/jB,KAC5BgkB,GAAgB,UAAUhkB,KAC1BikB,GAAiB,WAAWjkB,KAC5BkkB,GAAa,OAAOlkB,KACpBmkB,GAAe,SAASnkB,KACxBokB,GAAa,OAAOpkB,KACpBqkB,GAAc,QAAQrkB,KAEtBskB,GAAkB,OAClBC,GAAkB,OAClBC,GAAqB,UACrB7lB,GAAc,CAClBmc,UAAW,UACX2J,SAAU,UACVxJ,MAAO,UAEHvc,GAAU,CACdoc,WAAW,EACX2J,UAAU,EACVxJ,MAAO,KAOT,MAAMyJ,WAAchlB,GAClB,WAAAP,CAAY5kB,EAASukB,GACnBa,MAAMplB,EAASukB,GACf9D,KAAKsgB,SAAW,KAChBtgB,KAAK2pB,sBAAuB,EAC5B3pB,KAAK4pB,yBAA0B,EAC/B5pB,KAAK4gB,eACP,CAGA,kBAAWld,GACT,OAAOA,EACT,CACA,sBAAWC,GACT,OAAOA,EACT,CACA,eAAWpH,GACT,MA/CS,OAgDX,CAGA,IAAAmT,GACoBnP,GAAaqB,QAAQ5B,KAAK4E,SAAUwkB,IACxCpnB,mBAGdhC,KAAK6pB,gBACD7pB,KAAK6E,QAAQib,WACf9f,KAAK4E,SA
ASvJ,UAAU5E,IA/CN,QAsDpBuJ,KAAK4E,SAASvJ,UAAU1B,OAAO2vB,IAC/BztB,GAAOmE,KAAK4E,UACZ5E,KAAK4E,SAASvJ,UAAU5E,IAAI8yB,GAAiBC,IAC7CxpB,KAAKmF,gBARY,KACfnF,KAAK4E,SAASvJ,UAAU1B,OAAO6vB,IAC/BjpB,GAAaqB,QAAQ5B,KAAK4E,SAAUykB,IACpCrpB,KAAK8pB,oBAAoB,GAKG9pB,KAAK4E,SAAU5E,KAAK6E,QAAQib,WAC5D,CACA,IAAArQ,GACOzP,KAAK+pB,YAGQxpB,GAAaqB,QAAQ5B,KAAK4E,SAAUskB,IACxClnB,mBAQdhC,KAAK4E,SAASvJ,UAAU5E,IAAI+yB,IAC5BxpB,KAAKmF,gBANY,KACfnF,KAAK4E,SAASvJ,UAAU5E,IAAI6yB,IAC5BtpB,KAAK4E,SAASvJ,UAAU1B,OAAO6vB,GAAoBD,IACnDhpB,GAAaqB,QAAQ5B,KAAK4E,SAAUukB,GAAa,GAGrBnpB,KAAK4E,SAAU5E,KAAK6E,QAAQib,YAC5D,CACA,OAAA/a,GACE/E,KAAK6pB,gBACD7pB,KAAK+pB,WACP/pB,KAAK4E,SAASvJ,UAAU1B,OAAO4vB,IAEjC5kB,MAAMI,SACR,CACA,OAAAglB,GACE,OAAO/pB,KAAK4E,SAASvJ,UAAU7W,SAAS+kC,GAC1C,CAIA,kBAAAO,GACO9pB,KAAK6E,QAAQ4kB,WAGdzpB,KAAK2pB,sBAAwB3pB,KAAK4pB,0BAGtC5pB,KAAKsgB,SAAWziB,YAAW,KACzBmC,KAAKyP,MAAM,GACVzP,KAAK6E,QAAQob,QAClB,CACA,cAAA+J,CAAe5qB,EAAO6qB,GACpB,OAAQ7qB,EAAMqB,MACZ,IAAK,YACL,IAAK,WAEDT,KAAK2pB,qBAAuBM,EAC5B,MAEJ,IAAK,UACL,IAAK,WAEDjqB,KAAK4pB,wBAA0BK,EAIrC,GAAIA,EAEF,YADAjqB,KAAK6pB,gBAGP,MAAMvc,EAAclO,EAAMU,cACtBE,KAAK4E,WAAa0I,GAAetN,KAAK4E,SAASpgB,SAAS8oB,IAG5DtN,KAAK8pB,oBACP,CACA,aAAAlJ,GACErgB,GAAac,GAAGrB,KAAK4E,SAAUkkB,IAAiB1pB,GAASY,KAAKgqB,eAAe5qB,GAAO,KACpFmB,GAAac,GAAGrB,KAAK4E,SAAUmkB,IAAgB3pB,GAASY,KAAKgqB,eAAe5qB,GAAO,KACnFmB,GAAac,GAAGrB,KAAK4E,SAAUokB,IAAe5pB,GAASY,KAAKgqB,eAAe5qB,GAAO,KAClFmB,GAAac,GAAGrB,KAAK4E,SAAUqkB,IAAgB7pB,GAASY,KAAKgqB,eAAe5qB,GAAO,IACrF,CACA,aAAAyqB,GACE9c,aAAa/M,KAAKsgB,UAClBtgB,KAAKsgB,SAAW,IAClB,CAGA,sBAAO7jB,CAAgBqH,GACrB,OAAO9D,KAAKuH,MAAK,WACf,MAAMld,EAAOq/B,GAAMpkB,oBAAoBtF,KAAM8D,GAC7C,GAAsB,iBAAXA,EAAqB,CAC9B,QAA4B,IAAjBzZ,EAAKyZ,GACd,MAAM,IAAIU,UAAU,oBAAoBV,MAE1CzZ,EAAKyZ,GAAQ9D,KACf,CACF,GACF,ECr0IK,SAASkqB,GAAc7tB,GACD,WAAvBhX,SAASuX,WAAyBP,IACjChX,SAASyF,iBAAiB,mBAAoBuR,EACrD,CDy0IAuK,GAAqB8iB,IAMrBvtB,GAAmButB,IEpyInBQ,IAzCA,WAC2B,GAAG93B,MAAM5U,KAChC6H,SAAS+a,iBAAiB,+BAETtd,KAAI,SAAUqnC,GAC/B,OAAO,IAAI,GAAkBA,EAAkB,CAC7ClK,MAAO,CAAEvQ,KAAM,IA
AKD,KAAM,MAE9B,GACF,IAiCAya,IA5BA,WACY7kC,SAAS68B,eAAe,mBAC9Bp3B,iBAAiB,SAAS,WAC5BzF,SAAS6G,KAAKT,UAAY,EAC1BpG,SAASC,gBAAgBmG,UAAY,CACvC,GACF,IAuBAy+B,IArBA,WACE,IAAIE,EAAM/kC,SAAS68B,eAAe,mBAC9BmI,EAAShlC,SACVilC,uBAAuB,aAAa,GACpChnC,wBACH1D,OAAOkL,iBAAiB,UAAU,WAC5BkV,KAAKuqB,UAAYvqB,KAAKwqB,SAAWxqB,KAAKwqB,QAAUH,EAAOzsC,OACzDwsC,EAAIrpC,MAAM6wB,QAAU,QAEpBwY,EAAIrpC,MAAM6wB,QAAU,OAEtB5R,KAAKuqB,UAAYvqB,KAAKwqB,OACxB,GACF,IAUA5qC,OAAO6qC,UAAY","sources":["webpack://pydata_sphinx_theme/webpack/bootstrap","webpack://pydata_sphinx_theme/webpack/runtime/define property getters","webpack://pydata_sphinx_theme/webpack/runtime/hasOwnProperty shorthand","webpack://pydata_sphinx_theme/webpack/runtime/make namespace object","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/enums.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/instanceOf.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/applyStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getBasePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/math.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/userAgent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isLayoutViewport.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getBoundingClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getLayoutRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/contains.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getComputedStyle.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isTableElement.js","webpack://pyda
ta_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentElement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getParentNode.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getOffsetParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getMainAxisFromPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/within.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergePaddingObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getFreshSideObject.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/expandToHashMap.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/arrow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getVariation.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/computeStyles.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/eventListeners.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositePlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getOppositeVariationPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getWindowScrollBarX.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/isScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getScrollParent.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/listScrollParents.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/rectToClientRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getClippingRect.js","webpack://pydata_sphinx_theme/./node_mo
dules/@popperjs/core/lib/dom-utils/getViewportRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getDocumentRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/detectOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/flip.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/computeAutoPlacement.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/hide.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/offset.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/popperOffsets.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/modifiers/preventOverflow.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/getAltAxis.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getCompositeRect.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getNodeScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/dom-utils/getHTMLElementScroll.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/orderModifiers.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/createPopper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/debounce.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/utils/mergeByName.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper.js","webpack://pydata_sphinx_theme/./node_modules/@popperjs/core/lib/popper-lite.js","webpack://pydata_sphinx_theme/./node_modules/bootstrap/dist/js/bootstrap.esm.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/mixin.js","webpack://pydata_sphinx_theme/./src/pydata_sphinx_theme/assets/scripts/bootstrap.js"],"sourc
esContent":["// The require scope\nvar __webpack_require__ = {};\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","export var top = 'top';\nexport var bottom = 'bottom';\nexport var right = 'right';\nexport var left = 'left';\nexport var auto = 'auto';\nexport var basePlacements = [top, bottom, right, left];\nexport var start = 'start';\nexport var end = 'end';\nexport var clippingParents = 'clippingParents';\nexport var viewport = 'viewport';\nexport var popper = 'popper';\nexport var reference = 'reference';\nexport var variationPlacements = /*#__PURE__*/basePlacements.reduce(function (acc, placement) {\n return acc.concat([placement + \"-\" + start, placement + \"-\" + end]);\n}, []);\nexport var placements = /*#__PURE__*/[].concat(basePlacements, [auto]).reduce(function (acc, placement) {\n return acc.concat([placement, placement + \"-\" + start, placement + \"-\" + end]);\n}, []); // modifiers that need to read the DOM\n\nexport var beforeRead = 'beforeRead';\nexport var read = 'read';\nexport var afterRead = 'afterRead'; // pure-logic modifiers\n\nexport var beforeMain = 'beforeMain';\nexport var main = 'main';\nexport var afterMain = 'afterMain'; // modifier with the purpose to write to the DOM (or write into a framework state)\n\nexport var beforeWrite = 'beforeWrite';\nexport var write = 'write';\nexport 
var afterWrite = 'afterWrite';\nexport var modifierPhases = [beforeRead, read, afterRead, beforeMain, main, afterMain, beforeWrite, write, afterWrite];","export default function getNodeName(element) {\n return element ? (element.nodeName || '').toLowerCase() : null;\n}","export default function getWindow(node) {\n if (node == null) {\n return window;\n }\n\n if (node.toString() !== '[object Window]') {\n var ownerDocument = node.ownerDocument;\n return ownerDocument ? ownerDocument.defaultView || window : window;\n }\n\n return node;\n}","import getWindow from \"./getWindow.js\";\n\nfunction isElement(node) {\n var OwnElement = getWindow(node).Element;\n return node instanceof OwnElement || node instanceof Element;\n}\n\nfunction isHTMLElement(node) {\n var OwnElement = getWindow(node).HTMLElement;\n return node instanceof OwnElement || node instanceof HTMLElement;\n}\n\nfunction isShadowRoot(node) {\n // IE 11 has no ShadowRoot\n if (typeof ShadowRoot === 'undefined') {\n return false;\n }\n\n var OwnElement = getWindow(node).ShadowRoot;\n return node instanceof OwnElement || node instanceof ShadowRoot;\n}\n\nexport { isElement, isHTMLElement, isShadowRoot };","import getNodeName from \"../dom-utils/getNodeName.js\";\nimport { isHTMLElement } from \"../dom-utils/instanceOf.js\"; // This modifier takes the styles prepared by the `computeStyles` modifier\n// and applies them to the HTMLElements such as popper and arrow\n\nfunction applyStyles(_ref) {\n var state = _ref.state;\n Object.keys(state.elements).forEach(function (name) {\n var style = state.styles[name] || {};\n var attributes = state.attributes[name] || {};\n var element = state.elements[name]; // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n } // Flow doesn't support to extend this property, but it's the most\n // effective way to apply styles to an HTMLElement\n // $FlowFixMe[cannot-write]\n\n\n Object.assign(element.style, style);\n 
Object.keys(attributes).forEach(function (name) {\n var value = attributes[name];\n\n if (value === false) {\n element.removeAttribute(name);\n } else {\n element.setAttribute(name, value === true ? '' : value);\n }\n });\n });\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state;\n var initialStyles = {\n popper: {\n position: state.options.strategy,\n left: '0',\n top: '0',\n margin: '0'\n },\n arrow: {\n position: 'absolute'\n },\n reference: {}\n };\n Object.assign(state.elements.popper.style, initialStyles.popper);\n state.styles = initialStyles;\n\n if (state.elements.arrow) {\n Object.assign(state.elements.arrow.style, initialStyles.arrow);\n }\n\n return function () {\n Object.keys(state.elements).forEach(function (name) {\n var element = state.elements[name];\n var attributes = state.attributes[name] || {};\n var styleProperties = Object.keys(state.styles.hasOwnProperty(name) ? state.styles[name] : initialStyles[name]); // Set all values to an empty string to unset them\n\n var style = styleProperties.reduce(function (style, property) {\n style[property] = '';\n return style;\n }, {}); // arrow is optional + virtual elements\n\n if (!isHTMLElement(element) || !getNodeName(element)) {\n return;\n }\n\n Object.assign(element.style, style);\n Object.keys(attributes).forEach(function (attribute) {\n element.removeAttribute(attribute);\n });\n });\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'applyStyles',\n enabled: true,\n phase: 'write',\n fn: applyStyles,\n effect: effect,\n requires: ['computeStyles']\n};","import { auto } from \"../enums.js\";\nexport default function getBasePlacement(placement) {\n return placement.split('-')[0];\n}","export var max = Math.max;\nexport var min = Math.min;\nexport var round = Math.round;","export default function getUAString() {\n var uaData = navigator.userAgentData;\n\n if (uaData != null && uaData.brands && Array.isArray(uaData.brands)) {\n return 
uaData.brands.map(function (item) {\n return item.brand + \"/\" + item.version;\n }).join(' ');\n }\n\n return navigator.userAgent;\n}","import getUAString from \"../utils/userAgent.js\";\nexport default function isLayoutViewport() {\n return !/^((?!chrome|android).)*safari/i.test(getUAString());\n}","import { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport { round } from \"../utils/math.js\";\nimport getWindow from \"./getWindow.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getBoundingClientRect(element, includeScale, isFixedStrategy) {\n if (includeScale === void 0) {\n includeScale = false;\n }\n\n if (isFixedStrategy === void 0) {\n isFixedStrategy = false;\n }\n\n var clientRect = element.getBoundingClientRect();\n var scaleX = 1;\n var scaleY = 1;\n\n if (includeScale && isHTMLElement(element)) {\n scaleX = element.offsetWidth > 0 ? round(clientRect.width) / element.offsetWidth || 1 : 1;\n scaleY = element.offsetHeight > 0 ? round(clientRect.height) / element.offsetHeight || 1 : 1;\n }\n\n var _ref = isElement(element) ? getWindow(element) : window,\n visualViewport = _ref.visualViewport;\n\n var addVisualOffsets = !isLayoutViewport() && isFixedStrategy;\n var x = (clientRect.left + (addVisualOffsets && visualViewport ? visualViewport.offsetLeft : 0)) / scaleX;\n var y = (clientRect.top + (addVisualOffsets && visualViewport ? visualViewport.offsetTop : 0)) / scaleY;\n var width = clientRect.width / scaleX;\n var height = clientRect.height / scaleY;\n return {\n width: width,\n height: height,\n top: y,\n right: x + width,\n bottom: y + height,\n left: x,\n x: x,\n y: y\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\"; // Returns the layout rect of an element relative to its offsetParent. 
Layout\n// means it doesn't take into account transforms.\n\nexport default function getLayoutRect(element) {\n var clientRect = getBoundingClientRect(element); // Use the clientRect sizes if it's not been transformed.\n // Fixes https://github.com/popperjs/popper-core/issues/1223\n\n var width = element.offsetWidth;\n var height = element.offsetHeight;\n\n if (Math.abs(clientRect.width - width) <= 1) {\n width = clientRect.width;\n }\n\n if (Math.abs(clientRect.height - height) <= 1) {\n height = clientRect.height;\n }\n\n return {\n x: element.offsetLeft,\n y: element.offsetTop,\n width: width,\n height: height\n };\n}","import { isShadowRoot } from \"./instanceOf.js\";\nexport default function contains(parent, child) {\n var rootNode = child.getRootNode && child.getRootNode(); // First, attempt with faster native method\n\n if (parent.contains(child)) {\n return true;\n } // then fallback to custom implementation with Shadow DOM support\n else if (rootNode && isShadowRoot(rootNode)) {\n var next = child;\n\n do {\n if (next && parent.isSameNode(next)) {\n return true;\n } // $FlowFixMe[prop-missing]: need a better way to handle this...\n\n\n next = next.parentNode || next.host;\n } while (next);\n } // Give up, the result is false\n\n\n return false;\n}","import getWindow from \"./getWindow.js\";\nexport default function getComputedStyle(element) {\n return getWindow(element).getComputedStyle(element);\n}","import getNodeName from \"./getNodeName.js\";\nexport default function isTableElement(element) {\n return ['table', 'td', 'th'].indexOf(getNodeName(element)) >= 0;\n}","import { isElement } from \"./instanceOf.js\";\nexport default function getDocumentElement(element) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return ((isElement(element) ? 
element.ownerDocument : // $FlowFixMe[prop-missing]\n element.document) || window.document).documentElement;\n}","import getNodeName from \"./getNodeName.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport { isShadowRoot } from \"./instanceOf.js\";\nexport default function getParentNode(element) {\n if (getNodeName(element) === 'html') {\n return element;\n }\n\n return (// this is a quicker (but less type safe) way to save quite some bytes from the bundle\n // $FlowFixMe[incompatible-return]\n // $FlowFixMe[prop-missing]\n element.assignedSlot || // step into the shadow DOM of the parent of a slotted node\n element.parentNode || ( // DOM Element detected\n isShadowRoot(element) ? element.host : null) || // ShadowRoot detected\n // $FlowFixMe[incompatible-call]: HTMLElement is a Node\n getDocumentElement(element) // fallback\n\n );\n}","import getWindow from \"./getWindow.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isHTMLElement, isShadowRoot } from \"./instanceOf.js\";\nimport isTableElement from \"./isTableElement.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getUAString from \"../utils/userAgent.js\";\n\nfunction getTrueOffsetParent(element) {\n if (!isHTMLElement(element) || // https://github.com/popperjs/popper-core/issues/837\n getComputedStyle(element).position === 'fixed') {\n return null;\n }\n\n return element.offsetParent;\n} // `.offsetParent` reports `null` for fixed elements, while absolute elements\n// return the containing block\n\n\nfunction getContainingBlock(element) {\n var isFirefox = /firefox/i.test(getUAString());\n var isIE = /Trident/i.test(getUAString());\n\n if (isIE && isHTMLElement(element)) {\n // In IE 9, 10 and 11 fixed elements containing block is always established by the viewport\n var elementCss = getComputedStyle(element);\n\n if (elementCss.position === 'fixed') {\n return null;\n }\n }\n\n var currentNode = 
getParentNode(element);\n\n if (isShadowRoot(currentNode)) {\n currentNode = currentNode.host;\n }\n\n while (isHTMLElement(currentNode) && ['html', 'body'].indexOf(getNodeName(currentNode)) < 0) {\n var css = getComputedStyle(currentNode); // This is non-exhaustive but covers the most common CSS properties that\n // create a containing block.\n // https://developer.mozilla.org/en-US/docs/Web/CSS/Containing_block#identifying_the_containing_block\n\n if (css.transform !== 'none' || css.perspective !== 'none' || css.contain === 'paint' || ['transform', 'perspective'].indexOf(css.willChange) !== -1 || isFirefox && css.willChange === 'filter' || isFirefox && css.filter && css.filter !== 'none') {\n return currentNode;\n } else {\n currentNode = currentNode.parentNode;\n }\n }\n\n return null;\n} // Gets the closest ancestor positioned element. Handles some edge cases,\n// such as table ancestors and cross browser bugs.\n\n\nexport default function getOffsetParent(element) {\n var window = getWindow(element);\n var offsetParent = getTrueOffsetParent(element);\n\n while (offsetParent && isTableElement(offsetParent) && getComputedStyle(offsetParent).position === 'static') {\n offsetParent = getTrueOffsetParent(offsetParent);\n }\n\n if (offsetParent && (getNodeName(offsetParent) === 'html' || getNodeName(offsetParent) === 'body' && getComputedStyle(offsetParent).position === 'static')) {\n return window;\n }\n\n return offsetParent || getContainingBlock(element) || window;\n}","export default function getMainAxisFromPlacement(placement) {\n return ['top', 'bottom'].indexOf(placement) >= 0 ? 'x' : 'y';\n}","import { max as mathMax, min as mathMin } from \"./math.js\";\nexport function within(min, value, max) {\n return mathMax(min, mathMin(value, max));\n}\nexport function withinMaxClamp(min, value, max) {\n var v = within(min, value, max);\n return v > max ? 
max : v;\n}","import getFreshSideObject from \"./getFreshSideObject.js\";\nexport default function mergePaddingObject(paddingObject) {\n return Object.assign({}, getFreshSideObject(), paddingObject);\n}","export default function getFreshSideObject() {\n return {\n top: 0,\n right: 0,\n bottom: 0,\n left: 0\n };\n}","export default function expandToHashMap(value, keys) {\n return keys.reduce(function (hashMap, key) {\n hashMap[key] = value;\n return hashMap;\n }, {});\n}","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport contains from \"../dom-utils/contains.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport { within } from \"../utils/within.js\";\nimport mergePaddingObject from \"../utils/mergePaddingObject.js\";\nimport expandToHashMap from \"../utils/expandToHashMap.js\";\nimport { left, right, basePlacements, top, bottom } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar toPaddingObject = function toPaddingObject(padding, state) {\n padding = typeof padding === 'function' ? padding(Object.assign({}, state.rects, {\n placement: state.placement\n })) : padding;\n return mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n};\n\nfunction arrow(_ref) {\n var _state$modifiersData$;\n\n var state = _ref.state,\n name = _ref.name,\n options = _ref.options;\n var arrowElement = state.elements.arrow;\n var popperOffsets = state.modifiersData.popperOffsets;\n var basePlacement = getBasePlacement(state.placement);\n var axis = getMainAxisFromPlacement(basePlacement);\n var isVertical = [left, right].indexOf(basePlacement) >= 0;\n var len = isVertical ? 
'height' : 'width';\n\n if (!arrowElement || !popperOffsets) {\n return;\n }\n\n var paddingObject = toPaddingObject(options.padding, state);\n var arrowRect = getLayoutRect(arrowElement);\n var minProp = axis === 'y' ? top : left;\n var maxProp = axis === 'y' ? bottom : right;\n var endDiff = state.rects.reference[len] + state.rects.reference[axis] - popperOffsets[axis] - state.rects.popper[len];\n var startDiff = popperOffsets[axis] - state.rects.reference[axis];\n var arrowOffsetParent = getOffsetParent(arrowElement);\n var clientSize = arrowOffsetParent ? axis === 'y' ? arrowOffsetParent.clientHeight || 0 : arrowOffsetParent.clientWidth || 0 : 0;\n var centerToReference = endDiff / 2 - startDiff / 2; // Make sure the arrow doesn't overflow the popper if the center point is\n // outside of the popper bounds\n\n var min = paddingObject[minProp];\n var max = clientSize - arrowRect[len] - paddingObject[maxProp];\n var center = clientSize / 2 - arrowRect[len] / 2 + centerToReference;\n var offset = within(min, center, max); // Prevents breaking syntax highlighting...\n\n var axisProp = axis;\n state.modifiersData[name] = (_state$modifiersData$ = {}, _state$modifiersData$[axisProp] = offset, _state$modifiersData$.centerOffset = offset - center, _state$modifiersData$);\n}\n\nfunction effect(_ref2) {\n var state = _ref2.state,\n options = _ref2.options;\n var _options$element = options.element,\n arrowElement = _options$element === void 0 ? 
'[data-popper-arrow]' : _options$element;\n\n if (arrowElement == null) {\n return;\n } // CSS selector\n\n\n if (typeof arrowElement === 'string') {\n arrowElement = state.elements.popper.querySelector(arrowElement);\n\n if (!arrowElement) {\n return;\n }\n }\n\n if (!contains(state.elements.popper, arrowElement)) {\n return;\n }\n\n state.elements.arrow = arrowElement;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'arrow',\n enabled: true,\n phase: 'main',\n fn: arrow,\n effect: effect,\n requires: ['popperOffsets'],\n requiresIfExists: ['preventOverflow']\n};","export default function getVariation(placement) {\n return placement.split('-')[1];\n}","import { top, left, right, bottom, end } from \"../enums.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport getWindow from \"../dom-utils/getWindow.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getComputedStyle from \"../dom-utils/getComputedStyle.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport { round } from \"../utils/math.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar unsetSides = {\n top: 'auto',\n right: 'auto',\n bottom: 'auto',\n left: 'auto'\n}; // Round the offsets to the nearest suitable subpixel based on the DPR.\n// Zooming can change the DPR, but it seems to report a value that will\n// cleanly divide the values into the appropriate subpixels.\n\nfunction roundOffsetsByDPR(_ref, win) {\n var x = _ref.x,\n y = _ref.y;\n var dpr = win.devicePixelRatio || 1;\n return {\n x: round(x * dpr) / dpr || 0,\n y: round(y * dpr) / dpr || 0\n };\n}\n\nexport function mapToStyles(_ref2) {\n var _Object$assign2;\n\n var popper = _ref2.popper,\n popperRect = _ref2.popperRect,\n placement = _ref2.placement,\n variation = _ref2.variation,\n offsets = _ref2.offsets,\n position = _ref2.position,\n gpuAcceleration = 
_ref2.gpuAcceleration,\n adaptive = _ref2.adaptive,\n roundOffsets = _ref2.roundOffsets,\n isFixed = _ref2.isFixed;\n var _offsets$x = offsets.x,\n x = _offsets$x === void 0 ? 0 : _offsets$x,\n _offsets$y = offsets.y,\n y = _offsets$y === void 0 ? 0 : _offsets$y;\n\n var _ref3 = typeof roundOffsets === 'function' ? roundOffsets({\n x: x,\n y: y\n }) : {\n x: x,\n y: y\n };\n\n x = _ref3.x;\n y = _ref3.y;\n var hasX = offsets.hasOwnProperty('x');\n var hasY = offsets.hasOwnProperty('y');\n var sideX = left;\n var sideY = top;\n var win = window;\n\n if (adaptive) {\n var offsetParent = getOffsetParent(popper);\n var heightProp = 'clientHeight';\n var widthProp = 'clientWidth';\n\n if (offsetParent === getWindow(popper)) {\n offsetParent = getDocumentElement(popper);\n\n if (getComputedStyle(offsetParent).position !== 'static' && position === 'absolute') {\n heightProp = 'scrollHeight';\n widthProp = 'scrollWidth';\n }\n } // $FlowFixMe[incompatible-cast]: force type refinement, we compare offsetParent with window above, but Flow doesn't detect it\n\n\n offsetParent = offsetParent;\n\n if (placement === top || (placement === left || placement === right) && variation === end) {\n sideY = bottom;\n var offsetY = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.height : // $FlowFixMe[prop-missing]\n offsetParent[heightProp];\n y -= offsetY - popperRect.height;\n y *= gpuAcceleration ? 1 : -1;\n }\n\n if (placement === left || (placement === top || placement === bottom) && variation === end) {\n sideX = right;\n var offsetX = isFixed && offsetParent === win && win.visualViewport ? win.visualViewport.width : // $FlowFixMe[prop-missing]\n offsetParent[widthProp];\n x -= offsetX - popperRect.width;\n x *= gpuAcceleration ? 1 : -1;\n }\n }\n\n var commonStyles = Object.assign({\n position: position\n }, adaptive && unsetSides);\n\n var _ref4 = roundOffsets === true ? 
roundOffsetsByDPR({\n x: x,\n y: y\n }, getWindow(popper)) : {\n x: x,\n y: y\n };\n\n x = _ref4.x;\n y = _ref4.y;\n\n if (gpuAcceleration) {\n var _Object$assign;\n\n return Object.assign({}, commonStyles, (_Object$assign = {}, _Object$assign[sideY] = hasY ? '0' : '', _Object$assign[sideX] = hasX ? '0' : '', _Object$assign.transform = (win.devicePixelRatio || 1) <= 1 ? \"translate(\" + x + \"px, \" + y + \"px)\" : \"translate3d(\" + x + \"px, \" + y + \"px, 0)\", _Object$assign));\n }\n\n return Object.assign({}, commonStyles, (_Object$assign2 = {}, _Object$assign2[sideY] = hasY ? y + \"px\" : '', _Object$assign2[sideX] = hasX ? x + \"px\" : '', _Object$assign2.transform = '', _Object$assign2));\n}\n\nfunction computeStyles(_ref5) {\n var state = _ref5.state,\n options = _ref5.options;\n var _options$gpuAccelerat = options.gpuAcceleration,\n gpuAcceleration = _options$gpuAccelerat === void 0 ? true : _options$gpuAccelerat,\n _options$adaptive = options.adaptive,\n adaptive = _options$adaptive === void 0 ? true : _options$adaptive,\n _options$roundOffsets = options.roundOffsets,\n roundOffsets = _options$roundOffsets === void 0 ? 
true : _options$roundOffsets;\n var commonStyles = {\n placement: getBasePlacement(state.placement),\n variation: getVariation(state.placement),\n popper: state.elements.popper,\n popperRect: state.rects.popper,\n gpuAcceleration: gpuAcceleration,\n isFixed: state.options.strategy === 'fixed'\n };\n\n if (state.modifiersData.popperOffsets != null) {\n state.styles.popper = Object.assign({}, state.styles.popper, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.popperOffsets,\n position: state.options.strategy,\n adaptive: adaptive,\n roundOffsets: roundOffsets\n })));\n }\n\n if (state.modifiersData.arrow != null) {\n state.styles.arrow = Object.assign({}, state.styles.arrow, mapToStyles(Object.assign({}, commonStyles, {\n offsets: state.modifiersData.arrow,\n position: 'absolute',\n adaptive: false,\n roundOffsets: roundOffsets\n })));\n }\n\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-placement': state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'computeStyles',\n enabled: true,\n phase: 'beforeWrite',\n fn: computeStyles,\n data: {}\n};","import getWindow from \"../dom-utils/getWindow.js\"; // eslint-disable-next-line import/no-unused-modules\n\nvar passive = {\n passive: true\n};\n\nfunction effect(_ref) {\n var state = _ref.state,\n instance = _ref.instance,\n options = _ref.options;\n var _options$scroll = options.scroll,\n scroll = _options$scroll === void 0 ? true : _options$scroll,\n _options$resize = options.resize,\n resize = _options$resize === void 0 ? 
true : _options$resize;\n var window = getWindow(state.elements.popper);\n var scrollParents = [].concat(state.scrollParents.reference, state.scrollParents.popper);\n\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.addEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.addEventListener('resize', instance.update, passive);\n }\n\n return function () {\n if (scroll) {\n scrollParents.forEach(function (scrollParent) {\n scrollParent.removeEventListener('scroll', instance.update, passive);\n });\n }\n\n if (resize) {\n window.removeEventListener('resize', instance.update, passive);\n }\n };\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'eventListeners',\n enabled: true,\n phase: 'write',\n fn: function fn() {},\n effect: effect,\n data: {}\n};","var hash = {\n left: 'right',\n right: 'left',\n bottom: 'top',\n top: 'bottom'\n};\nexport default function getOppositePlacement(placement) {\n return placement.replace(/left|right|bottom|top/g, function (matched) {\n return hash[matched];\n });\n}","var hash = {\n start: 'end',\n end: 'start'\n};\nexport default function getOppositeVariationPlacement(placement) {\n return placement.replace(/start|end/g, function (matched) {\n return hash[matched];\n });\n}","import getWindow from \"./getWindow.js\";\nexport default function getWindowScroll(node) {\n var win = getWindow(node);\n var scrollLeft = win.pageXOffset;\n var scrollTop = win.pageYOffset;\n return {\n scrollLeft: scrollLeft,\n scrollTop: scrollTop\n };\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nexport default function getWindowScrollBarX(element) {\n // If has a CSS width greater than the viewport, then this will be\n // incorrect for RTL.\n // Popper 1 is broken in this case and never had a bug report so let's assume\n 
// it's not an issue. I don't think anyone ever specifies width on \n // anyway.\n // Browsers where the left scrollbar doesn't cause an issue report `0` for\n // this (e.g. Edge 2019, IE11, Safari)\n return getBoundingClientRect(getDocumentElement(element)).left + getWindowScroll(element).scrollLeft;\n}","import getComputedStyle from \"./getComputedStyle.js\";\nexport default function isScrollParent(element) {\n // Firefox wants us to check `-x` and `-y` variations as well\n var _getComputedStyle = getComputedStyle(element),\n overflow = _getComputedStyle.overflow,\n overflowX = _getComputedStyle.overflowX,\n overflowY = _getComputedStyle.overflowY;\n\n return /auto|scroll|overlay|hidden/.test(overflow + overflowY + overflowX);\n}","import getParentNode from \"./getParentNode.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nexport default function getScrollParent(node) {\n if (['html', 'body', '#document'].indexOf(getNodeName(node)) >= 0) {\n // $FlowFixMe[incompatible-return]: assume body is always available\n return node.ownerDocument.body;\n }\n\n if (isHTMLElement(node) && isScrollParent(node)) {\n return node;\n }\n\n return getScrollParent(getParentNode(node));\n}","import getScrollParent from \"./getScrollParent.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport getWindow from \"./getWindow.js\";\nimport isScrollParent from \"./isScrollParent.js\";\n/*\ngiven a DOM element, return the list of all scroll parents, up the list of ancesors\nuntil we get to the top window object. 
This list is what we attach scroll listeners\nto, because if any of these parent elements scroll, we'll need to re-calculate the\nreference element's position.\n*/\n\nexport default function listScrollParents(element, list) {\n var _element$ownerDocumen;\n\n if (list === void 0) {\n list = [];\n }\n\n var scrollParent = getScrollParent(element);\n var isBody = scrollParent === ((_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body);\n var win = getWindow(scrollParent);\n var target = isBody ? [win].concat(win.visualViewport || [], isScrollParent(scrollParent) ? scrollParent : []) : scrollParent;\n var updatedList = list.concat(target);\n return isBody ? updatedList : // $FlowFixMe[incompatible-call]: isBody tells us target will be an HTMLElement here\n updatedList.concat(listScrollParents(getParentNode(target)));\n}","export default function rectToClientRect(rect) {\n return Object.assign({}, rect, {\n left: rect.x,\n top: rect.y,\n right: rect.x + rect.width,\n bottom: rect.y + rect.height\n });\n}","import { viewport } from \"../enums.js\";\nimport getViewportRect from \"./getViewportRect.js\";\nimport getDocumentRect from \"./getDocumentRect.js\";\nimport listScrollParents from \"./listScrollParents.js\";\nimport getOffsetParent from \"./getOffsetParent.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport { isElement, isHTMLElement } from \"./instanceOf.js\";\nimport getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getParentNode from \"./getParentNode.js\";\nimport contains from \"./contains.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport rectToClientRect from \"../utils/rectToClientRect.js\";\nimport { max, min } from \"../utils/math.js\";\n\nfunction getInnerBoundingClientRect(element, strategy) {\n var rect = getBoundingClientRect(element, false, strategy === 'fixed');\n rect.top = rect.top + 
element.clientTop;\n rect.left = rect.left + element.clientLeft;\n rect.bottom = rect.top + element.clientHeight;\n rect.right = rect.left + element.clientWidth;\n rect.width = element.clientWidth;\n rect.height = element.clientHeight;\n rect.x = rect.left;\n rect.y = rect.top;\n return rect;\n}\n\nfunction getClientRectFromMixedType(element, clippingParent, strategy) {\n return clippingParent === viewport ? rectToClientRect(getViewportRect(element, strategy)) : isElement(clippingParent) ? getInnerBoundingClientRect(clippingParent, strategy) : rectToClientRect(getDocumentRect(getDocumentElement(element)));\n} // A \"clipping parent\" is an overflowable container with the characteristic of\n// clipping (or hiding) overflowing elements with a position different from\n// `initial`\n\n\nfunction getClippingParents(element) {\n var clippingParents = listScrollParents(getParentNode(element));\n var canEscapeClipping = ['absolute', 'fixed'].indexOf(getComputedStyle(element).position) >= 0;\n var clipperElement = canEscapeClipping && isHTMLElement(element) ? getOffsetParent(element) : element;\n\n if (!isElement(clipperElement)) {\n return [];\n } // $FlowFixMe[incompatible-return]: https://github.com/facebook/flow/issues/1414\n\n\n return clippingParents.filter(function (clippingParent) {\n return isElement(clippingParent) && contains(clippingParent, clipperElement) && getNodeName(clippingParent) !== 'body';\n });\n} // Gets the maximum area that the element is visible in due to any number of\n// clipping parents\n\n\nexport default function getClippingRect(element, boundary, rootBoundary, strategy) {\n var mainClippingParents = boundary === 'clippingParents' ? 
getClippingParents(element) : [].concat(boundary);\n var clippingParents = [].concat(mainClippingParents, [rootBoundary]);\n var firstClippingParent = clippingParents[0];\n var clippingRect = clippingParents.reduce(function (accRect, clippingParent) {\n var rect = getClientRectFromMixedType(element, clippingParent, strategy);\n accRect.top = max(rect.top, accRect.top);\n accRect.right = min(rect.right, accRect.right);\n accRect.bottom = min(rect.bottom, accRect.bottom);\n accRect.left = max(rect.left, accRect.left);\n return accRect;\n }, getClientRectFromMixedType(element, firstClippingParent, strategy));\n clippingRect.width = clippingRect.right - clippingRect.left;\n clippingRect.height = clippingRect.bottom - clippingRect.top;\n clippingRect.x = clippingRect.left;\n clippingRect.y = clippingRect.top;\n return clippingRect;\n}","import getWindow from \"./getWindow.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport isLayoutViewport from \"./isLayoutViewport.js\";\nexport default function getViewportRect(element, strategy) {\n var win = getWindow(element);\n var html = getDocumentElement(element);\n var visualViewport = win.visualViewport;\n var width = html.clientWidth;\n var height = html.clientHeight;\n var x = 0;\n var y = 0;\n\n if (visualViewport) {\n width = visualViewport.width;\n height = visualViewport.height;\n var layoutViewport = isLayoutViewport();\n\n if (layoutViewport || !layoutViewport && strategy === 'fixed') {\n x = visualViewport.offsetLeft;\n y = visualViewport.offsetTop;\n }\n }\n\n return {\n width: width,\n height: height,\n x: x + getWindowScrollBarX(element),\n y: y\n };\n}","import getDocumentElement from \"./getDocumentElement.js\";\nimport getComputedStyle from \"./getComputedStyle.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getWindowScroll from \"./getWindowScroll.js\";\nimport { max } from \"../utils/math.js\"; // 
Gets the entire size of the scrollable document area, even extending outside\n// of the `` and `` rect bounds if horizontally scrollable\n\nexport default function getDocumentRect(element) {\n var _element$ownerDocumen;\n\n var html = getDocumentElement(element);\n var winScroll = getWindowScroll(element);\n var body = (_element$ownerDocumen = element.ownerDocument) == null ? void 0 : _element$ownerDocumen.body;\n var width = max(html.scrollWidth, html.clientWidth, body ? body.scrollWidth : 0, body ? body.clientWidth : 0);\n var height = max(html.scrollHeight, html.clientHeight, body ? body.scrollHeight : 0, body ? body.clientHeight : 0);\n var x = -winScroll.scrollLeft + getWindowScrollBarX(element);\n var y = -winScroll.scrollTop;\n\n if (getComputedStyle(body || html).direction === 'rtl') {\n x += max(html.clientWidth, body ? body.clientWidth : 0) - width;\n }\n\n return {\n width: width,\n height: height,\n x: x,\n y: y\n };\n}","import getBasePlacement from \"./getBasePlacement.js\";\nimport getVariation from \"./getVariation.js\";\nimport getMainAxisFromPlacement from \"./getMainAxisFromPlacement.js\";\nimport { top, right, bottom, left, start, end } from \"../enums.js\";\nexport default function computeOffsets(_ref) {\n var reference = _ref.reference,\n element = _ref.element,\n placement = _ref.placement;\n var basePlacement = placement ? getBasePlacement(placement) : null;\n var variation = placement ? 
getVariation(placement) : null;\n var commonX = reference.x + reference.width / 2 - element.width / 2;\n var commonY = reference.y + reference.height / 2 - element.height / 2;\n var offsets;\n\n switch (basePlacement) {\n case top:\n offsets = {\n x: commonX,\n y: reference.y - element.height\n };\n break;\n\n case bottom:\n offsets = {\n x: commonX,\n y: reference.y + reference.height\n };\n break;\n\n case right:\n offsets = {\n x: reference.x + reference.width,\n y: commonY\n };\n break;\n\n case left:\n offsets = {\n x: reference.x - element.width,\n y: commonY\n };\n break;\n\n default:\n offsets = {\n x: reference.x,\n y: reference.y\n };\n }\n\n var mainAxis = basePlacement ? getMainAxisFromPlacement(basePlacement) : null;\n\n if (mainAxis != null) {\n var len = mainAxis === 'y' ? 'height' : 'width';\n\n switch (variation) {\n case start:\n offsets[mainAxis] = offsets[mainAxis] - (reference[len] / 2 - element[len] / 2);\n break;\n\n case end:\n offsets[mainAxis] = offsets[mainAxis] + (reference[len] / 2 - element[len] / 2);\n break;\n\n default:\n }\n }\n\n return offsets;\n}","import getClippingRect from \"../dom-utils/getClippingRect.js\";\nimport getDocumentElement from \"../dom-utils/getDocumentElement.js\";\nimport getBoundingClientRect from \"../dom-utils/getBoundingClientRect.js\";\nimport computeOffsets from \"./computeOffsets.js\";\nimport rectToClientRect from \"./rectToClientRect.js\";\nimport { clippingParents, reference, popper, bottom, top, right, basePlacements, viewport } from \"../enums.js\";\nimport { isElement } from \"../dom-utils/instanceOf.js\";\nimport mergePaddingObject from \"./mergePaddingObject.js\";\nimport expandToHashMap from \"./expandToHashMap.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport default function detectOverflow(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n _options$placement = _options.placement,\n placement = _options$placement === void 0 ? 
state.placement : _options$placement,\n _options$strategy = _options.strategy,\n strategy = _options$strategy === void 0 ? state.strategy : _options$strategy,\n _options$boundary = _options.boundary,\n boundary = _options$boundary === void 0 ? clippingParents : _options$boundary,\n _options$rootBoundary = _options.rootBoundary,\n rootBoundary = _options$rootBoundary === void 0 ? viewport : _options$rootBoundary,\n _options$elementConte = _options.elementContext,\n elementContext = _options$elementConte === void 0 ? popper : _options$elementConte,\n _options$altBoundary = _options.altBoundary,\n altBoundary = _options$altBoundary === void 0 ? false : _options$altBoundary,\n _options$padding = _options.padding,\n padding = _options$padding === void 0 ? 0 : _options$padding;\n var paddingObject = mergePaddingObject(typeof padding !== 'number' ? padding : expandToHashMap(padding, basePlacements));\n var altContext = elementContext === popper ? reference : popper;\n var popperRect = state.rects.popper;\n var element = state.elements[altBoundary ? altContext : elementContext];\n var clippingClientRect = getClippingRect(isElement(element) ? element : element.contextElement || getDocumentElement(state.elements.popper), boundary, rootBoundary, strategy);\n var referenceClientRect = getBoundingClientRect(state.elements.reference);\n var popperOffsets = computeOffsets({\n reference: referenceClientRect,\n element: popperRect,\n strategy: 'absolute',\n placement: placement\n });\n var popperClientRect = rectToClientRect(Object.assign({}, popperRect, popperOffsets));\n var elementClientRect = elementContext === popper ? 
popperClientRect : referenceClientRect; // positive = overflowing the clipping rect\n // 0 or negative = within the clipping rect\n\n var overflowOffsets = {\n top: clippingClientRect.top - elementClientRect.top + paddingObject.top,\n bottom: elementClientRect.bottom - clippingClientRect.bottom + paddingObject.bottom,\n left: clippingClientRect.left - elementClientRect.left + paddingObject.left,\n right: elementClientRect.right - clippingClientRect.right + paddingObject.right\n };\n var offsetData = state.modifiersData.offset; // Offsets can be applied only to the popper element\n\n if (elementContext === popper && offsetData) {\n var offset = offsetData[placement];\n Object.keys(overflowOffsets).forEach(function (key) {\n var multiply = [right, bottom].indexOf(key) >= 0 ? 1 : -1;\n var axis = [top, bottom].indexOf(key) >= 0 ? 'y' : 'x';\n overflowOffsets[key] += offset[axis] * multiply;\n });\n }\n\n return overflowOffsets;\n}","import getOppositePlacement from \"../utils/getOppositePlacement.js\";\nimport getBasePlacement from \"../utils/getBasePlacement.js\";\nimport getOppositeVariationPlacement from \"../utils/getOppositeVariationPlacement.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport computeAutoPlacement from \"../utils/computeAutoPlacement.js\";\nimport { bottom, top, start, right, left, auto } from \"../enums.js\";\nimport getVariation from \"../utils/getVariation.js\"; // eslint-disable-next-line import/no-unused-modules\n\nfunction getExpandedFallbackPlacements(placement) {\n if (getBasePlacement(placement) === auto) {\n return [];\n }\n\n var oppositePlacement = getOppositePlacement(placement);\n return [getOppositeVariationPlacement(placement), oppositePlacement, getOppositeVariationPlacement(oppositePlacement)];\n}\n\nfunction flip(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n\n if (state.modifiersData[name]._skip) {\n return;\n }\n\n var _options$mainAxis = options.mainAxis,\n 
checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? true : _options$altAxis,\n specifiedFallbackPlacements = options.fallbackPlacements,\n padding = options.padding,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n _options$flipVariatio = options.flipVariations,\n flipVariations = _options$flipVariatio === void 0 ? true : _options$flipVariatio,\n allowedAutoPlacements = options.allowedAutoPlacements;\n var preferredPlacement = state.options.placement;\n var basePlacement = getBasePlacement(preferredPlacement);\n var isBasePlacement = basePlacement === preferredPlacement;\n var fallbackPlacements = specifiedFallbackPlacements || (isBasePlacement || !flipVariations ? [getOppositePlacement(preferredPlacement)] : getExpandedFallbackPlacements(preferredPlacement));\n var placements = [preferredPlacement].concat(fallbackPlacements).reduce(function (acc, placement) {\n return acc.concat(getBasePlacement(placement) === auto ? computeAutoPlacement(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n flipVariations: flipVariations,\n allowedAutoPlacements: allowedAutoPlacements\n }) : placement);\n }, []);\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var checksMap = new Map();\n var makeFallbackChecks = true;\n var firstFittingPlacement = placements[0];\n\n for (var i = 0; i < placements.length; i++) {\n var placement = placements[i];\n\n var _basePlacement = getBasePlacement(placement);\n\n var isStartVariation = getVariation(placement) === start;\n var isVertical = [top, bottom].indexOf(_basePlacement) >= 0;\n var len = isVertical ? 
'width' : 'height';\n var overflow = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n altBoundary: altBoundary,\n padding: padding\n });\n var mainVariationSide = isVertical ? isStartVariation ? right : left : isStartVariation ? bottom : top;\n\n if (referenceRect[len] > popperRect[len]) {\n mainVariationSide = getOppositePlacement(mainVariationSide);\n }\n\n var altVariationSide = getOppositePlacement(mainVariationSide);\n var checks = [];\n\n if (checkMainAxis) {\n checks.push(overflow[_basePlacement] <= 0);\n }\n\n if (checkAltAxis) {\n checks.push(overflow[mainVariationSide] <= 0, overflow[altVariationSide] <= 0);\n }\n\n if (checks.every(function (check) {\n return check;\n })) {\n firstFittingPlacement = placement;\n makeFallbackChecks = false;\n break;\n }\n\n checksMap.set(placement, checks);\n }\n\n if (makeFallbackChecks) {\n // `2` may be desired in some cases – research later\n var numberOfChecks = flipVariations ? 3 : 1;\n\n var _loop = function _loop(_i) {\n var fittingPlacement = placements.find(function (placement) {\n var checks = checksMap.get(placement);\n\n if (checks) {\n return checks.slice(0, _i).every(function (check) {\n return check;\n });\n }\n });\n\n if (fittingPlacement) {\n firstFittingPlacement = fittingPlacement;\n return \"break\";\n }\n };\n\n for (var _i = numberOfChecks; _i > 0; _i--) {\n var _ret = _loop(_i);\n\n if (_ret === \"break\") break;\n }\n }\n\n if (state.placement !== firstFittingPlacement) {\n state.modifiersData[name]._skip = true;\n state.placement = firstFittingPlacement;\n state.reset = true;\n }\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'flip',\n enabled: true,\n phase: 'main',\n fn: flip,\n requiresIfExists: ['offset'],\n data: {\n _skip: false\n }\n};","import getVariation from \"./getVariation.js\";\nimport { variationPlacements, basePlacements, placements as allPlacements } from \"../enums.js\";\nimport 
detectOverflow from \"./detectOverflow.js\";\nimport getBasePlacement from \"./getBasePlacement.js\";\nexport default function computeAutoPlacement(state, options) {\n if (options === void 0) {\n options = {};\n }\n\n var _options = options,\n placement = _options.placement,\n boundary = _options.boundary,\n rootBoundary = _options.rootBoundary,\n padding = _options.padding,\n flipVariations = _options.flipVariations,\n _options$allowedAutoP = _options.allowedAutoPlacements,\n allowedAutoPlacements = _options$allowedAutoP === void 0 ? allPlacements : _options$allowedAutoP;\n var variation = getVariation(placement);\n var placements = variation ? flipVariations ? variationPlacements : variationPlacements.filter(function (placement) {\n return getVariation(placement) === variation;\n }) : basePlacements;\n var allowedPlacements = placements.filter(function (placement) {\n return allowedAutoPlacements.indexOf(placement) >= 0;\n });\n\n if (allowedPlacements.length === 0) {\n allowedPlacements = placements;\n } // $FlowFixMe[incompatible-type]: Flow seems to have problems with two array unions...\n\n\n var overflows = allowedPlacements.reduce(function (acc, placement) {\n acc[placement] = detectOverflow(state, {\n placement: placement,\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding\n })[getBasePlacement(placement)];\n return acc;\n }, {});\n return Object.keys(overflows).sort(function (a, b) {\n return overflows[a] - overflows[b];\n });\n}","import { top, bottom, left, right } from \"../enums.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\n\nfunction getSideOffsets(overflow, rect, preventedOffsets) {\n if (preventedOffsets === void 0) {\n preventedOffsets = {\n x: 0,\n y: 0\n };\n }\n\n return {\n top: overflow.top - rect.height - preventedOffsets.y,\n right: overflow.right - rect.width + preventedOffsets.x,\n bottom: overflow.bottom - rect.height + preventedOffsets.y,\n left: overflow.left - rect.width - preventedOffsets.x\n 
};\n}\n\nfunction isAnySideFullyClipped(overflow) {\n return [top, right, bottom, left].some(function (side) {\n return overflow[side] >= 0;\n });\n}\n\nfunction hide(_ref) {\n var state = _ref.state,\n name = _ref.name;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var preventedOffsets = state.modifiersData.preventOverflow;\n var referenceOverflow = detectOverflow(state, {\n elementContext: 'reference'\n });\n var popperAltOverflow = detectOverflow(state, {\n altBoundary: true\n });\n var referenceClippingOffsets = getSideOffsets(referenceOverflow, referenceRect);\n var popperEscapeOffsets = getSideOffsets(popperAltOverflow, popperRect, preventedOffsets);\n var isReferenceHidden = isAnySideFullyClipped(referenceClippingOffsets);\n var hasPopperEscaped = isAnySideFullyClipped(popperEscapeOffsets);\n state.modifiersData[name] = {\n referenceClippingOffsets: referenceClippingOffsets,\n popperEscapeOffsets: popperEscapeOffsets,\n isReferenceHidden: isReferenceHidden,\n hasPopperEscaped: hasPopperEscaped\n };\n state.attributes.popper = Object.assign({}, state.attributes.popper, {\n 'data-popper-reference-hidden': isReferenceHidden,\n 'data-popper-escaped': hasPopperEscaped\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'hide',\n enabled: true,\n phase: 'main',\n requiresIfExists: ['preventOverflow'],\n fn: hide\n};","import getBasePlacement from \"../utils/getBasePlacement.js\";\nimport { top, left, right, placements } from \"../enums.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport function distanceAndSkiddingToXY(placement, rects, offset) {\n var basePlacement = getBasePlacement(placement);\n var invertDistance = [left, top].indexOf(basePlacement) >= 0 ? -1 : 1;\n\n var _ref = typeof offset === 'function' ? 
offset(Object.assign({}, rects, {\n placement: placement\n })) : offset,\n skidding = _ref[0],\n distance = _ref[1];\n\n skidding = skidding || 0;\n distance = (distance || 0) * invertDistance;\n return [left, right].indexOf(basePlacement) >= 0 ? {\n x: distance,\n y: skidding\n } : {\n x: skidding,\n y: distance\n };\n}\n\nfunction offset(_ref2) {\n var state = _ref2.state,\n options = _ref2.options,\n name = _ref2.name;\n var _options$offset = options.offset,\n offset = _options$offset === void 0 ? [0, 0] : _options$offset;\n var data = placements.reduce(function (acc, placement) {\n acc[placement] = distanceAndSkiddingToXY(placement, state.rects, offset);\n return acc;\n }, {});\n var _data$state$placement = data[state.placement],\n x = _data$state$placement.x,\n y = _data$state$placement.y;\n\n if (state.modifiersData.popperOffsets != null) {\n state.modifiersData.popperOffsets.x += x;\n state.modifiersData.popperOffsets.y += y;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'offset',\n enabled: true,\n phase: 'main',\n requires: ['popperOffsets'],\n fn: offset\n};","import computeOffsets from \"../utils/computeOffsets.js\";\n\nfunction popperOffsets(_ref) {\n var state = _ref.state,\n name = _ref.name;\n // Offsets are the actual position the popper needs to have to be\n // properly positioned near its reference element\n // This is the most basic placement, and will be adjusted by\n // the modifiers in the next step\n state.modifiersData[name] = computeOffsets({\n reference: state.rects.reference,\n element: state.rects.popper,\n strategy: 'absolute',\n placement: state.placement\n });\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'popperOffsets',\n enabled: true,\n phase: 'read',\n fn: popperOffsets,\n data: {}\n};","import { top, left, right, bottom, start } from \"../enums.js\";\nimport getBasePlacement from 
\"../utils/getBasePlacement.js\";\nimport getMainAxisFromPlacement from \"../utils/getMainAxisFromPlacement.js\";\nimport getAltAxis from \"../utils/getAltAxis.js\";\nimport { within, withinMaxClamp } from \"../utils/within.js\";\nimport getLayoutRect from \"../dom-utils/getLayoutRect.js\";\nimport getOffsetParent from \"../dom-utils/getOffsetParent.js\";\nimport detectOverflow from \"../utils/detectOverflow.js\";\nimport getVariation from \"../utils/getVariation.js\";\nimport getFreshSideObject from \"../utils/getFreshSideObject.js\";\nimport { min as mathMin, max as mathMax } from \"../utils/math.js\";\n\nfunction preventOverflow(_ref) {\n var state = _ref.state,\n options = _ref.options,\n name = _ref.name;\n var _options$mainAxis = options.mainAxis,\n checkMainAxis = _options$mainAxis === void 0 ? true : _options$mainAxis,\n _options$altAxis = options.altAxis,\n checkAltAxis = _options$altAxis === void 0 ? false : _options$altAxis,\n boundary = options.boundary,\n rootBoundary = options.rootBoundary,\n altBoundary = options.altBoundary,\n padding = options.padding,\n _options$tether = options.tether,\n tether = _options$tether === void 0 ? true : _options$tether,\n _options$tetherOffset = options.tetherOffset,\n tetherOffset = _options$tetherOffset === void 0 ? 0 : _options$tetherOffset;\n var overflow = detectOverflow(state, {\n boundary: boundary,\n rootBoundary: rootBoundary,\n padding: padding,\n altBoundary: altBoundary\n });\n var basePlacement = getBasePlacement(state.placement);\n var variation = getVariation(state.placement);\n var isBasePlacement = !variation;\n var mainAxis = getMainAxisFromPlacement(basePlacement);\n var altAxis = getAltAxis(mainAxis);\n var popperOffsets = state.modifiersData.popperOffsets;\n var referenceRect = state.rects.reference;\n var popperRect = state.rects.popper;\n var tetherOffsetValue = typeof tetherOffset === 'function' ? 
tetherOffset(Object.assign({}, state.rects, {\n placement: state.placement\n })) : tetherOffset;\n var normalizedTetherOffsetValue = typeof tetherOffsetValue === 'number' ? {\n mainAxis: tetherOffsetValue,\n altAxis: tetherOffsetValue\n } : Object.assign({\n mainAxis: 0,\n altAxis: 0\n }, tetherOffsetValue);\n var offsetModifierState = state.modifiersData.offset ? state.modifiersData.offset[state.placement] : null;\n var data = {\n x: 0,\n y: 0\n };\n\n if (!popperOffsets) {\n return;\n }\n\n if (checkMainAxis) {\n var _offsetModifierState$;\n\n var mainSide = mainAxis === 'y' ? top : left;\n var altSide = mainAxis === 'y' ? bottom : right;\n var len = mainAxis === 'y' ? 'height' : 'width';\n var offset = popperOffsets[mainAxis];\n var min = offset + overflow[mainSide];\n var max = offset - overflow[altSide];\n var additive = tether ? -popperRect[len] / 2 : 0;\n var minLen = variation === start ? referenceRect[len] : popperRect[len];\n var maxLen = variation === start ? -popperRect[len] : -referenceRect[len]; // We need to include the arrow in the calculation so the arrow doesn't go\n // outside the reference bounds\n\n var arrowElement = state.elements.arrow;\n var arrowRect = tether && arrowElement ? getLayoutRect(arrowElement) : {\n width: 0,\n height: 0\n };\n var arrowPaddingObject = state.modifiersData['arrow#persistent'] ? state.modifiersData['arrow#persistent'].padding : getFreshSideObject();\n var arrowPaddingMin = arrowPaddingObject[mainSide];\n var arrowPaddingMax = arrowPaddingObject[altSide]; // If the reference length is smaller than the arrow length, we don't want\n // to include its full size in the calculation. If the reference is small\n // and near the edge of a boundary, the popper can overflow even if the\n // reference is not overflowing as well (e.g. virtual elements with no\n // width or height)\n\n var arrowLen = within(0, referenceRect[len], arrowRect[len]);\n var minOffset = isBasePlacement ? 
referenceRect[len] / 2 - additive - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis : minLen - arrowLen - arrowPaddingMin - normalizedTetherOffsetValue.mainAxis;\n var maxOffset = isBasePlacement ? -referenceRect[len] / 2 + additive + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis : maxLen + arrowLen + arrowPaddingMax + normalizedTetherOffsetValue.mainAxis;\n var arrowOffsetParent = state.elements.arrow && getOffsetParent(state.elements.arrow);\n var clientOffset = arrowOffsetParent ? mainAxis === 'y' ? arrowOffsetParent.clientTop || 0 : arrowOffsetParent.clientLeft || 0 : 0;\n var offsetModifierValue = (_offsetModifierState$ = offsetModifierState == null ? void 0 : offsetModifierState[mainAxis]) != null ? _offsetModifierState$ : 0;\n var tetherMin = offset + minOffset - offsetModifierValue - clientOffset;\n var tetherMax = offset + maxOffset - offsetModifierValue;\n var preventedOffset = within(tether ? mathMin(min, tetherMin) : min, offset, tether ? mathMax(max, tetherMax) : max);\n popperOffsets[mainAxis] = preventedOffset;\n data[mainAxis] = preventedOffset - offset;\n }\n\n if (checkAltAxis) {\n var _offsetModifierState$2;\n\n var _mainSide = mainAxis === 'x' ? top : left;\n\n var _altSide = mainAxis === 'x' ? bottom : right;\n\n var _offset = popperOffsets[altAxis];\n\n var _len = altAxis === 'y' ? 'height' : 'width';\n\n var _min = _offset + overflow[_mainSide];\n\n var _max = _offset - overflow[_altSide];\n\n var isOriginSide = [top, left].indexOf(basePlacement) !== -1;\n\n var _offsetModifierValue = (_offsetModifierState$2 = offsetModifierState == null ? void 0 : offsetModifierState[altAxis]) != null ? _offsetModifierState$2 : 0;\n\n var _tetherMin = isOriginSide ? _min : _offset - referenceRect[_len] - popperRect[_len] - _offsetModifierValue + normalizedTetherOffsetValue.altAxis;\n\n var _tetherMax = isOriginSide ? 
_offset + referenceRect[_len] + popperRect[_len] - _offsetModifierValue - normalizedTetherOffsetValue.altAxis : _max;\n\n var _preventedOffset = tether && isOriginSide ? withinMaxClamp(_tetherMin, _offset, _tetherMax) : within(tether ? _tetherMin : _min, _offset, tether ? _tetherMax : _max);\n\n popperOffsets[altAxis] = _preventedOffset;\n data[altAxis] = _preventedOffset - _offset;\n }\n\n state.modifiersData[name] = data;\n} // eslint-disable-next-line import/no-unused-modules\n\n\nexport default {\n name: 'preventOverflow',\n enabled: true,\n phase: 'main',\n fn: preventOverflow,\n requiresIfExists: ['offset']\n};","export default function getAltAxis(axis) {\n return axis === 'x' ? 'y' : 'x';\n}","import getBoundingClientRect from \"./getBoundingClientRect.js\";\nimport getNodeScroll from \"./getNodeScroll.js\";\nimport getNodeName from \"./getNodeName.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getWindowScrollBarX from \"./getWindowScrollBarX.js\";\nimport getDocumentElement from \"./getDocumentElement.js\";\nimport isScrollParent from \"./isScrollParent.js\";\nimport { round } from \"../utils/math.js\";\n\nfunction isElementScaled(element) {\n var rect = element.getBoundingClientRect();\n var scaleX = round(rect.width) / element.offsetWidth || 1;\n var scaleY = round(rect.height) / element.offsetHeight || 1;\n return scaleX !== 1 || scaleY !== 1;\n} // Returns the composite rect of an element relative to its offsetParent.\n// Composite means it takes into account transforms as well as layout.\n\n\nexport default function getCompositeRect(elementOrVirtualElement, offsetParent, isFixed) {\n if (isFixed === void 0) {\n isFixed = false;\n }\n\n var isOffsetParentAnElement = isHTMLElement(offsetParent);\n var offsetParentIsScaled = isHTMLElement(offsetParent) && isElementScaled(offsetParent);\n var documentElement = getDocumentElement(offsetParent);\n var rect = getBoundingClientRect(elementOrVirtualElement, offsetParentIsScaled, isFixed);\n 
var scroll = {\n scrollLeft: 0,\n scrollTop: 0\n };\n var offsets = {\n x: 0,\n y: 0\n };\n\n if (isOffsetParentAnElement || !isOffsetParentAnElement && !isFixed) {\n if (getNodeName(offsetParent) !== 'body' || // https://github.com/popperjs/popper-core/issues/1078\n isScrollParent(documentElement)) {\n scroll = getNodeScroll(offsetParent);\n }\n\n if (isHTMLElement(offsetParent)) {\n offsets = getBoundingClientRect(offsetParent, true);\n offsets.x += offsetParent.clientLeft;\n offsets.y += offsetParent.clientTop;\n } else if (documentElement) {\n offsets.x = getWindowScrollBarX(documentElement);\n }\n }\n\n return {\n x: rect.left + scroll.scrollLeft - offsets.x,\n y: rect.top + scroll.scrollTop - offsets.y,\n width: rect.width,\n height: rect.height\n };\n}","import getWindowScroll from \"./getWindowScroll.js\";\nimport getWindow from \"./getWindow.js\";\nimport { isHTMLElement } from \"./instanceOf.js\";\nimport getHTMLElementScroll from \"./getHTMLElementScroll.js\";\nexport default function getNodeScroll(node) {\n if (node === getWindow(node) || !isHTMLElement(node)) {\n return getWindowScroll(node);\n } else {\n return getHTMLElementScroll(node);\n }\n}","export default function getHTMLElementScroll(element) {\n return {\n scrollLeft: element.scrollLeft,\n scrollTop: element.scrollTop\n };\n}","import { modifierPhases } from \"../enums.js\"; // source: https://stackoverflow.com/questions/49875255\n\nfunction order(modifiers) {\n var map = new Map();\n var visited = new Set();\n var result = [];\n modifiers.forEach(function (modifier) {\n map.set(modifier.name, modifier);\n }); // On visiting object, check for its dependencies and visit them recursively\n\n function sort(modifier) {\n visited.add(modifier.name);\n var requires = [].concat(modifier.requires || [], modifier.requiresIfExists || []);\n requires.forEach(function (dep) {\n if (!visited.has(dep)) {\n var depModifier = map.get(dep);\n\n if (depModifier) {\n sort(depModifier);\n }\n }\n });\n 
result.push(modifier);\n }\n\n modifiers.forEach(function (modifier) {\n if (!visited.has(modifier.name)) {\n // check for visited object\n sort(modifier);\n }\n });\n return result;\n}\n\nexport default function orderModifiers(modifiers) {\n // order based on dependencies\n var orderedModifiers = order(modifiers); // order based on phase\n\n return modifierPhases.reduce(function (acc, phase) {\n return acc.concat(orderedModifiers.filter(function (modifier) {\n return modifier.phase === phase;\n }));\n }, []);\n}","import getCompositeRect from \"./dom-utils/getCompositeRect.js\";\nimport getLayoutRect from \"./dom-utils/getLayoutRect.js\";\nimport listScrollParents from \"./dom-utils/listScrollParents.js\";\nimport getOffsetParent from \"./dom-utils/getOffsetParent.js\";\nimport orderModifiers from \"./utils/orderModifiers.js\";\nimport debounce from \"./utils/debounce.js\";\nimport mergeByName from \"./utils/mergeByName.js\";\nimport detectOverflow from \"./utils/detectOverflow.js\";\nimport { isElement } from \"./dom-utils/instanceOf.js\";\nvar DEFAULT_OPTIONS = {\n placement: 'bottom',\n modifiers: [],\n strategy: 'absolute'\n};\n\nfunction areValidElements() {\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n\n return !args.some(function (element) {\n return !(element && typeof element.getBoundingClientRect === 'function');\n });\n}\n\nexport function popperGenerator(generatorOptions) {\n if (generatorOptions === void 0) {\n generatorOptions = {};\n }\n\n var _generatorOptions = generatorOptions,\n _generatorOptions$def = _generatorOptions.defaultModifiers,\n defaultModifiers = _generatorOptions$def === void 0 ? [] : _generatorOptions$def,\n _generatorOptions$def2 = _generatorOptions.defaultOptions,\n defaultOptions = _generatorOptions$def2 === void 0 ? 
DEFAULT_OPTIONS : _generatorOptions$def2;\n return function createPopper(reference, popper, options) {\n if (options === void 0) {\n options = defaultOptions;\n }\n\n var state = {\n placement: 'bottom',\n orderedModifiers: [],\n options: Object.assign({}, DEFAULT_OPTIONS, defaultOptions),\n modifiersData: {},\n elements: {\n reference: reference,\n popper: popper\n },\n attributes: {},\n styles: {}\n };\n var effectCleanupFns = [];\n var isDestroyed = false;\n var instance = {\n state: state,\n setOptions: function setOptions(setOptionsAction) {\n var options = typeof setOptionsAction === 'function' ? setOptionsAction(state.options) : setOptionsAction;\n cleanupModifierEffects();\n state.options = Object.assign({}, defaultOptions, state.options, options);\n state.scrollParents = {\n reference: isElement(reference) ? listScrollParents(reference) : reference.contextElement ? listScrollParents(reference.contextElement) : [],\n popper: listScrollParents(popper)\n }; // Orders the modifiers based on their dependencies and `phase`\n // properties\n\n var orderedModifiers = orderModifiers(mergeByName([].concat(defaultModifiers, state.options.modifiers))); // Strip out disabled modifiers\n\n state.orderedModifiers = orderedModifiers.filter(function (m) {\n return m.enabled;\n });\n runModifierEffects();\n return instance.update();\n },\n // Sync update – it will always be executed, even if not necessary. This\n // is useful for low frequency updates where sync behavior simplifies the\n // logic.\n // For high frequency updates (e.g. 
`resize` and `scroll` events), always\n // prefer the async Popper#update method\n forceUpdate: function forceUpdate() {\n if (isDestroyed) {\n return;\n }\n\n var _state$elements = state.elements,\n reference = _state$elements.reference,\n popper = _state$elements.popper; // Don't proceed if `reference` or `popper` are not valid elements\n // anymore\n\n if (!areValidElements(reference, popper)) {\n return;\n } // Store the reference and popper rects to be read by modifiers\n\n\n state.rects = {\n reference: getCompositeRect(reference, getOffsetParent(popper), state.options.strategy === 'fixed'),\n popper: getLayoutRect(popper)\n }; // Modifiers have the ability to reset the current update cycle. The\n // most common use case for this is the `flip` modifier changing the\n // placement, which then needs to re-run all the modifiers, because the\n // logic was previously ran for the previous placement and is therefore\n // stale/incorrect\n\n state.reset = false;\n state.placement = state.options.placement; // On each update cycle, the `modifiersData` property for each modifier\n // is filled with the initial data specified by the modifier. This means\n // it doesn't persist and is fresh on each update.\n // To ensure persistent data, use `${name}#persistent`\n\n state.orderedModifiers.forEach(function (modifier) {\n return state.modifiersData[modifier.name] = Object.assign({}, modifier.data);\n });\n\n for (var index = 0; index < state.orderedModifiers.length; index++) {\n if (state.reset === true) {\n state.reset = false;\n index = -1;\n continue;\n }\n\n var _state$orderedModifie = state.orderedModifiers[index],\n fn = _state$orderedModifie.fn,\n _state$orderedModifie2 = _state$orderedModifie.options,\n _options = _state$orderedModifie2 === void 0 ? 
{} : _state$orderedModifie2,\n name = _state$orderedModifie.name;\n\n if (typeof fn === 'function') {\n state = fn({\n state: state,\n options: _options,\n name: name,\n instance: instance\n }) || state;\n }\n }\n },\n // Async and optimistically optimized update – it will not be executed if\n // not necessary (debounced to run at most once-per-tick)\n update: debounce(function () {\n return new Promise(function (resolve) {\n instance.forceUpdate();\n resolve(state);\n });\n }),\n destroy: function destroy() {\n cleanupModifierEffects();\n isDestroyed = true;\n }\n };\n\n if (!areValidElements(reference, popper)) {\n return instance;\n }\n\n instance.setOptions(options).then(function (state) {\n if (!isDestroyed && options.onFirstUpdate) {\n options.onFirstUpdate(state);\n }\n }); // Modifiers have the ability to execute arbitrary code before the first\n // update cycle runs. They will be executed in the same order as the update\n // cycle. This is useful when a modifier adds some persistent data that\n // other modifiers need to use, but the modifier is run after the dependent\n // one.\n\n function runModifierEffects() {\n state.orderedModifiers.forEach(function (_ref) {\n var name = _ref.name,\n _ref$options = _ref.options,\n options = _ref$options === void 0 ? 
{} : _ref$options,\n effect = _ref.effect;\n\n if (typeof effect === 'function') {\n var cleanupFn = effect({\n state: state,\n name: name,\n instance: instance,\n options: options\n });\n\n var noopFn = function noopFn() {};\n\n effectCleanupFns.push(cleanupFn || noopFn);\n }\n });\n }\n\n function cleanupModifierEffects() {\n effectCleanupFns.forEach(function (fn) {\n return fn();\n });\n effectCleanupFns = [];\n }\n\n return instance;\n };\n}\nexport var createPopper = /*#__PURE__*/popperGenerator(); // eslint-disable-next-line import/no-unused-modules\n\nexport { detectOverflow };","export default function debounce(fn) {\n var pending;\n return function () {\n if (!pending) {\n pending = new Promise(function (resolve) {\n Promise.resolve().then(function () {\n pending = undefined;\n resolve(fn());\n });\n });\n }\n\n return pending;\n };\n}","export default function mergeByName(modifiers) {\n var merged = modifiers.reduce(function (merged, current) {\n var existing = merged[current.name];\n merged[current.name] = existing ? 
Object.assign({}, existing, current, {\n options: Object.assign({}, existing.options, current.options),\n data: Object.assign({}, existing.data, current.data)\n }) : current;\n return merged;\n }, {}); // IE11 does not support Object.values\n\n return Object.keys(merged).map(function (key) {\n return merged[key];\n });\n}","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nimport offset from \"./modifiers/offset.js\";\nimport flip from \"./modifiers/flip.js\";\nimport preventOverflow from \"./modifiers/preventOverflow.js\";\nimport arrow from \"./modifiers/arrow.js\";\nimport hide from \"./modifiers/hide.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles, offset, flip, preventOverflow, arrow, hide];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow }; // eslint-disable-next-line import/no-unused-modules\n\nexport { createPopper as createPopperLite } from \"./popper-lite.js\"; // eslint-disable-next-line import/no-unused-modules\n\nexport * from \"./modifiers/index.js\";","import { popperGenerator, detectOverflow } from \"./createPopper.js\";\nimport eventListeners from \"./modifiers/eventListeners.js\";\nimport popperOffsets from \"./modifiers/popperOffsets.js\";\nimport computeStyles from \"./modifiers/computeStyles.js\";\nimport applyStyles from \"./modifiers/applyStyles.js\";\nvar defaultModifiers = [eventListeners, popperOffsets, computeStyles, applyStyles];\nvar createPopper = /*#__PURE__*/popperGenerator({\n defaultModifiers: defaultModifiers\n}); // eslint-disable-next-line 
import/no-unused-modules\n\nexport { createPopper, popperGenerator, defaultModifiers, detectOverflow };","/*!\n * Bootstrap v5.3.2 (https://getbootstrap.com/)\n * Copyright 2011-2023 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n */\nimport * as Popper from '@popperjs/core';\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/data.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n/**\n * Constants\n */\n\nconst elementMap = new Map();\nconst Data = {\n set(element, key, instance) {\n if (!elementMap.has(element)) {\n elementMap.set(element, new Map());\n }\n const instanceMap = elementMap.get(element);\n\n // make it clear we only want one instance per element\n // can be removed later when multiple key/instances are fine to be used\n if (!instanceMap.has(key) && instanceMap.size !== 0) {\n // eslint-disable-next-line no-console\n console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(instanceMap.keys())[0]}.`);\n return;\n }\n instanceMap.set(key, instance);\n },\n get(element, key) {\n if (elementMap.has(element)) {\n return elementMap.get(element).get(key) || null;\n }\n return null;\n },\n remove(element, key) {\n if (!elementMap.has(element)) {\n return;\n }\n const instanceMap = elementMap.get(element);\n instanceMap.delete(key);\n\n // free up element references if there are no instances left for an element\n if (instanceMap.size === 0) {\n elementMap.delete(element);\n }\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/index.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst MAX_UID = 1000000;\nconst MILLISECONDS_MULTIPLIER = 1000;\nconst TRANSITION_END = 'transitionend';\n\n/**\n * Properly escape IDs selectors to handle weird IDs\n * @param {string} selector\n * @returns {string}\n */\nconst parseSelector = selector => {\n if (selector && window.CSS && window.CSS.escape) {\n // document.querySelector needs escaping to handle IDs (html5+) containing for instance /\n selector = selector.replace(/#([^\\s\"#']+)/g, (match, id) => `#${CSS.escape(id)}`);\n }\n return selector;\n};\n\n// Shout-out Angus Croll (https://goo.gl/pxwQGp)\nconst toType = object => {\n if (object === null || object === undefined) {\n return `${object}`;\n }\n return Object.prototype.toString.call(object).match(/\\s([a-z]+)/i)[1].toLowerCase();\n};\n\n/**\n * Public Util API\n */\n\nconst getUID = prefix => {\n do {\n prefix += Math.floor(Math.random() * MAX_UID);\n } while (document.getElementById(prefix));\n return prefix;\n};\nconst getTransitionDurationFromElement = element => {\n if (!element) {\n return 0;\n }\n\n // Get transition-duration of the element\n let {\n transitionDuration,\n transitionDelay\n } = window.getComputedStyle(element);\n const 
floatTransitionDuration = Number.parseFloat(transitionDuration);\n const floatTransitionDelay = Number.parseFloat(transitionDelay);\n\n // Return 0 if element or transition duration is not found\n if (!floatTransitionDuration && !floatTransitionDelay) {\n return 0;\n }\n\n // If multiple durations are defined, take the first\n transitionDuration = transitionDuration.split(',')[0];\n transitionDelay = transitionDelay.split(',')[0];\n return (Number.parseFloat(transitionDuration) + Number.parseFloat(transitionDelay)) * MILLISECONDS_MULTIPLIER;\n};\nconst triggerTransitionEnd = element => {\n element.dispatchEvent(new Event(TRANSITION_END));\n};\nconst isElement = object => {\n if (!object || typeof object !== 'object') {\n return false;\n }\n if (typeof object.jquery !== 'undefined') {\n object = object[0];\n }\n return typeof object.nodeType !== 'undefined';\n};\nconst getElement = object => {\n // it's a jQuery object or a node element\n if (isElement(object)) {\n return object.jquery ? 
object[0] : object;\n }\n if (typeof object === 'string' && object.length > 0) {\n return document.querySelector(parseSelector(object));\n }\n return null;\n};\nconst isVisible = element => {\n if (!isElement(element) || element.getClientRects().length === 0) {\n return false;\n }\n const elementIsVisible = getComputedStyle(element).getPropertyValue('visibility') === 'visible';\n // Handle `details` element as its content may falsie appear visible when it is closed\n const closedDetails = element.closest('details:not([open])');\n if (!closedDetails) {\n return elementIsVisible;\n }\n if (closedDetails !== element) {\n const summary = element.closest('summary');\n if (summary && summary.parentNode !== closedDetails) {\n return false;\n }\n if (summary === null) {\n return false;\n }\n }\n return elementIsVisible;\n};\nconst isDisabled = element => {\n if (!element || element.nodeType !== Node.ELEMENT_NODE) {\n return true;\n }\n if (element.classList.contains('disabled')) {\n return true;\n }\n if (typeof element.disabled !== 'undefined') {\n return element.disabled;\n }\n return element.hasAttribute('disabled') && element.getAttribute('disabled') !== 'false';\n};\nconst findShadowRoot = element => {\n if (!document.documentElement.attachShadow) {\n return null;\n }\n\n // Can find the shadow root otherwise it'll return the document\n if (typeof element.getRootNode === 'function') {\n const root = element.getRootNode();\n return root instanceof ShadowRoot ? 
root : null;\n }\n if (element instanceof ShadowRoot) {\n return element;\n }\n\n // when we don't find a shadow root\n if (!element.parentNode) {\n return null;\n }\n return findShadowRoot(element.parentNode);\n};\nconst noop = () => {};\n\n/**\n * Trick to restart an element's animation\n *\n * @param {HTMLElement} element\n * @return void\n *\n * @see https://www.charistheo.io/blog/2021/02/restart-a-css-animation-with-javascript/#restarting-a-css-animation\n */\nconst reflow = element => {\n element.offsetHeight; // eslint-disable-line no-unused-expressions\n};\n\nconst getjQuery = () => {\n if (window.jQuery && !document.body.hasAttribute('data-bs-no-jquery')) {\n return window.jQuery;\n }\n return null;\n};\nconst DOMContentLoadedCallbacks = [];\nconst onDOMContentLoaded = callback => {\n if (document.readyState === 'loading') {\n // add listener on the first call when the document is in loading state\n if (!DOMContentLoadedCallbacks.length) {\n document.addEventListener('DOMContentLoaded', () => {\n for (const callback of DOMContentLoadedCallbacks) {\n callback();\n }\n });\n }\n DOMContentLoadedCallbacks.push(callback);\n } else {\n callback();\n }\n};\nconst isRTL = () => document.documentElement.dir === 'rtl';\nconst defineJQueryPlugin = plugin => {\n onDOMContentLoaded(() => {\n const $ = getjQuery();\n /* istanbul ignore if */\n if ($) {\n const name = plugin.NAME;\n const JQUERY_NO_CONFLICT = $.fn[name];\n $.fn[name] = plugin.jQueryInterface;\n $.fn[name].Constructor = plugin;\n $.fn[name].noConflict = () => {\n $.fn[name] = JQUERY_NO_CONFLICT;\n return plugin.jQueryInterface;\n };\n }\n });\n};\nconst execute = (possibleCallback, args = [], defaultValue = possibleCallback) => {\n return typeof possibleCallback === 'function' ? 
possibleCallback(...args) : defaultValue;\n};\nconst executeAfterTransition = (callback, transitionElement, waitForTransition = true) => {\n if (!waitForTransition) {\n execute(callback);\n return;\n }\n const durationPadding = 5;\n const emulatedDuration = getTransitionDurationFromElement(transitionElement) + durationPadding;\n let called = false;\n const handler = ({\n target\n }) => {\n if (target !== transitionElement) {\n return;\n }\n called = true;\n transitionElement.removeEventListener(TRANSITION_END, handler);\n execute(callback);\n };\n transitionElement.addEventListener(TRANSITION_END, handler);\n setTimeout(() => {\n if (!called) {\n triggerTransitionEnd(transitionElement);\n }\n }, emulatedDuration);\n};\n\n/**\n * Return the previous/next element of a list.\n *\n * @param {array} list The list of elements\n * @param activeElement The active element\n * @param shouldGetNext Choose to get next or previous element\n * @param isCycleAllowed\n * @return {Element|elem} The proper element\n */\nconst getNextActiveElement = (list, activeElement, shouldGetNext, isCycleAllowed) => {\n const listLength = list.length;\n let index = list.indexOf(activeElement);\n\n // if the element does not exist in the list return an element\n // depending on the direction and if cycle is allowed\n if (index === -1) {\n return !shouldGetNext && isCycleAllowed ? list[listLength - 1] : list[0];\n }\n index += shouldGetNext ? 
1 : -1;\n if (isCycleAllowed) {\n index = (index + listLength) % listLength;\n }\n return list[Math.max(0, Math.min(index, listLength - 1))];\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/event-handler.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst namespaceRegex = /[^.]*(?=\\..*)\\.|.*/;\nconst stripNameRegex = /\\..*/;\nconst stripUidRegex = /::\\d+$/;\nconst eventRegistry = {}; // Events storage\nlet uidEvent = 1;\nconst customEvents = {\n mouseenter: 'mouseover',\n mouseleave: 'mouseout'\n};\nconst nativeEvents = new Set(['click', 'dblclick', 'mouseup', 'mousedown', 'contextmenu', 'mousewheel', 'DOMMouseScroll', 'mouseover', 'mouseout', 'mousemove', 'selectstart', 'selectend', 'keydown', 'keypress', 'keyup', 'orientationchange', 'touchstart', 'touchmove', 'touchend', 'touchcancel', 'pointerdown', 'pointermove', 'pointerup', 'pointerleave', 'pointercancel', 'gesturestart', 'gesturechange', 'gestureend', 'focus', 'blur', 'change', 'reset', 'select', 'submit', 'focusin', 'focusout', 'load', 'unload', 'beforeunload', 'resize', 'move', 'DOMContentLoaded', 'readystatechange', 'error', 'abort', 'scroll']);\n\n/**\n * Private methods\n */\n\nfunction makeEventUid(element, uid) {\n return uid && `${uid}::${uidEvent++}` || element.uidEvent || uidEvent++;\n}\nfunction getElementEvents(element) {\n const uid = makeEventUid(element);\n element.uidEvent = uid;\n eventRegistry[uid] = eventRegistry[uid] || {};\n return eventRegistry[uid];\n}\nfunction bootstrapHandler(element, fn) {\n return function handler(event) {\n hydrateObj(event, {\n delegateTarget: element\n });\n if (handler.oneOff) {\n EventHandler.off(element, event.type, fn);\n }\n return fn.apply(element, [event]);\n };\n}\nfunction bootstrapDelegationHandler(element, selector, fn) {\n return function 
handler(event) {\n const domElements = element.querySelectorAll(selector);\n for (let {\n target\n } = event; target && target !== this; target = target.parentNode) {\n for (const domElement of domElements) {\n if (domElement !== target) {\n continue;\n }\n hydrateObj(event, {\n delegateTarget: target\n });\n if (handler.oneOff) {\n EventHandler.off(element, event.type, selector, fn);\n }\n return fn.apply(target, [event]);\n }\n }\n };\n}\nfunction findHandler(events, callable, delegationSelector = null) {\n return Object.values(events).find(event => event.callable === callable && event.delegationSelector === delegationSelector);\n}\nfunction normalizeParameters(originalTypeEvent, handler, delegationFunction) {\n const isDelegated = typeof handler === 'string';\n // TODO: tooltip passes `false` instead of selector, so we need to check\n const callable = isDelegated ? delegationFunction : handler || delegationFunction;\n let typeEvent = getTypeEvent(originalTypeEvent);\n if (!nativeEvents.has(typeEvent)) {\n typeEvent = originalTypeEvent;\n }\n return [isDelegated, callable, typeEvent];\n}\nfunction addHandler(element, originalTypeEvent, handler, delegationFunction, oneOff) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n let [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, delegationFunction);\n\n // in case of mouseenter or mouseleave wrap the handler within a function that checks for its DOM position\n // this prevents the handler from being dispatched the same way as mouseover or mouseout does\n if (originalTypeEvent in customEvents) {\n const wrapFunction = fn => {\n return function (event) {\n if (!event.relatedTarget || event.relatedTarget !== event.delegateTarget && !event.delegateTarget.contains(event.relatedTarget)) {\n return fn.call(this, event);\n }\n };\n };\n callable = wrapFunction(callable);\n }\n const events = getElementEvents(element);\n const handlers = events[typeEvent] || 
(events[typeEvent] = {});\n const previousFunction = findHandler(handlers, callable, isDelegated ? handler : null);\n if (previousFunction) {\n previousFunction.oneOff = previousFunction.oneOff && oneOff;\n return;\n }\n const uid = makeEventUid(callable, originalTypeEvent.replace(namespaceRegex, ''));\n const fn = isDelegated ? bootstrapDelegationHandler(element, handler, callable) : bootstrapHandler(element, callable);\n fn.delegationSelector = isDelegated ? handler : null;\n fn.callable = callable;\n fn.oneOff = oneOff;\n fn.uidEvent = uid;\n handlers[uid] = fn;\n element.addEventListener(typeEvent, fn, isDelegated);\n}\nfunction removeHandler(element, events, typeEvent, handler, delegationSelector) {\n const fn = findHandler(events[typeEvent], handler, delegationSelector);\n if (!fn) {\n return;\n }\n element.removeEventListener(typeEvent, fn, Boolean(delegationSelector));\n delete events[typeEvent][fn.uidEvent];\n}\nfunction removeNamespacedHandlers(element, events, typeEvent, namespace) {\n const storeElementEvent = events[typeEvent] || {};\n for (const [handlerKey, event] of Object.entries(storeElementEvent)) {\n if (handlerKey.includes(namespace)) {\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n}\nfunction getTypeEvent(event) {\n // allow to get the native events from namespaced events ('click.bs.button' --> 'click')\n event = event.replace(stripNameRegex, '');\n return customEvents[event] || event;\n}\nconst EventHandler = {\n on(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, false);\n },\n one(element, event, handler, delegationFunction) {\n addHandler(element, event, handler, delegationFunction, true);\n },\n off(element, originalTypeEvent, handler, delegationFunction) {\n if (typeof originalTypeEvent !== 'string' || !element) {\n return;\n }\n const [isDelegated, callable, typeEvent] = normalizeParameters(originalTypeEvent, handler, 
delegationFunction);\n const inNamespace = typeEvent !== originalTypeEvent;\n const events = getElementEvents(element);\n const storeElementEvent = events[typeEvent] || {};\n const isNamespace = originalTypeEvent.startsWith('.');\n if (typeof callable !== 'undefined') {\n // Simplest case: handler is passed, remove that listener ONLY.\n if (!Object.keys(storeElementEvent).length) {\n return;\n }\n removeHandler(element, events, typeEvent, callable, isDelegated ? handler : null);\n return;\n }\n if (isNamespace) {\n for (const elementEvent of Object.keys(events)) {\n removeNamespacedHandlers(element, events, elementEvent, originalTypeEvent.slice(1));\n }\n }\n for (const [keyHandlers, event] of Object.entries(storeElementEvent)) {\n const handlerKey = keyHandlers.replace(stripUidRegex, '');\n if (!inNamespace || originalTypeEvent.includes(handlerKey)) {\n removeHandler(element, events, typeEvent, event.callable, event.delegationSelector);\n }\n }\n },\n trigger(element, event, args) {\n if (typeof event !== 'string' || !element) {\n return null;\n }\n const $ = getjQuery();\n const typeEvent = getTypeEvent(event);\n const inNamespace = event !== typeEvent;\n let jQueryEvent = null;\n let bubbles = true;\n let nativeDispatch = true;\n let defaultPrevented = false;\n if (inNamespace && $) {\n jQueryEvent = $.Event(event, args);\n $(element).trigger(jQueryEvent);\n bubbles = !jQueryEvent.isPropagationStopped();\n nativeDispatch = !jQueryEvent.isImmediatePropagationStopped();\n defaultPrevented = jQueryEvent.isDefaultPrevented();\n }\n const evt = hydrateObj(new Event(event, {\n bubbles,\n cancelable: true\n }), args);\n if (defaultPrevented) {\n evt.preventDefault();\n }\n if (nativeDispatch) {\n element.dispatchEvent(evt);\n }\n if (evt.defaultPrevented && jQueryEvent) {\n jQueryEvent.preventDefault();\n }\n return evt;\n }\n};\nfunction hydrateObj(obj, meta = {}) {\n for (const [key, value] of Object.entries(meta)) {\n try {\n obj[key] = value;\n } catch (_unused) 
{\n Object.defineProperty(obj, key, {\n configurable: true,\n get() {\n return value;\n }\n });\n }\n }\n return obj;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/manipulator.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nfunction normalizeData(value) {\n if (value === 'true') {\n return true;\n }\n if (value === 'false') {\n return false;\n }\n if (value === Number(value).toString()) {\n return Number(value);\n }\n if (value === '' || value === 'null') {\n return null;\n }\n if (typeof value !== 'string') {\n return value;\n }\n try {\n return JSON.parse(decodeURIComponent(value));\n } catch (_unused) {\n return value;\n }\n}\nfunction normalizeDataKey(key) {\n return key.replace(/[A-Z]/g, chr => `-${chr.toLowerCase()}`);\n}\nconst Manipulator = {\n setDataAttribute(element, key, value) {\n element.setAttribute(`data-bs-${normalizeDataKey(key)}`, value);\n },\n removeDataAttribute(element, key) {\n element.removeAttribute(`data-bs-${normalizeDataKey(key)}`);\n },\n getDataAttributes(element) {\n if (!element) {\n return {};\n }\n const attributes = {};\n const bsKeys = Object.keys(element.dataset).filter(key => key.startsWith('bs') && !key.startsWith('bsConfig'));\n for (const key of bsKeys) {\n let pureKey = key.replace(/^bs/, '');\n pureKey = pureKey.charAt(0).toLowerCase() + pureKey.slice(1, pureKey.length);\n attributes[pureKey] = normalizeData(element.dataset[key]);\n }\n return attributes;\n },\n getDataAttribute(element, key) {\n return normalizeData(element.getAttribute(`data-bs-${normalizeDataKey(key)}`));\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/config.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * 
--------------------------------------------------------------------------\n */\n\n\n/**\n * Class definition\n */\n\nclass Config {\n // Getters\n static get Default() {\n return {};\n }\n static get DefaultType() {\n return {};\n }\n static get NAME() {\n throw new Error('You have to implement the static method \"NAME\", for each component!');\n }\n _getConfig(config) {\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n _configAfterMerge(config) {\n return config;\n }\n _mergeConfigObj(config, element) {\n const jsonConfig = isElement(element) ? Manipulator.getDataAttribute(element, 'config') : {}; // try to parse\n\n return {\n ...this.constructor.Default,\n ...(typeof jsonConfig === 'object' ? jsonConfig : {}),\n ...(isElement(element) ? Manipulator.getDataAttributes(element) : {}),\n ...(typeof config === 'object' ? config : {})\n };\n }\n _typeCheckConfig(config, configTypes = this.constructor.DefaultType) {\n for (const [property, expectedTypes] of Object.entries(configTypes)) {\n const value = config[property];\n const valueType = isElement(value) ? 
'element' : toType(value);\n if (!new RegExp(expectedTypes).test(valueType)) {\n throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option \"${property}\" provided type \"${valueType}\" but expected type \"${expectedTypes}\".`);\n }\n }\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap base-component.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst VERSION = '5.3.2';\n\n/**\n * Class definition\n */\n\nclass BaseComponent extends Config {\n constructor(element, config) {\n super();\n element = getElement(element);\n if (!element) {\n return;\n }\n this._element = element;\n this._config = this._getConfig(config);\n Data.set(this._element, this.constructor.DATA_KEY, this);\n }\n\n // Public\n dispose() {\n Data.remove(this._element, this.constructor.DATA_KEY);\n EventHandler.off(this._element, this.constructor.EVENT_KEY);\n for (const propertyName of Object.getOwnPropertyNames(this)) {\n this[propertyName] = null;\n }\n }\n _queueCallback(callback, element, isAnimated = true) {\n executeAfterTransition(callback, element, isAnimated);\n }\n _getConfig(config) {\n config = this._mergeConfigObj(config, this._element);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n\n // Static\n static getInstance(element) {\n return Data.get(getElement(element), this.DATA_KEY);\n }\n static getOrCreateInstance(element, config = {}) {\n return this.getInstance(element) || new this(element, typeof config === 'object' ? 
config : null);\n }\n static get VERSION() {\n return VERSION;\n }\n static get DATA_KEY() {\n return `bs.${this.NAME}`;\n }\n static get EVENT_KEY() {\n return `.${this.DATA_KEY}`;\n }\n static eventName(name) {\n return `${name}${this.EVENT_KEY}`;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dom/selector-engine.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst getSelector = element => {\n let selector = element.getAttribute('data-bs-target');\n if (!selector || selector === '#') {\n let hrefAttribute = element.getAttribute('href');\n\n // The only valid content that could double as a selector are IDs or classes,\n // so everything starting with `#` or `.`. If a \"real\" URL is used as the selector,\n // `document.querySelector` will rightfully complain it is invalid.\n // See https://github.com/twbs/bootstrap/issues/32273\n if (!hrefAttribute || !hrefAttribute.includes('#') && !hrefAttribute.startsWith('.')) {\n return null;\n }\n\n // Just in case some CMS puts out a full URL with the anchor appended\n if (hrefAttribute.includes('#') && !hrefAttribute.startsWith('#')) {\n hrefAttribute = `#${hrefAttribute.split('#')[1]}`;\n }\n selector = hrefAttribute && hrefAttribute !== '#' ? 
parseSelector(hrefAttribute.trim()) : null;\n }\n return selector;\n};\nconst SelectorEngine = {\n find(selector, element = document.documentElement) {\n return [].concat(...Element.prototype.querySelectorAll.call(element, selector));\n },\n findOne(selector, element = document.documentElement) {\n return Element.prototype.querySelector.call(element, selector);\n },\n children(element, selector) {\n return [].concat(...element.children).filter(child => child.matches(selector));\n },\n parents(element, selector) {\n const parents = [];\n let ancestor = element.parentNode.closest(selector);\n while (ancestor) {\n parents.push(ancestor);\n ancestor = ancestor.parentNode.closest(selector);\n }\n return parents;\n },\n prev(element, selector) {\n let previous = element.previousElementSibling;\n while (previous) {\n if (previous.matches(selector)) {\n return [previous];\n }\n previous = previous.previousElementSibling;\n }\n return [];\n },\n // TODO: this is now unused; remove later along with prev()\n next(element, selector) {\n let next = element.nextElementSibling;\n while (next) {\n if (next.matches(selector)) {\n return [next];\n }\n next = next.nextElementSibling;\n }\n return [];\n },\n focusableChildren(element) {\n const focusables = ['a', 'button', 'input', 'textarea', 'select', 'details', '[tabindex]', '[contenteditable=\"true\"]'].map(selector => `${selector}:not([tabindex^=\"-\"])`).join(',');\n return this.find(focusables, element).filter(el => !isDisabled(el) && isVisible(el));\n },\n getSelectorFromElement(element) {\n const selector = getSelector(element);\n if (selector) {\n return SelectorEngine.findOne(selector) ? selector : null;\n }\n return null;\n },\n getElementFromSelector(element) {\n const selector = getSelector(element);\n return selector ? SelectorEngine.findOne(selector) : null;\n },\n getMultipleElementsFromSelector(element) {\n const selector = getSelector(element);\n return selector ? 
SelectorEngine.find(selector) : [];\n }\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/component-functions.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\nconst enableDismissTrigger = (component, method = 'hide') => {\n const clickEvent = `click.dismiss${component.EVENT_KEY}`;\n const name = component.NAME;\n EventHandler.on(document, clickEvent, `[data-bs-dismiss=\"${name}\"]`, function (event) {\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n if (isDisabled(this)) {\n return;\n }\n const target = SelectorEngine.getElementFromSelector(this) || this.closest(`.${name}`);\n const instance = component.getOrCreateInstance(target);\n\n // Method argument is left, for Alert and only, as it doesn't implement the 'hide' method\n instance[method]();\n });\n};\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap alert.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$f = 'alert';\nconst DATA_KEY$a = 'bs.alert';\nconst EVENT_KEY$b = `.${DATA_KEY$a}`;\nconst EVENT_CLOSE = `close${EVENT_KEY$b}`;\nconst EVENT_CLOSED = `closed${EVENT_KEY$b}`;\nconst CLASS_NAME_FADE$5 = 'fade';\nconst CLASS_NAME_SHOW$8 = 'show';\n\n/**\n * Class definition\n */\n\nclass Alert extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$f;\n }\n\n // Public\n close() {\n const closeEvent = EventHandler.trigger(this._element, EVENT_CLOSE);\n if (closeEvent.defaultPrevented) {\n return;\n }\n this._element.classList.remove(CLASS_NAME_SHOW$8);\n const isAnimated = this._element.classList.contains(CLASS_NAME_FADE$5);\n this._queueCallback(() => this._destroyElement(), this._element, 
isAnimated);\n }\n\n // Private\n _destroyElement() {\n this._element.remove();\n EventHandler.trigger(this._element, EVENT_CLOSED);\n this.dispose();\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Alert.getOrCreateInstance(this);\n if (typeof config !== 'string') {\n return;\n }\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](this);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nenableDismissTrigger(Alert, 'close');\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Alert);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap button.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$e = 'button';\nconst DATA_KEY$9 = 'bs.button';\nconst EVENT_KEY$a = `.${DATA_KEY$9}`;\nconst DATA_API_KEY$6 = '.data-api';\nconst CLASS_NAME_ACTIVE$3 = 'active';\nconst SELECTOR_DATA_TOGGLE$5 = '[data-bs-toggle=\"button\"]';\nconst EVENT_CLICK_DATA_API$6 = `click${EVENT_KEY$a}${DATA_API_KEY$6}`;\n\n/**\n * Class definition\n */\n\nclass Button extends BaseComponent {\n // Getters\n static get NAME() {\n return NAME$e;\n }\n\n // Public\n toggle() {\n // Toggle class and sync the `aria-pressed` attribute with the return value of the `.toggle()` method\n this._element.setAttribute('aria-pressed', this._element.classList.toggle(CLASS_NAME_ACTIVE$3));\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Button.getOrCreateInstance(this);\n if (config === 'toggle') {\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$6, SELECTOR_DATA_TOGGLE$5, event => {\n event.preventDefault();\n const button = 
event.target.closest(SELECTOR_DATA_TOGGLE$5);\n const data = Button.getOrCreateInstance(button);\n data.toggle();\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Button);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/swipe.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$d = 'swipe';\nconst EVENT_KEY$9 = '.bs.swipe';\nconst EVENT_TOUCHSTART = `touchstart${EVENT_KEY$9}`;\nconst EVENT_TOUCHMOVE = `touchmove${EVENT_KEY$9}`;\nconst EVENT_TOUCHEND = `touchend${EVENT_KEY$9}`;\nconst EVENT_POINTERDOWN = `pointerdown${EVENT_KEY$9}`;\nconst EVENT_POINTERUP = `pointerup${EVENT_KEY$9}`;\nconst POINTER_TYPE_TOUCH = 'touch';\nconst POINTER_TYPE_PEN = 'pen';\nconst CLASS_NAME_POINTER_EVENT = 'pointer-event';\nconst SWIPE_THRESHOLD = 40;\nconst Default$c = {\n endCallback: null,\n leftCallback: null,\n rightCallback: null\n};\nconst DefaultType$c = {\n endCallback: '(function|null)',\n leftCallback: '(function|null)',\n rightCallback: '(function|null)'\n};\n\n/**\n * Class definition\n */\n\nclass Swipe extends Config {\n constructor(element, config) {\n super();\n this._element = element;\n if (!element || !Swipe.isSupported()) {\n return;\n }\n this._config = this._getConfig(config);\n this._deltaX = 0;\n this._supportPointerEvents = Boolean(window.PointerEvent);\n this._initEvents();\n }\n\n // Getters\n static get Default() {\n return Default$c;\n }\n static get DefaultType() {\n return DefaultType$c;\n }\n static get NAME() {\n return NAME$d;\n }\n\n // Public\n dispose() {\n EventHandler.off(this._element, EVENT_KEY$9);\n }\n\n // Private\n _start(event) {\n if (!this._supportPointerEvents) {\n this._deltaX = event.touches[0].clientX;\n return;\n }\n if (this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX;\n }\n }\n _end(event) {\n if 
(this._eventIsPointerPenTouch(event)) {\n this._deltaX = event.clientX - this._deltaX;\n }\n this._handleSwipe();\n execute(this._config.endCallback);\n }\n _move(event) {\n this._deltaX = event.touches && event.touches.length > 1 ? 0 : event.touches[0].clientX - this._deltaX;\n }\n _handleSwipe() {\n const absDeltaX = Math.abs(this._deltaX);\n if (absDeltaX <= SWIPE_THRESHOLD) {\n return;\n }\n const direction = absDeltaX / this._deltaX;\n this._deltaX = 0;\n if (!direction) {\n return;\n }\n execute(direction > 0 ? this._config.rightCallback : this._config.leftCallback);\n }\n _initEvents() {\n if (this._supportPointerEvents) {\n EventHandler.on(this._element, EVENT_POINTERDOWN, event => this._start(event));\n EventHandler.on(this._element, EVENT_POINTERUP, event => this._end(event));\n this._element.classList.add(CLASS_NAME_POINTER_EVENT);\n } else {\n EventHandler.on(this._element, EVENT_TOUCHSTART, event => this._start(event));\n EventHandler.on(this._element, EVENT_TOUCHMOVE, event => this._move(event));\n EventHandler.on(this._element, EVENT_TOUCHEND, event => this._end(event));\n }\n }\n _eventIsPointerPenTouch(event) {\n return this._supportPointerEvents && (event.pointerType === POINTER_TYPE_PEN || event.pointerType === POINTER_TYPE_TOUCH);\n }\n\n // Static\n static isSupported() {\n return 'ontouchstart' in document.documentElement || navigator.maxTouchPoints > 0;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap carousel.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$c = 'carousel';\nconst DATA_KEY$8 = 'bs.carousel';\nconst EVENT_KEY$8 = `.${DATA_KEY$8}`;\nconst DATA_API_KEY$5 = '.data-api';\nconst ARROW_LEFT_KEY$1 = 'ArrowLeft';\nconst ARROW_RIGHT_KEY$1 = 'ArrowRight';\nconst TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to 
fire after touch\n\nconst ORDER_NEXT = 'next';\nconst ORDER_PREV = 'prev';\nconst DIRECTION_LEFT = 'left';\nconst DIRECTION_RIGHT = 'right';\nconst EVENT_SLIDE = `slide${EVENT_KEY$8}`;\nconst EVENT_SLID = `slid${EVENT_KEY$8}`;\nconst EVENT_KEYDOWN$1 = `keydown${EVENT_KEY$8}`;\nconst EVENT_MOUSEENTER$1 = `mouseenter${EVENT_KEY$8}`;\nconst EVENT_MOUSELEAVE$1 = `mouseleave${EVENT_KEY$8}`;\nconst EVENT_DRAG_START = `dragstart${EVENT_KEY$8}`;\nconst EVENT_LOAD_DATA_API$3 = `load${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst EVENT_CLICK_DATA_API$5 = `click${EVENT_KEY$8}${DATA_API_KEY$5}`;\nconst CLASS_NAME_CAROUSEL = 'carousel';\nconst CLASS_NAME_ACTIVE$2 = 'active';\nconst CLASS_NAME_SLIDE = 'slide';\nconst CLASS_NAME_END = 'carousel-item-end';\nconst CLASS_NAME_START = 'carousel-item-start';\nconst CLASS_NAME_NEXT = 'carousel-item-next';\nconst CLASS_NAME_PREV = 'carousel-item-prev';\nconst SELECTOR_ACTIVE = '.active';\nconst SELECTOR_ITEM = '.carousel-item';\nconst SELECTOR_ACTIVE_ITEM = SELECTOR_ACTIVE + SELECTOR_ITEM;\nconst SELECTOR_ITEM_IMG = '.carousel-item img';\nconst SELECTOR_INDICATORS = '.carousel-indicators';\nconst SELECTOR_DATA_SLIDE = '[data-bs-slide], [data-bs-slide-to]';\nconst SELECTOR_DATA_RIDE = '[data-bs-ride=\"carousel\"]';\nconst KEY_TO_DIRECTION = {\n [ARROW_LEFT_KEY$1]: DIRECTION_RIGHT,\n [ARROW_RIGHT_KEY$1]: DIRECTION_LEFT\n};\nconst Default$b = {\n interval: 5000,\n keyboard: true,\n pause: 'hover',\n ride: false,\n touch: true,\n wrap: true\n};\nconst DefaultType$b = {\n interval: '(number|boolean)',\n // TODO:v6 remove boolean support\n keyboard: 'boolean',\n pause: '(string|boolean)',\n ride: '(boolean|string)',\n touch: 'boolean',\n wrap: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Carousel extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._interval = null;\n this._activeElement = null;\n this._isSliding = false;\n this.touchTimeout = null;\n this._swipeHelper = null;\n this._indicatorsElement 
= SelectorEngine.findOne(SELECTOR_INDICATORS, this._element);\n this._addEventListeners();\n if (this._config.ride === CLASS_NAME_CAROUSEL) {\n this.cycle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$b;\n }\n static get DefaultType() {\n return DefaultType$b;\n }\n static get NAME() {\n return NAME$c;\n }\n\n // Public\n next() {\n this._slide(ORDER_NEXT);\n }\n nextWhenVisible() {\n // FIXME TODO use `document.visibilityState`\n // Don't call next when the page isn't visible\n // or the carousel or its parent isn't visible\n if (!document.hidden && isVisible(this._element)) {\n this.next();\n }\n }\n prev() {\n this._slide(ORDER_PREV);\n }\n pause() {\n if (this._isSliding) {\n triggerTransitionEnd(this._element);\n }\n this._clearInterval();\n }\n cycle() {\n this._clearInterval();\n this._updateInterval();\n this._interval = setInterval(() => this.nextWhenVisible(), this._config.interval);\n }\n _maybeEnableCycle() {\n if (!this._config.ride) {\n return;\n }\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.cycle());\n return;\n }\n this.cycle();\n }\n to(index) {\n const items = this._getItems();\n if (index > items.length - 1 || index < 0) {\n return;\n }\n if (this._isSliding) {\n EventHandler.one(this._element, EVENT_SLID, () => this.to(index));\n return;\n }\n const activeIndex = this._getItemIndex(this._getActive());\n if (activeIndex === index) {\n return;\n }\n const order = index > activeIndex ? 
ORDER_NEXT : ORDER_PREV;\n this._slide(order, items[index]);\n }\n dispose() {\n if (this._swipeHelper) {\n this._swipeHelper.dispose();\n }\n super.dispose();\n }\n\n // Private\n _configAfterMerge(config) {\n config.defaultInterval = config.interval;\n return config;\n }\n _addEventListeners() {\n if (this._config.keyboard) {\n EventHandler.on(this._element, EVENT_KEYDOWN$1, event => this._keydown(event));\n }\n if (this._config.pause === 'hover') {\n EventHandler.on(this._element, EVENT_MOUSEENTER$1, () => this.pause());\n EventHandler.on(this._element, EVENT_MOUSELEAVE$1, () => this._maybeEnableCycle());\n }\n if (this._config.touch && Swipe.isSupported()) {\n this._addTouchEventListeners();\n }\n }\n _addTouchEventListeners() {\n for (const img of SelectorEngine.find(SELECTOR_ITEM_IMG, this._element)) {\n EventHandler.on(img, EVENT_DRAG_START, event => event.preventDefault());\n }\n const endCallBack = () => {\n if (this._config.pause !== 'hover') {\n return;\n }\n\n // If it's a touch-enabled device, mouseenter/leave are fired as\n // part of the mouse compatibility events on first tap - the carousel\n // would stop cycling until user tapped out of it;\n // here, we listen for touchend, explicitly pause the carousel\n // (as if it's the second time we tap on it, mouseenter compat event\n // is NOT fired) and after a timeout (to allow for mouse compatibility\n // events to fire) we explicitly restart cycling\n\n this.pause();\n if (this.touchTimeout) {\n clearTimeout(this.touchTimeout);\n }\n this.touchTimeout = setTimeout(() => this._maybeEnableCycle(), TOUCHEVENT_COMPAT_WAIT + this._config.interval);\n };\n const swipeConfig = {\n leftCallback: () => this._slide(this._directionToOrder(DIRECTION_LEFT)),\n rightCallback: () => this._slide(this._directionToOrder(DIRECTION_RIGHT)),\n endCallback: endCallBack\n };\n this._swipeHelper = new Swipe(this._element, swipeConfig);\n }\n _keydown(event) {\n if (/input|textarea/i.test(event.target.tagName)) {\n return;\n 
}\n const direction = KEY_TO_DIRECTION[event.key];\n if (direction) {\n event.preventDefault();\n this._slide(this._directionToOrder(direction));\n }\n }\n _getItemIndex(element) {\n return this._getItems().indexOf(element);\n }\n _setActiveIndicatorElement(index) {\n if (!this._indicatorsElement) {\n return;\n }\n const activeIndicator = SelectorEngine.findOne(SELECTOR_ACTIVE, this._indicatorsElement);\n activeIndicator.classList.remove(CLASS_NAME_ACTIVE$2);\n activeIndicator.removeAttribute('aria-current');\n const newActiveIndicator = SelectorEngine.findOne(`[data-bs-slide-to=\"${index}\"]`, this._indicatorsElement);\n if (newActiveIndicator) {\n newActiveIndicator.classList.add(CLASS_NAME_ACTIVE$2);\n newActiveIndicator.setAttribute('aria-current', 'true');\n }\n }\n _updateInterval() {\n const element = this._activeElement || this._getActive();\n if (!element) {\n return;\n }\n const elementInterval = Number.parseInt(element.getAttribute('data-bs-interval'), 10);\n this._config.interval = elementInterval || this._config.defaultInterval;\n }\n _slide(order, element = null) {\n if (this._isSliding) {\n return;\n }\n const activeElement = this._getActive();\n const isNext = order === ORDER_NEXT;\n const nextElement = element || getNextActiveElement(this._getItems(), activeElement, isNext, this._config.wrap);\n if (nextElement === activeElement) {\n return;\n }\n const nextElementIndex = this._getItemIndex(nextElement);\n const triggerEvent = eventName => {\n return EventHandler.trigger(this._element, eventName, {\n relatedTarget: nextElement,\n direction: this._orderToDirection(order),\n from: this._getItemIndex(activeElement),\n to: nextElementIndex\n });\n };\n const slideEvent = triggerEvent(EVENT_SLIDE);\n if (slideEvent.defaultPrevented) {\n return;\n }\n if (!activeElement || !nextElement) {\n // Some weirdness is happening, so we bail\n // TODO: change tests that use empty divs to avoid this check\n return;\n }\n const isCycling = 
Boolean(this._interval);\n this.pause();\n this._isSliding = true;\n this._setActiveIndicatorElement(nextElementIndex);\n this._activeElement = nextElement;\n const directionalClassName = isNext ? CLASS_NAME_START : CLASS_NAME_END;\n const orderClassName = isNext ? CLASS_NAME_NEXT : CLASS_NAME_PREV;\n nextElement.classList.add(orderClassName);\n reflow(nextElement);\n activeElement.classList.add(directionalClassName);\n nextElement.classList.add(directionalClassName);\n const completeCallBack = () => {\n nextElement.classList.remove(directionalClassName, orderClassName);\n nextElement.classList.add(CLASS_NAME_ACTIVE$2);\n activeElement.classList.remove(CLASS_NAME_ACTIVE$2, orderClassName, directionalClassName);\n this._isSliding = false;\n triggerEvent(EVENT_SLID);\n };\n this._queueCallback(completeCallBack, activeElement, this._isAnimated());\n if (isCycling) {\n this.cycle();\n }\n }\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_SLIDE);\n }\n _getActive() {\n return SelectorEngine.findOne(SELECTOR_ACTIVE_ITEM, this._element);\n }\n _getItems() {\n return SelectorEngine.find(SELECTOR_ITEM, this._element);\n }\n _clearInterval() {\n if (this._interval) {\n clearInterval(this._interval);\n this._interval = null;\n }\n }\n _directionToOrder(direction) {\n if (isRTL()) {\n return direction === DIRECTION_LEFT ? ORDER_PREV : ORDER_NEXT;\n }\n return direction === DIRECTION_LEFT ? ORDER_NEXT : ORDER_PREV;\n }\n _orderToDirection(order) {\n if (isRTL()) {\n return order === ORDER_PREV ? DIRECTION_LEFT : DIRECTION_RIGHT;\n }\n return order === ORDER_PREV ? 
DIRECTION_RIGHT : DIRECTION_LEFT;\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Carousel.getOrCreateInstance(this, config);\n if (typeof config === 'number') {\n data.to(config);\n return;\n }\n if (typeof config === 'string') {\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$5, SELECTOR_DATA_SLIDE, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (!target || !target.classList.contains(CLASS_NAME_CAROUSEL)) {\n return;\n }\n event.preventDefault();\n const carousel = Carousel.getOrCreateInstance(target);\n const slideIndex = this.getAttribute('data-bs-slide-to');\n if (slideIndex) {\n carousel.to(slideIndex);\n carousel._maybeEnableCycle();\n return;\n }\n if (Manipulator.getDataAttribute(this, 'slide') === 'next') {\n carousel.next();\n carousel._maybeEnableCycle();\n return;\n }\n carousel.prev();\n carousel._maybeEnableCycle();\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$3, () => {\n const carousels = SelectorEngine.find(SELECTOR_DATA_RIDE);\n for (const carousel of carousels) {\n Carousel.getOrCreateInstance(carousel);\n }\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Carousel);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap collapse.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$b = 'collapse';\nconst DATA_KEY$7 = 'bs.collapse';\nconst EVENT_KEY$7 = `.${DATA_KEY$7}`;\nconst DATA_API_KEY$4 = '.data-api';\nconst EVENT_SHOW$6 = `show${EVENT_KEY$7}`;\nconst EVENT_SHOWN$6 = `shown${EVENT_KEY$7}`;\nconst EVENT_HIDE$6 = 
`hide${EVENT_KEY$7}`;\nconst EVENT_HIDDEN$6 = `hidden${EVENT_KEY$7}`;\nconst EVENT_CLICK_DATA_API$4 = `click${EVENT_KEY$7}${DATA_API_KEY$4}`;\nconst CLASS_NAME_SHOW$7 = 'show';\nconst CLASS_NAME_COLLAPSE = 'collapse';\nconst CLASS_NAME_COLLAPSING = 'collapsing';\nconst CLASS_NAME_COLLAPSED = 'collapsed';\nconst CLASS_NAME_DEEPER_CHILDREN = `:scope .${CLASS_NAME_COLLAPSE} .${CLASS_NAME_COLLAPSE}`;\nconst CLASS_NAME_HORIZONTAL = 'collapse-horizontal';\nconst WIDTH = 'width';\nconst HEIGHT = 'height';\nconst SELECTOR_ACTIVES = '.collapse.show, .collapse.collapsing';\nconst SELECTOR_DATA_TOGGLE$4 = '[data-bs-toggle=\"collapse\"]';\nconst Default$a = {\n parent: null,\n toggle: true\n};\nconst DefaultType$a = {\n parent: '(null|element)',\n toggle: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Collapse extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isTransitioning = false;\n this._triggerArray = [];\n const toggleList = SelectorEngine.find(SELECTOR_DATA_TOGGLE$4);\n for (const elem of toggleList) {\n const selector = SelectorEngine.getSelectorFromElement(elem);\n const filterElement = SelectorEngine.find(selector).filter(foundElement => foundElement === this._element);\n if (selector !== null && filterElement.length) {\n this._triggerArray.push(elem);\n }\n }\n this._initializeChildren();\n if (!this._config.parent) {\n this._addAriaAndCollapsedClass(this._triggerArray, this._isShown());\n }\n if (this._config.toggle) {\n this.toggle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$a;\n }\n static get DefaultType() {\n return DefaultType$a;\n }\n static get NAME() {\n return NAME$b;\n }\n\n // Public\n toggle() {\n if (this._isShown()) {\n this.hide();\n } else {\n this.show();\n }\n }\n show() {\n if (this._isTransitioning || this._isShown()) {\n return;\n }\n let activeChildren = [];\n\n // find active children\n if (this._config.parent) {\n activeChildren = 
this._getFirstLevelChildren(SELECTOR_ACTIVES).filter(element => element !== this._element).map(element => Collapse.getOrCreateInstance(element, {\n toggle: false\n }));\n }\n if (activeChildren.length && activeChildren[0]._isTransitioning) {\n return;\n }\n const startEvent = EventHandler.trigger(this._element, EVENT_SHOW$6);\n if (startEvent.defaultPrevented) {\n return;\n }\n for (const activeInstance of activeChildren) {\n activeInstance.hide();\n }\n const dimension = this._getDimension();\n this._element.classList.remove(CLASS_NAME_COLLAPSE);\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n this._element.style[dimension] = 0;\n this._addAriaAndCollapsedClass(this._triggerArray, true);\n this._isTransitioning = true;\n const complete = () => {\n this._isTransitioning = false;\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n this._element.classList.add(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n this._element.style[dimension] = '';\n EventHandler.trigger(this._element, EVENT_SHOWN$6);\n };\n const capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);\n const scrollSize = `scroll${capitalizedDimension}`;\n this._queueCallback(complete, this._element, true);\n this._element.style[dimension] = `${this._element[scrollSize]}px`;\n }\n hide() {\n if (this._isTransitioning || !this._isShown()) {\n return;\n }\n const startEvent = EventHandler.trigger(this._element, EVENT_HIDE$6);\n if (startEvent.defaultPrevented) {\n return;\n }\n const dimension = this._getDimension();\n this._element.style[dimension] = `${this._element.getBoundingClientRect()[dimension]}px`;\n reflow(this._element);\n this._element.classList.add(CLASS_NAME_COLLAPSING);\n this._element.classList.remove(CLASS_NAME_COLLAPSE, CLASS_NAME_SHOW$7);\n for (const trigger of this._triggerArray) {\n const element = SelectorEngine.getElementFromSelector(trigger);\n if (element && !this._isShown(element)) {\n this._addAriaAndCollapsedClass([trigger], false);\n }\n }\n 
this._isTransitioning = true;\n const complete = () => {\n this._isTransitioning = false;\n this._element.classList.remove(CLASS_NAME_COLLAPSING);\n this._element.classList.add(CLASS_NAME_COLLAPSE);\n EventHandler.trigger(this._element, EVENT_HIDDEN$6);\n };\n this._element.style[dimension] = '';\n this._queueCallback(complete, this._element, true);\n }\n _isShown(element = this._element) {\n return element.classList.contains(CLASS_NAME_SHOW$7);\n }\n\n // Private\n _configAfterMerge(config) {\n config.toggle = Boolean(config.toggle); // Coerce string values\n config.parent = getElement(config.parent);\n return config;\n }\n _getDimension() {\n return this._element.classList.contains(CLASS_NAME_HORIZONTAL) ? WIDTH : HEIGHT;\n }\n _initializeChildren() {\n if (!this._config.parent) {\n return;\n }\n const children = this._getFirstLevelChildren(SELECTOR_DATA_TOGGLE$4);\n for (const element of children) {\n const selected = SelectorEngine.getElementFromSelector(element);\n if (selected) {\n this._addAriaAndCollapsedClass([element], this._isShown(selected));\n }\n }\n }\n _getFirstLevelChildren(selector) {\n const children = SelectorEngine.find(CLASS_NAME_DEEPER_CHILDREN, this._config.parent);\n // remove children if greater depth\n return SelectorEngine.find(selector, this._config.parent).filter(element => !children.includes(element));\n }\n _addAriaAndCollapsedClass(triggerArray, isOpen) {\n if (!triggerArray.length) {\n return;\n }\n for (const element of triggerArray) {\n element.classList.toggle(CLASS_NAME_COLLAPSED, !isOpen);\n element.setAttribute('aria-expanded', isOpen);\n }\n }\n\n // Static\n static jQueryInterface(config) {\n const _config = {};\n if (typeof config === 'string' && /show|hide/.test(config)) {\n _config.toggle = false;\n }\n return this.each(function () {\n const data = Collapse.getOrCreateInstance(this, _config);\n if (typeof config === 'string') {\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named 
\"${config}\"`);\n }\n data[config]();\n }\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$4, SELECTOR_DATA_TOGGLE$4, function (event) {\n // preventDefault only for elements (which change the URL) not inside the collapsible element\n if (event.target.tagName === 'A' || event.delegateTarget && event.delegateTarget.tagName === 'A') {\n event.preventDefault();\n }\n for (const element of SelectorEngine.getMultipleElementsFromSelector(this)) {\n Collapse.getOrCreateInstance(element, {\n toggle: false\n }).toggle();\n }\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Collapse);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap dropdown.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$a = 'dropdown';\nconst DATA_KEY$6 = 'bs.dropdown';\nconst EVENT_KEY$6 = `.${DATA_KEY$6}`;\nconst DATA_API_KEY$3 = '.data-api';\nconst ESCAPE_KEY$2 = 'Escape';\nconst TAB_KEY$1 = 'Tab';\nconst ARROW_UP_KEY$1 = 'ArrowUp';\nconst ARROW_DOWN_KEY$1 = 'ArrowDown';\nconst RIGHT_MOUSE_BUTTON = 2; // MouseEvent.button value for the secondary button, usually the right button\n\nconst EVENT_HIDE$5 = `hide${EVENT_KEY$6}`;\nconst EVENT_HIDDEN$5 = `hidden${EVENT_KEY$6}`;\nconst EVENT_SHOW$5 = `show${EVENT_KEY$6}`;\nconst EVENT_SHOWN$5 = `shown${EVENT_KEY$6}`;\nconst EVENT_CLICK_DATA_API$3 = `click${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYDOWN_DATA_API = `keydown${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst EVENT_KEYUP_DATA_API = `keyup${EVENT_KEY$6}${DATA_API_KEY$3}`;\nconst CLASS_NAME_SHOW$6 = 'show';\nconst CLASS_NAME_DROPUP = 'dropup';\nconst CLASS_NAME_DROPEND = 'dropend';\nconst CLASS_NAME_DROPSTART = 'dropstart';\nconst CLASS_NAME_DROPUP_CENTER = 'dropup-center';\nconst CLASS_NAME_DROPDOWN_CENTER = 'dropdown-center';\nconst 
SELECTOR_DATA_TOGGLE$3 = '[data-bs-toggle=\"dropdown\"]:not(.disabled):not(:disabled)';\nconst SELECTOR_DATA_TOGGLE_SHOWN = `${SELECTOR_DATA_TOGGLE$3}.${CLASS_NAME_SHOW$6}`;\nconst SELECTOR_MENU = '.dropdown-menu';\nconst SELECTOR_NAVBAR = '.navbar';\nconst SELECTOR_NAVBAR_NAV = '.navbar-nav';\nconst SELECTOR_VISIBLE_ITEMS = '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)';\nconst PLACEMENT_TOP = isRTL() ? 'top-end' : 'top-start';\nconst PLACEMENT_TOPEND = isRTL() ? 'top-start' : 'top-end';\nconst PLACEMENT_BOTTOM = isRTL() ? 'bottom-end' : 'bottom-start';\nconst PLACEMENT_BOTTOMEND = isRTL() ? 'bottom-start' : 'bottom-end';\nconst PLACEMENT_RIGHT = isRTL() ? 'left-start' : 'right-start';\nconst PLACEMENT_LEFT = isRTL() ? 'right-start' : 'left-start';\nconst PLACEMENT_TOPCENTER = 'top';\nconst PLACEMENT_BOTTOMCENTER = 'bottom';\nconst Default$9 = {\n autoClose: true,\n boundary: 'clippingParents',\n display: 'dynamic',\n offset: [0, 2],\n popperConfig: null,\n reference: 'toggle'\n};\nconst DefaultType$9 = {\n autoClose: '(boolean|string)',\n boundary: '(string|element)',\n display: 'string',\n offset: '(array|string|function)',\n popperConfig: '(null|object|function)',\n reference: '(string|element|object)'\n};\n\n/**\n * Class definition\n */\n\nclass Dropdown extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._popper = null;\n this._parent = this._element.parentNode; // dropdown wrapper\n // TODO: v6 revert #37011 & change markup https://getbootstrap.com/docs/5.3/forms/input-group/\n this._menu = SelectorEngine.next(this._element, SELECTOR_MENU)[0] || SelectorEngine.prev(this._element, SELECTOR_MENU)[0] || SelectorEngine.findOne(SELECTOR_MENU, this._parent);\n this._inNavbar = this._detectNavbar();\n }\n\n // Getters\n static get Default() {\n return Default$9;\n }\n static get DefaultType() {\n return DefaultType$9;\n }\n static get NAME() {\n return NAME$a;\n }\n\n // Public\n toggle() {\n return 
this._isShown() ? this.hide() : this.show();\n }\n show() {\n if (isDisabled(this._element) || this._isShown()) {\n return;\n }\n const relatedTarget = {\n relatedTarget: this._element\n };\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$5, relatedTarget);\n if (showEvent.defaultPrevented) {\n return;\n }\n this._createPopper();\n\n // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n if ('ontouchstart' in document.documentElement && !this._parent.closest(SELECTOR_NAVBAR_NAV)) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n this._element.focus();\n this._element.setAttribute('aria-expanded', true);\n this._menu.classList.add(CLASS_NAME_SHOW$6);\n this._element.classList.add(CLASS_NAME_SHOW$6);\n EventHandler.trigger(this._element, EVENT_SHOWN$5, relatedTarget);\n }\n hide() {\n if (isDisabled(this._element) || !this._isShown()) {\n return;\n }\n const relatedTarget = {\n relatedTarget: this._element\n };\n this._completeHide(relatedTarget);\n }\n dispose() {\n if (this._popper) {\n this._popper.destroy();\n }\n super.dispose();\n }\n update() {\n this._inNavbar = this._detectNavbar();\n if (this._popper) {\n this._popper.update();\n }\n }\n\n // Private\n _completeHide(relatedTarget) {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$5, relatedTarget);\n if (hideEvent.defaultPrevented) {\n return;\n }\n\n // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n if (this._popper) {\n this._popper.destroy();\n }\n 
this._menu.classList.remove(CLASS_NAME_SHOW$6);\n this._element.classList.remove(CLASS_NAME_SHOW$6);\n this._element.setAttribute('aria-expanded', 'false');\n Manipulator.removeDataAttribute(this._menu, 'popper');\n EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget);\n }\n _getConfig(config) {\n config = super._getConfig(config);\n if (typeof config.reference === 'object' && !isElement(config.reference) && typeof config.reference.getBoundingClientRect !== 'function') {\n // Popper virtual elements require a getBoundingClientRect method\n throw new TypeError(`${NAME$a.toUpperCase()}: Option \"reference\" provided type \"object\" without a required \"getBoundingClientRect\" method.`);\n }\n return config;\n }\n _createPopper() {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s dropdowns require Popper (https://popper.js.org)');\n }\n let referenceElement = this._element;\n if (this._config.reference === 'parent') {\n referenceElement = this._parent;\n } else if (isElement(this._config.reference)) {\n referenceElement = getElement(this._config.reference);\n } else if (typeof this._config.reference === 'object') {\n referenceElement = this._config.reference;\n }\n const popperConfig = this._getPopperConfig();\n this._popper = Popper.createPopper(referenceElement, this._menu, popperConfig);\n }\n _isShown() {\n return this._menu.classList.contains(CLASS_NAME_SHOW$6);\n }\n _getPlacement() {\n const parentDropdown = this._parent;\n if (parentDropdown.classList.contains(CLASS_NAME_DROPEND)) {\n return PLACEMENT_RIGHT;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPSTART)) {\n return PLACEMENT_LEFT;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP_CENTER)) {\n return PLACEMENT_TOPCENTER;\n }\n if (parentDropdown.classList.contains(CLASS_NAME_DROPDOWN_CENTER)) {\n return PLACEMENT_BOTTOMCENTER;\n }\n\n // We need to trim the value because custom properties can also include spaces\n const isEnd = 
getComputedStyle(this._menu).getPropertyValue('--bs-position').trim() === 'end';\n if (parentDropdown.classList.contains(CLASS_NAME_DROPUP)) {\n return isEnd ? PLACEMENT_TOPEND : PLACEMENT_TOP;\n }\n return isEnd ? PLACEMENT_BOTTOMEND : PLACEMENT_BOTTOM;\n }\n _detectNavbar() {\n return this._element.closest(SELECTOR_NAVBAR) !== null;\n }\n _getOffset() {\n const {\n offset\n } = this._config;\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n return offset;\n }\n _getPopperConfig() {\n const defaultBsPopperConfig = {\n placement: this._getPlacement(),\n modifiers: [{\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }]\n };\n\n // Disable Popper if we have a static display or Dropdown is in Navbar\n if (this._inNavbar || this._config.display === 'static') {\n Manipulator.setDataAttribute(this._menu, 'popper', 'static'); // TODO: v6 remove\n defaultBsPopperConfig.modifiers = [{\n name: 'applyStyles',\n enabled: false\n }];\n }\n return {\n ...defaultBsPopperConfig,\n ...execute(this._config.popperConfig, [defaultBsPopperConfig])\n };\n }\n _selectMenuItem({\n key,\n target\n }) {\n const items = SelectorEngine.find(SELECTOR_VISIBLE_ITEMS, this._menu).filter(element => isVisible(element));\n if (!items.length) {\n return;\n }\n\n // if target isn't included in items (e.g. 
when expanding the dropdown)\n // allow cycling to get the last item in case key equals ARROW_UP_KEY\n getNextActiveElement(items, target, key === ARROW_DOWN_KEY$1, !items.includes(target)).focus();\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Dropdown.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n static clearMenus(event) {\n if (event.button === RIGHT_MOUSE_BUTTON || event.type === 'keyup' && event.key !== TAB_KEY$1) {\n return;\n }\n const openToggles = SelectorEngine.find(SELECTOR_DATA_TOGGLE_SHOWN);\n for (const toggle of openToggles) {\n const context = Dropdown.getInstance(toggle);\n if (!context || context._config.autoClose === false) {\n continue;\n }\n const composedPath = event.composedPath();\n const isMenuTarget = composedPath.includes(context._menu);\n if (composedPath.includes(context._element) || context._config.autoClose === 'inside' && !isMenuTarget || context._config.autoClose === 'outside' && isMenuTarget) {\n continue;\n }\n\n // Tab navigation through the dropdown menu or events from contained inputs shouldn't close the menu\n if (context._menu.contains(event.target) && (event.type === 'keyup' && event.key === TAB_KEY$1 || /input|select|option|textarea|form/i.test(event.target.tagName))) {\n continue;\n }\n const relatedTarget = {\n relatedTarget: context._element\n };\n if (event.type === 'click') {\n relatedTarget.clickEvent = event;\n }\n context._completeHide(relatedTarget);\n }\n }\n static dataApiKeydownHandler(event) {\n // If not an UP | DOWN | ESCAPE key => not a dropdown command\n // If input/textarea && if key is other than ESCAPE => not a dropdown command\n\n const isInput = /input|textarea/i.test(event.target.tagName);\n const isEscapeEvent = event.key === ESCAPE_KEY$2;\n const isUpOrDownEvent = 
[ARROW_UP_KEY$1, ARROW_DOWN_KEY$1].includes(event.key);\n if (!isUpOrDownEvent && !isEscapeEvent) {\n return;\n }\n if (isInput && !isEscapeEvent) {\n return;\n }\n event.preventDefault();\n\n // TODO: v6 revert #37011 & change markup https://getbootstrap.com/docs/5.3/forms/input-group/\n const getToggleButton = this.matches(SELECTOR_DATA_TOGGLE$3) ? this : SelectorEngine.prev(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.next(this, SELECTOR_DATA_TOGGLE$3)[0] || SelectorEngine.findOne(SELECTOR_DATA_TOGGLE$3, event.delegateTarget.parentNode);\n const instance = Dropdown.getOrCreateInstance(getToggleButton);\n if (isUpOrDownEvent) {\n event.stopPropagation();\n instance.show();\n instance._selectMenuItem(event);\n return;\n }\n if (instance._isShown()) {\n // else is escape and we check if it is shown\n event.stopPropagation();\n instance.hide();\n getToggleButton.focus();\n }\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_DATA_TOGGLE$3, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_KEYDOWN_DATA_API, SELECTOR_MENU, Dropdown.dataApiKeydownHandler);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_KEYUP_DATA_API, Dropdown.clearMenus);\nEventHandler.on(document, EVENT_CLICK_DATA_API$3, SELECTOR_DATA_TOGGLE$3, function (event) {\n event.preventDefault();\n Dropdown.getOrCreateInstance(this).toggle();\n});\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Dropdown);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/backdrop.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$9 = 'backdrop';\nconst CLASS_NAME_FADE$4 = 'fade';\nconst CLASS_NAME_SHOW$5 = 'show';\nconst EVENT_MOUSEDOWN = `mousedown.bs.${NAME$9}`;\nconst Default$8 = 
{\n className: 'modal-backdrop',\n clickCallback: null,\n isAnimated: false,\n isVisible: true,\n // if false, we use the backdrop helper without adding any element to the dom\n rootElement: 'body' // give the choice to place backdrop under different elements\n};\n\nconst DefaultType$8 = {\n className: 'string',\n clickCallback: '(function|null)',\n isAnimated: 'boolean',\n isVisible: 'boolean',\n rootElement: '(element|string)'\n};\n\n/**\n * Class definition\n */\n\nclass Backdrop extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isAppended = false;\n this._element = null;\n }\n\n // Getters\n static get Default() {\n return Default$8;\n }\n static get DefaultType() {\n return DefaultType$8;\n }\n static get NAME() {\n return NAME$9;\n }\n\n // Public\n show(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n this._append();\n const element = this._getElement();\n if (this._config.isAnimated) {\n reflow(element);\n }\n element.classList.add(CLASS_NAME_SHOW$5);\n this._emulateAnimation(() => {\n execute(callback);\n });\n }\n hide(callback) {\n if (!this._config.isVisible) {\n execute(callback);\n return;\n }\n this._getElement().classList.remove(CLASS_NAME_SHOW$5);\n this._emulateAnimation(() => {\n this.dispose();\n execute(callback);\n });\n }\n dispose() {\n if (!this._isAppended) {\n return;\n }\n EventHandler.off(this._element, EVENT_MOUSEDOWN);\n this._element.remove();\n this._isAppended = false;\n }\n\n // Private\n _getElement() {\n if (!this._element) {\n const backdrop = document.createElement('div');\n backdrop.className = this._config.className;\n if (this._config.isAnimated) {\n backdrop.classList.add(CLASS_NAME_FADE$4);\n }\n this._element = backdrop;\n }\n return this._element;\n }\n _configAfterMerge(config) {\n // use getElement() with the default \"body\" to get a fresh Element on each instantiation\n config.rootElement = getElement(config.rootElement);\n return 
config;\n }\n _append() {\n if (this._isAppended) {\n return;\n }\n const element = this._getElement();\n this._config.rootElement.append(element);\n EventHandler.on(element, EVENT_MOUSEDOWN, () => {\n execute(this._config.clickCallback);\n });\n this._isAppended = true;\n }\n _emulateAnimation(callback) {\n executeAfterTransition(callback, this._getElement(), this._config.isAnimated);\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/focustrap.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$8 = 'focustrap';\nconst DATA_KEY$5 = 'bs.focustrap';\nconst EVENT_KEY$5 = `.${DATA_KEY$5}`;\nconst EVENT_FOCUSIN$2 = `focusin${EVENT_KEY$5}`;\nconst EVENT_KEYDOWN_TAB = `keydown.tab${EVENT_KEY$5}`;\nconst TAB_KEY = 'Tab';\nconst TAB_NAV_FORWARD = 'forward';\nconst TAB_NAV_BACKWARD = 'backward';\nconst Default$7 = {\n autofocus: true,\n trapElement: null // The element to trap focus inside of\n};\n\nconst DefaultType$7 = {\n autofocus: 'boolean',\n trapElement: 'element'\n};\n\n/**\n * Class definition\n */\n\nclass FocusTrap extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n this._isActive = false;\n this._lastTabNavDirection = null;\n }\n\n // Getters\n static get Default() {\n return Default$7;\n }\n static get DefaultType() {\n return DefaultType$7;\n }\n static get NAME() {\n return NAME$8;\n }\n\n // Public\n activate() {\n if (this._isActive) {\n return;\n }\n if (this._config.autofocus) {\n this._config.trapElement.focus();\n }\n EventHandler.off(document, EVENT_KEY$5); // guard against infinite focus loop\n EventHandler.on(document, EVENT_FOCUSIN$2, event => this._handleFocusin(event));\n EventHandler.on(document, EVENT_KEYDOWN_TAB, event => this._handleKeydown(event));\n this._isActive = true;\n }\n 
deactivate() {\n if (!this._isActive) {\n return;\n }\n this._isActive = false;\n EventHandler.off(document, EVENT_KEY$5);\n }\n\n // Private\n _handleFocusin(event) {\n const {\n trapElement\n } = this._config;\n if (event.target === document || event.target === trapElement || trapElement.contains(event.target)) {\n return;\n }\n const elements = SelectorEngine.focusableChildren(trapElement);\n if (elements.length === 0) {\n trapElement.focus();\n } else if (this._lastTabNavDirection === TAB_NAV_BACKWARD) {\n elements[elements.length - 1].focus();\n } else {\n elements[0].focus();\n }\n }\n _handleKeydown(event) {\n if (event.key !== TAB_KEY) {\n return;\n }\n this._lastTabNavDirection = event.shiftKey ? TAB_NAV_BACKWARD : TAB_NAV_FORWARD;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/scrollBar.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst SELECTOR_FIXED_CONTENT = '.fixed-top, .fixed-bottom, .is-fixed, .sticky-top';\nconst SELECTOR_STICKY_CONTENT = '.sticky-top';\nconst PROPERTY_PADDING = 'padding-right';\nconst PROPERTY_MARGIN = 'margin-right';\n\n/**\n * Class definition\n */\n\nclass ScrollBarHelper {\n constructor() {\n this._element = document.body;\n }\n\n // Public\n getWidth() {\n // https://developer.mozilla.org/en-US/docs/Web/API/Window/innerWidth#usage_notes\n const documentWidth = document.documentElement.clientWidth;\n return Math.abs(window.innerWidth - documentWidth);\n }\n hide() {\n const width = this.getWidth();\n this._disableOverFlow();\n // give padding to element to balance the hidden scrollbar width\n this._setElementAttributes(this._element, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n // trick: We adjust positive paddingRight and negative marginRight to sticky-top elements to keep showing fullwidth\n 
this._setElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING, calculatedValue => calculatedValue + width);\n this._setElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN, calculatedValue => calculatedValue - width);\n }\n reset() {\n this._resetElementAttributes(this._element, 'overflow');\n this._resetElementAttributes(this._element, PROPERTY_PADDING);\n this._resetElementAttributes(SELECTOR_FIXED_CONTENT, PROPERTY_PADDING);\n this._resetElementAttributes(SELECTOR_STICKY_CONTENT, PROPERTY_MARGIN);\n }\n isOverflowing() {\n return this.getWidth() > 0;\n }\n\n // Private\n _disableOverFlow() {\n this._saveInitialAttribute(this._element, 'overflow');\n this._element.style.overflow = 'hidden';\n }\n _setElementAttributes(selector, styleProperty, callback) {\n const scrollbarWidth = this.getWidth();\n const manipulationCallBack = element => {\n if (element !== this._element && window.innerWidth > element.clientWidth + scrollbarWidth) {\n return;\n }\n this._saveInitialAttribute(element, styleProperty);\n const calculatedValue = window.getComputedStyle(element).getPropertyValue(styleProperty);\n element.style.setProperty(styleProperty, `${callback(Number.parseFloat(calculatedValue))}px`);\n };\n this._applyManipulationCallback(selector, manipulationCallBack);\n }\n _saveInitialAttribute(element, styleProperty) {\n const actualValue = element.style.getPropertyValue(styleProperty);\n if (actualValue) {\n Manipulator.setDataAttribute(element, styleProperty, actualValue);\n }\n }\n _resetElementAttributes(selector, styleProperty) {\n const manipulationCallBack = element => {\n const value = Manipulator.getDataAttribute(element, styleProperty);\n // We only want to remove the property if the value is `null`; the value can also be zero\n if (value === null) {\n element.style.removeProperty(styleProperty);\n return;\n }\n Manipulator.removeDataAttribute(element, styleProperty);\n element.style.setProperty(styleProperty, value);\n };\n 
this._applyManipulationCallback(selector, manipulationCallBack);\n }\n _applyManipulationCallback(selector, callBack) {\n if (isElement(selector)) {\n callBack(selector);\n return;\n }\n for (const sel of SelectorEngine.find(selector, this._element)) {\n callBack(sel);\n }\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap modal.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$7 = 'modal';\nconst DATA_KEY$4 = 'bs.modal';\nconst EVENT_KEY$4 = `.${DATA_KEY$4}`;\nconst DATA_API_KEY$2 = '.data-api';\nconst ESCAPE_KEY$1 = 'Escape';\nconst EVENT_HIDE$4 = `hide${EVENT_KEY$4}`;\nconst EVENT_HIDE_PREVENTED$1 = `hidePrevented${EVENT_KEY$4}`;\nconst EVENT_HIDDEN$4 = `hidden${EVENT_KEY$4}`;\nconst EVENT_SHOW$4 = `show${EVENT_KEY$4}`;\nconst EVENT_SHOWN$4 = `shown${EVENT_KEY$4}`;\nconst EVENT_RESIZE$1 = `resize${EVENT_KEY$4}`;\nconst EVENT_CLICK_DISMISS = `click.dismiss${EVENT_KEY$4}`;\nconst EVENT_MOUSEDOWN_DISMISS = `mousedown.dismiss${EVENT_KEY$4}`;\nconst EVENT_KEYDOWN_DISMISS$1 = `keydown.dismiss${EVENT_KEY$4}`;\nconst EVENT_CLICK_DATA_API$2 = `click${EVENT_KEY$4}${DATA_API_KEY$2}`;\nconst CLASS_NAME_OPEN = 'modal-open';\nconst CLASS_NAME_FADE$3 = 'fade';\nconst CLASS_NAME_SHOW$4 = 'show';\nconst CLASS_NAME_STATIC = 'modal-static';\nconst OPEN_SELECTOR$1 = '.modal.show';\nconst SELECTOR_DIALOG = '.modal-dialog';\nconst SELECTOR_MODAL_BODY = '.modal-body';\nconst SELECTOR_DATA_TOGGLE$2 = '[data-bs-toggle=\"modal\"]';\nconst Default$6 = {\n backdrop: true,\n focus: true,\n keyboard: true\n};\nconst DefaultType$6 = {\n backdrop: '(boolean|string)',\n focus: 'boolean',\n keyboard: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Modal extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._dialog = 
SelectorEngine.findOne(SELECTOR_DIALOG, this._element);\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._isShown = false;\n this._isTransitioning = false;\n this._scrollBar = new ScrollBarHelper();\n this._addEventListeners();\n }\n\n // Getters\n static get Default() {\n return Default$6;\n }\n static get DefaultType() {\n return DefaultType$6;\n }\n static get NAME() {\n return NAME$7;\n }\n\n // Public\n toggle(relatedTarget) {\n return this._isShown ? this.hide() : this.show(relatedTarget);\n }\n show(relatedTarget) {\n if (this._isShown || this._isTransitioning) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$4, {\n relatedTarget\n });\n if (showEvent.defaultPrevented) {\n return;\n }\n this._isShown = true;\n this._isTransitioning = true;\n this._scrollBar.hide();\n document.body.classList.add(CLASS_NAME_OPEN);\n this._adjustDialog();\n this._backdrop.show(() => this._showElement(relatedTarget));\n }\n hide() {\n if (!this._isShown || this._isTransitioning) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$4);\n if (hideEvent.defaultPrevented) {\n return;\n }\n this._isShown = false;\n this._isTransitioning = true;\n this._focustrap.deactivate();\n this._element.classList.remove(CLASS_NAME_SHOW$4);\n this._queueCallback(() => this._hideModal(), this._element, this._isAnimated());\n }\n dispose() {\n EventHandler.off(window, EVENT_KEY$4);\n EventHandler.off(this._dialog, EVENT_KEY$4);\n this._backdrop.dispose();\n this._focustrap.deactivate();\n super.dispose();\n }\n handleUpdate() {\n this._adjustDialog();\n }\n\n // Private\n _initializeBackDrop() {\n return new Backdrop({\n isVisible: Boolean(this._config.backdrop),\n // 'static' option will be translated to true, and booleans will keep their value,\n isAnimated: this._isAnimated()\n });\n }\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n 
_showElement(relatedTarget) {\n // try to append dynamic modal\n if (!document.body.contains(this._element)) {\n document.body.append(this._element);\n }\n this._element.style.display = 'block';\n this._element.removeAttribute('aria-hidden');\n this._element.setAttribute('aria-modal', true);\n this._element.setAttribute('role', 'dialog');\n this._element.scrollTop = 0;\n const modalBody = SelectorEngine.findOne(SELECTOR_MODAL_BODY, this._dialog);\n if (modalBody) {\n modalBody.scrollTop = 0;\n }\n reflow(this._element);\n this._element.classList.add(CLASS_NAME_SHOW$4);\n const transitionComplete = () => {\n if (this._config.focus) {\n this._focustrap.activate();\n }\n this._isTransitioning = false;\n EventHandler.trigger(this._element, EVENT_SHOWN$4, {\n relatedTarget\n });\n };\n this._queueCallback(transitionComplete, this._dialog, this._isAnimated());\n }\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS$1, event => {\n if (event.key !== ESCAPE_KEY$1) {\n return;\n }\n if (this._config.keyboard) {\n this.hide();\n return;\n }\n this._triggerBackdropTransition();\n });\n EventHandler.on(window, EVENT_RESIZE$1, () => {\n if (this._isShown && !this._isTransitioning) {\n this._adjustDialog();\n }\n });\n EventHandler.on(this._element, EVENT_MOUSEDOWN_DISMISS, event => {\n // a bad trick to segregate clicks that may start inside dialog but end outside, and avoid listen to scrollbar clicks\n EventHandler.one(this._element, EVENT_CLICK_DISMISS, event2 => {\n if (this._element !== event.target || this._element !== event2.target) {\n return;\n }\n if (this._config.backdrop === 'static') {\n this._triggerBackdropTransition();\n return;\n }\n if (this._config.backdrop) {\n this.hide();\n }\n });\n });\n }\n _hideModal() {\n this._element.style.display = 'none';\n this._element.setAttribute('aria-hidden', true);\n this._element.removeAttribute('aria-modal');\n this._element.removeAttribute('role');\n this._isTransitioning = false;\n 
this._backdrop.hide(() => {\n document.body.classList.remove(CLASS_NAME_OPEN);\n this._resetAdjustments();\n this._scrollBar.reset();\n EventHandler.trigger(this._element, EVENT_HIDDEN$4);\n });\n }\n _isAnimated() {\n return this._element.classList.contains(CLASS_NAME_FADE$3);\n }\n _triggerBackdropTransition() {\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED$1);\n if (hideEvent.defaultPrevented) {\n return;\n }\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const initialOverflowY = this._element.style.overflowY;\n // return if the following background transition hasn't yet completed\n if (initialOverflowY === 'hidden' || this._element.classList.contains(CLASS_NAME_STATIC)) {\n return;\n }\n if (!isModalOverflowing) {\n this._element.style.overflowY = 'hidden';\n }\n this._element.classList.add(CLASS_NAME_STATIC);\n this._queueCallback(() => {\n this._element.classList.remove(CLASS_NAME_STATIC);\n this._queueCallback(() => {\n this._element.style.overflowY = initialOverflowY;\n }, this._dialog);\n }, this._dialog);\n this._element.focus();\n }\n\n /**\n * The following methods are used to handle overflowing modals\n */\n\n _adjustDialog() {\n const isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;\n const scrollbarWidth = this._scrollBar.getWidth();\n const isBodyOverflowing = scrollbarWidth > 0;\n if (isBodyOverflowing && !isModalOverflowing) {\n const property = isRTL() ? 'paddingLeft' : 'paddingRight';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n if (!isBodyOverflowing && isModalOverflowing) {\n const property = isRTL() ? 
'paddingRight' : 'paddingLeft';\n this._element.style[property] = `${scrollbarWidth}px`;\n }\n }\n _resetAdjustments() {\n this._element.style.paddingLeft = '';\n this._element.style.paddingRight = '';\n }\n\n // Static\n static jQueryInterface(config, relatedTarget) {\n return this.each(function () {\n const data = Modal.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](relatedTarget);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$2, SELECTOR_DATA_TOGGLE$2, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n EventHandler.one(target, EVENT_SHOW$4, showEvent => {\n if (showEvent.defaultPrevented) {\n // only register focus restorer if modal will actually get shown\n return;\n }\n EventHandler.one(target, EVENT_HIDDEN$4, () => {\n if (isVisible(this)) {\n this.focus();\n }\n });\n });\n\n // avoid conflict when clicking modal toggler while another one is open\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR$1);\n if (alreadyOpen) {\n Modal.getInstance(alreadyOpen).hide();\n }\n const data = Modal.getOrCreateInstance(target);\n data.toggle(this);\n});\nenableDismissTrigger(Modal);\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Modal);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap offcanvas.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$6 = 'offcanvas';\nconst DATA_KEY$3 = 'bs.offcanvas';\nconst EVENT_KEY$3 = `.${DATA_KEY$3}`;\nconst DATA_API_KEY$1 = '.data-api';\nconst EVENT_LOAD_DATA_API$2 = `load${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst 
ESCAPE_KEY = 'Escape';\nconst CLASS_NAME_SHOW$3 = 'show';\nconst CLASS_NAME_SHOWING$1 = 'showing';\nconst CLASS_NAME_HIDING = 'hiding';\nconst CLASS_NAME_BACKDROP = 'offcanvas-backdrop';\nconst OPEN_SELECTOR = '.offcanvas.show';\nconst EVENT_SHOW$3 = `show${EVENT_KEY$3}`;\nconst EVENT_SHOWN$3 = `shown${EVENT_KEY$3}`;\nconst EVENT_HIDE$3 = `hide${EVENT_KEY$3}`;\nconst EVENT_HIDE_PREVENTED = `hidePrevented${EVENT_KEY$3}`;\nconst EVENT_HIDDEN$3 = `hidden${EVENT_KEY$3}`;\nconst EVENT_RESIZE = `resize${EVENT_KEY$3}`;\nconst EVENT_CLICK_DATA_API$1 = `click${EVENT_KEY$3}${DATA_API_KEY$1}`;\nconst EVENT_KEYDOWN_DISMISS = `keydown.dismiss${EVENT_KEY$3}`;\nconst SELECTOR_DATA_TOGGLE$1 = '[data-bs-toggle=\"offcanvas\"]';\nconst Default$5 = {\n backdrop: true,\n keyboard: true,\n scroll: false\n};\nconst DefaultType$5 = {\n backdrop: '(boolean|string)',\n keyboard: 'boolean',\n scroll: 'boolean'\n};\n\n/**\n * Class definition\n */\n\nclass Offcanvas extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n this._isShown = false;\n this._backdrop = this._initializeBackDrop();\n this._focustrap = this._initializeFocusTrap();\n this._addEventListeners();\n }\n\n // Getters\n static get Default() {\n return Default$5;\n }\n static get DefaultType() {\n return DefaultType$5;\n }\n static get NAME() {\n return NAME$6;\n }\n\n // Public\n toggle(relatedTarget) {\n return this._isShown ? 
this.hide() : this.show(relatedTarget);\n }\n show(relatedTarget) {\n if (this._isShown) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, EVENT_SHOW$3, {\n relatedTarget\n });\n if (showEvent.defaultPrevented) {\n return;\n }\n this._isShown = true;\n this._backdrop.show();\n if (!this._config.scroll) {\n new ScrollBarHelper().hide();\n }\n this._element.setAttribute('aria-modal', true);\n this._element.setAttribute('role', 'dialog');\n this._element.classList.add(CLASS_NAME_SHOWING$1);\n const completeCallBack = () => {\n if (!this._config.scroll || this._config.backdrop) {\n this._focustrap.activate();\n }\n this._element.classList.add(CLASS_NAME_SHOW$3);\n this._element.classList.remove(CLASS_NAME_SHOWING$1);\n EventHandler.trigger(this._element, EVENT_SHOWN$3, {\n relatedTarget\n });\n };\n this._queueCallback(completeCallBack, this._element, true);\n }\n hide() {\n if (!this._isShown) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, EVENT_HIDE$3);\n if (hideEvent.defaultPrevented) {\n return;\n }\n this._focustrap.deactivate();\n this._element.blur();\n this._isShown = false;\n this._element.classList.add(CLASS_NAME_HIDING);\n this._backdrop.hide();\n const completeCallback = () => {\n this._element.classList.remove(CLASS_NAME_SHOW$3, CLASS_NAME_HIDING);\n this._element.removeAttribute('aria-modal');\n this._element.removeAttribute('role');\n if (!this._config.scroll) {\n new ScrollBarHelper().reset();\n }\n EventHandler.trigger(this._element, EVENT_HIDDEN$3);\n };\n this._queueCallback(completeCallback, this._element, true);\n }\n dispose() {\n this._backdrop.dispose();\n this._focustrap.deactivate();\n super.dispose();\n }\n\n // Private\n _initializeBackDrop() {\n const clickCallback = () => {\n if (this._config.backdrop === 'static') {\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n return;\n }\n this.hide();\n };\n\n // 'static' option will be translated to true, and booleans will keep their 
value\n const isVisible = Boolean(this._config.backdrop);\n return new Backdrop({\n className: CLASS_NAME_BACKDROP,\n isVisible,\n isAnimated: true,\n rootElement: this._element.parentNode,\n clickCallback: isVisible ? clickCallback : null\n });\n }\n _initializeFocusTrap() {\n return new FocusTrap({\n trapElement: this._element\n });\n }\n _addEventListeners() {\n EventHandler.on(this._element, EVENT_KEYDOWN_DISMISS, event => {\n if (event.key !== ESCAPE_KEY) {\n return;\n }\n if (this._config.keyboard) {\n this.hide();\n return;\n }\n EventHandler.trigger(this._element, EVENT_HIDE_PREVENTED);\n });\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Offcanvas.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (data[config] === undefined || config.startsWith('_') || config === 'constructor') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config](this);\n });\n }\n}\n\n/**\n * Data API implementation\n */\n\nEventHandler.on(document, EVENT_CLICK_DATA_API$1, SELECTOR_DATA_TOGGLE$1, function (event) {\n const target = SelectorEngine.getElementFromSelector(this);\n if (['A', 'AREA'].includes(this.tagName)) {\n event.preventDefault();\n }\n if (isDisabled(this)) {\n return;\n }\n EventHandler.one(target, EVENT_HIDDEN$3, () => {\n // focus on trigger when it is closed\n if (isVisible(this)) {\n this.focus();\n }\n });\n\n // avoid conflict when clicking a toggler of an offcanvas, while another is open\n const alreadyOpen = SelectorEngine.findOne(OPEN_SELECTOR);\n if (alreadyOpen && alreadyOpen !== target) {\n Offcanvas.getInstance(alreadyOpen).hide();\n }\n const data = Offcanvas.getOrCreateInstance(target);\n data.toggle(this);\n});\nEventHandler.on(window, EVENT_LOAD_DATA_API$2, () => {\n for (const selector of SelectorEngine.find(OPEN_SELECTOR)) {\n Offcanvas.getOrCreateInstance(selector).show();\n }\n});\nEventHandler.on(window, EVENT_RESIZE, () => {\n for 
(const element of SelectorEngine.find('[aria-modal][class*=show][class*=offcanvas-]')) {\n if (getComputedStyle(element).position !== 'fixed') {\n Offcanvas.getOrCreateInstance(element).hide();\n }\n }\n});\nenableDismissTrigger(Offcanvas);\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Offcanvas);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/sanitizer.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n// js-docs-start allow-list\nconst ARIA_ATTRIBUTE_PATTERN = /^aria-[\\w-]*$/i;\nconst DefaultAllowlist = {\n // Global attributes allowed on any supplied element below.\n '*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],\n a: ['target', 'href', 'title', 'rel'],\n area: [],\n b: [],\n br: [],\n col: [],\n code: [],\n div: [],\n em: [],\n hr: [],\n h1: [],\n h2: [],\n h3: [],\n h4: [],\n h5: [],\n h6: [],\n i: [],\n img: ['src', 'srcset', 'alt', 'title', 'width', 'height'],\n li: [],\n ol: [],\n p: [],\n pre: [],\n s: [],\n small: [],\n span: [],\n sub: [],\n sup: [],\n strong: [],\n u: [],\n ul: []\n};\n// js-docs-end allow-list\n\nconst uriAttributes = new Set(['background', 'cite', 'href', 'itemtype', 'longdesc', 'poster', 'src', 'xlink:href']);\n\n/**\n * A pattern that recognizes URLs that are safe wrt. 
XSS in URL navigation\n * contexts.\n *\n * Shout-out to Angular https://github.com/angular/angular/blob/15.2.8/packages/core/src/sanitization/url_sanitizer.ts#L38\n */\n// eslint-disable-next-line unicorn/better-regex\nconst SAFE_URL_PATTERN = /^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i;\nconst allowedAttribute = (attribute, allowedAttributeList) => {\n const attributeName = attribute.nodeName.toLowerCase();\n if (allowedAttributeList.includes(attributeName)) {\n if (uriAttributes.has(attributeName)) {\n return Boolean(SAFE_URL_PATTERN.test(attribute.nodeValue));\n }\n return true;\n }\n\n // Check if a regular expression validates the attribute.\n return allowedAttributeList.filter(attributeRegex => attributeRegex instanceof RegExp).some(regex => regex.test(attributeName));\n};\nfunction sanitizeHtml(unsafeHtml, allowList, sanitizeFunction) {\n if (!unsafeHtml.length) {\n return unsafeHtml;\n }\n if (sanitizeFunction && typeof sanitizeFunction === 'function') {\n return sanitizeFunction(unsafeHtml);\n }\n const domParser = new window.DOMParser();\n const createdDocument = domParser.parseFromString(unsafeHtml, 'text/html');\n const elements = [].concat(...createdDocument.body.querySelectorAll('*'));\n for (const element of elements) {\n const elementName = element.nodeName.toLowerCase();\n if (!Object.keys(allowList).includes(elementName)) {\n element.remove();\n continue;\n }\n const attributeList = [].concat(...element.attributes);\n const allowedAttributes = [].concat(allowList['*'] || [], allowList[elementName] || []);\n for (const attribute of attributeList) {\n if (!allowedAttribute(attribute, allowedAttributes)) {\n element.removeAttribute(attribute.nodeName);\n }\n }\n }\n return createdDocument.body.innerHTML;\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap util/template-factory.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * 
--------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$5 = 'TemplateFactory';\nconst Default$4 = {\n allowList: DefaultAllowlist,\n content: {},\n // { selector : text , selector2 : text2 , }\n extraClass: '',\n html: false,\n sanitize: true,\n sanitizeFn: null,\n template: '
'\n};\nconst DefaultType$4 = {\n allowList: 'object',\n content: 'object',\n extraClass: '(string|function)',\n html: 'boolean',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n template: 'string'\n};\nconst DefaultContentType = {\n entry: '(string|element|function|null)',\n selector: '(string|element)'\n};\n\n/**\n * Class definition\n */\n\nclass TemplateFactory extends Config {\n constructor(config) {\n super();\n this._config = this._getConfig(config);\n }\n\n // Getters\n static get Default() {\n return Default$4;\n }\n static get DefaultType() {\n return DefaultType$4;\n }\n static get NAME() {\n return NAME$5;\n }\n\n // Public\n getContent() {\n return Object.values(this._config.content).map(config => this._resolvePossibleFunction(config)).filter(Boolean);\n }\n hasContent() {\n return this.getContent().length > 0;\n }\n changeContent(content) {\n this._checkContent(content);\n this._config.content = {\n ...this._config.content,\n ...content\n };\n return this;\n }\n toHtml() {\n const templateWrapper = document.createElement('div');\n templateWrapper.innerHTML = this._maybeSanitize(this._config.template);\n for (const [selector, text] of Object.entries(this._config.content)) {\n this._setContent(templateWrapper, text, selector);\n }\n const template = templateWrapper.children[0];\n const extraClass = this._resolvePossibleFunction(this._config.extraClass);\n if (extraClass) {\n template.classList.add(...extraClass.split(' '));\n }\n return template;\n }\n\n // Private\n _typeCheckConfig(config) {\n super._typeCheckConfig(config);\n this._checkContent(config.content);\n }\n _checkContent(arg) {\n for (const [selector, content] of Object.entries(arg)) {\n super._typeCheckConfig({\n selector,\n entry: content\n }, DefaultContentType);\n }\n }\n _setContent(template, content, selector) {\n const templateElement = SelectorEngine.findOne(selector, template);\n if (!templateElement) {\n return;\n }\n content = this._resolvePossibleFunction(content);\n if 
(!content) {\n templateElement.remove();\n return;\n }\n if (isElement(content)) {\n this._putElementInTemplate(getElement(content), templateElement);\n return;\n }\n if (this._config.html) {\n templateElement.innerHTML = this._maybeSanitize(content);\n return;\n }\n templateElement.textContent = content;\n }\n _maybeSanitize(arg) {\n return this._config.sanitize ? sanitizeHtml(arg, this._config.allowList, this._config.sanitizeFn) : arg;\n }\n _resolvePossibleFunction(arg) {\n return execute(arg, [this]);\n }\n _putElementInTemplate(element, templateElement) {\n if (this._config.html) {\n templateElement.innerHTML = '';\n templateElement.append(element);\n return;\n }\n templateElement.textContent = element.textContent;\n }\n}\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap tooltip.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$4 = 'tooltip';\nconst DISALLOWED_ATTRIBUTES = new Set(['sanitize', 'allowList', 'sanitizeFn']);\nconst CLASS_NAME_FADE$2 = 'fade';\nconst CLASS_NAME_MODAL = 'modal';\nconst CLASS_NAME_SHOW$2 = 'show';\nconst SELECTOR_TOOLTIP_INNER = '.tooltip-inner';\nconst SELECTOR_MODAL = `.${CLASS_NAME_MODAL}`;\nconst EVENT_MODAL_HIDE = 'hide.bs.modal';\nconst TRIGGER_HOVER = 'hover';\nconst TRIGGER_FOCUS = 'focus';\nconst TRIGGER_CLICK = 'click';\nconst TRIGGER_MANUAL = 'manual';\nconst EVENT_HIDE$2 = 'hide';\nconst EVENT_HIDDEN$2 = 'hidden';\nconst EVENT_SHOW$2 = 'show';\nconst EVENT_SHOWN$2 = 'shown';\nconst EVENT_INSERTED = 'inserted';\nconst EVENT_CLICK$1 = 'click';\nconst EVENT_FOCUSIN$1 = 'focusin';\nconst EVENT_FOCUSOUT$1 = 'focusout';\nconst EVENT_MOUSEENTER = 'mouseenter';\nconst EVENT_MOUSELEAVE = 'mouseleave';\nconst AttachmentMap = {\n AUTO: 'auto',\n TOP: 'top',\n RIGHT: isRTL() ? 
'left' : 'right',\n BOTTOM: 'bottom',\n LEFT: isRTL() ? 'right' : 'left'\n};\nconst Default$3 = {\n allowList: DefaultAllowlist,\n animation: true,\n boundary: 'clippingParents',\n container: false,\n customClass: '',\n delay: 0,\n fallbackPlacements: ['top', 'right', 'bottom', 'left'],\n html: false,\n offset: [0, 6],\n placement: 'top',\n popperConfig: null,\n sanitize: true,\n sanitizeFn: null,\n selector: false,\n template: '
' + '
' + '
' + '
',\n title: '',\n trigger: 'hover focus'\n};\nconst DefaultType$3 = {\n allowList: 'object',\n animation: 'boolean',\n boundary: '(string|element)',\n container: '(string|element|boolean)',\n customClass: '(string|function)',\n delay: '(number|object)',\n fallbackPlacements: 'array',\n html: 'boolean',\n offset: '(array|string|function)',\n placement: '(string|function)',\n popperConfig: '(null|object|function)',\n sanitize: 'boolean',\n sanitizeFn: '(null|function)',\n selector: '(string|boolean)',\n template: 'string',\n title: '(string|element|function)',\n trigger: 'string'\n};\n\n/**\n * Class definition\n */\n\nclass Tooltip extends BaseComponent {\n constructor(element, config) {\n if (typeof Popper === 'undefined') {\n throw new TypeError('Bootstrap\\'s tooltips require Popper (https://popper.js.org)');\n }\n super(element, config);\n\n // Private\n this._isEnabled = true;\n this._timeout = 0;\n this._isHovered = null;\n this._activeTrigger = {};\n this._popper = null;\n this._templateFactory = null;\n this._newContent = null;\n\n // Protected\n this.tip = null;\n this._setListeners();\n if (!this._config.selector) {\n this._fixTitle();\n }\n }\n\n // Getters\n static get Default() {\n return Default$3;\n }\n static get DefaultType() {\n return DefaultType$3;\n }\n static get NAME() {\n return NAME$4;\n }\n\n // Public\n enable() {\n this._isEnabled = true;\n }\n disable() {\n this._isEnabled = false;\n }\n toggleEnabled() {\n this._isEnabled = !this._isEnabled;\n }\n toggle() {\n if (!this._isEnabled) {\n return;\n }\n this._activeTrigger.click = !this._activeTrigger.click;\n if (this._isShown()) {\n this._leave();\n return;\n }\n this._enter();\n }\n dispose() {\n clearTimeout(this._timeout);\n EventHandler.off(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n if (this._element.getAttribute('data-bs-original-title')) {\n this._element.setAttribute('title', this._element.getAttribute('data-bs-original-title'));\n }\n 
this._disposePopper();\n super.dispose();\n }\n show() {\n if (this._element.style.display === 'none') {\n throw new Error('Please use show on visible elements');\n }\n if (!(this._isWithContent() && this._isEnabled)) {\n return;\n }\n const showEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_SHOW$2));\n const shadowRoot = findShadowRoot(this._element);\n const isInTheDom = (shadowRoot || this._element.ownerDocument.documentElement).contains(this._element);\n if (showEvent.defaultPrevented || !isInTheDom) {\n return;\n }\n\n // TODO: v6 remove this or make it optional\n this._disposePopper();\n const tip = this._getTipElement();\n this._element.setAttribute('aria-describedby', tip.getAttribute('id'));\n const {\n container\n } = this._config;\n if (!this._element.ownerDocument.documentElement.contains(this.tip)) {\n container.append(tip);\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_INSERTED));\n }\n this._popper = this._createPopper(tip);\n tip.classList.add(CLASS_NAME_SHOW$2);\n\n // If this is a touch-enabled device we add extra\n // empty mouseover listeners to the body's immediate children;\n // only needed because of broken event delegation on iOS\n // https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.on(element, 'mouseover', noop);\n }\n }\n const complete = () => {\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_SHOWN$2));\n if (this._isHovered === false) {\n this._leave();\n }\n this._isHovered = false;\n };\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n hide() {\n if (!this._isShown()) {\n return;\n }\n const hideEvent = EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDE$2));\n if (hideEvent.defaultPrevented) {\n return;\n }\n const tip = this._getTipElement();\n 
tip.classList.remove(CLASS_NAME_SHOW$2);\n\n // If this is a touch-enabled device we remove the extra\n // empty mouseover listeners we added for iOS support\n if ('ontouchstart' in document.documentElement) {\n for (const element of [].concat(...document.body.children)) {\n EventHandler.off(element, 'mouseover', noop);\n }\n }\n this._activeTrigger[TRIGGER_CLICK] = false;\n this._activeTrigger[TRIGGER_FOCUS] = false;\n this._activeTrigger[TRIGGER_HOVER] = false;\n this._isHovered = null; // it is a trick to support manual triggering\n\n const complete = () => {\n if (this._isWithActiveTrigger()) {\n return;\n }\n if (!this._isHovered) {\n this._disposePopper();\n }\n this._element.removeAttribute('aria-describedby');\n EventHandler.trigger(this._element, this.constructor.eventName(EVENT_HIDDEN$2));\n };\n this._queueCallback(complete, this.tip, this._isAnimated());\n }\n update() {\n if (this._popper) {\n this._popper.update();\n }\n }\n\n // Protected\n _isWithContent() {\n return Boolean(this._getTitle());\n }\n _getTipElement() {\n if (!this.tip) {\n this.tip = this._createTipElement(this._newContent || this._getContentForTemplate());\n }\n return this.tip;\n }\n _createTipElement(content) {\n const tip = this._getTemplateFactory(content).toHtml();\n\n // TODO: remove this check in v6\n if (!tip) {\n return null;\n }\n tip.classList.remove(CLASS_NAME_FADE$2, CLASS_NAME_SHOW$2);\n // TODO: v6 the following can be achieved with CSS only\n tip.classList.add(`bs-${this.constructor.NAME}-auto`);\n const tipId = getUID(this.constructor.NAME).toString();\n tip.setAttribute('id', tipId);\n if (this._isAnimated()) {\n tip.classList.add(CLASS_NAME_FADE$2);\n }\n return tip;\n }\n setContent(content) {\n this._newContent = content;\n if (this._isShown()) {\n this._disposePopper();\n this.show();\n }\n }\n _getTemplateFactory(content) {\n if (this._templateFactory) {\n this._templateFactory.changeContent(content);\n } else {\n this._templateFactory = new 
TemplateFactory({\n ...this._config,\n // the `content` var has to be after `this._config`\n // to override config.content in case of popover\n content,\n extraClass: this._resolvePossibleFunction(this._config.customClass)\n });\n }\n return this._templateFactory;\n }\n _getContentForTemplate() {\n return {\n [SELECTOR_TOOLTIP_INNER]: this._getTitle()\n };\n }\n _getTitle() {\n return this._resolvePossibleFunction(this._config.title) || this._element.getAttribute('data-bs-original-title');\n }\n\n // Private\n _initializeOnDelegatedTarget(event) {\n return this.constructor.getOrCreateInstance(event.delegateTarget, this._getDelegateConfig());\n }\n _isAnimated() {\n return this._config.animation || this.tip && this.tip.classList.contains(CLASS_NAME_FADE$2);\n }\n _isShown() {\n return this.tip && this.tip.classList.contains(CLASS_NAME_SHOW$2);\n }\n _createPopper(tip) {\n const placement = execute(this._config.placement, [this, tip, this._element]);\n const attachment = AttachmentMap[placement.toUpperCase()];\n return Popper.createPopper(this._element, tip, this._getPopperConfig(attachment));\n }\n _getOffset() {\n const {\n offset\n } = this._config;\n if (typeof offset === 'string') {\n return offset.split(',').map(value => Number.parseInt(value, 10));\n }\n if (typeof offset === 'function') {\n return popperData => offset(popperData, this._element);\n }\n return offset;\n }\n _resolvePossibleFunction(arg) {\n return execute(arg, [this._element]);\n }\n _getPopperConfig(attachment) {\n const defaultBsPopperConfig = {\n placement: attachment,\n modifiers: [{\n name: 'flip',\n options: {\n fallbackPlacements: this._config.fallbackPlacements\n }\n }, {\n name: 'offset',\n options: {\n offset: this._getOffset()\n }\n }, {\n name: 'preventOverflow',\n options: {\n boundary: this._config.boundary\n }\n }, {\n name: 'arrow',\n options: {\n element: `.${this.constructor.NAME}-arrow`\n }\n }, {\n name: 'preSetPlacement',\n enabled: true,\n phase: 'beforeMain',\n fn: data 
=> {\n // Pre-set Popper's placement attribute in order to read the arrow sizes properly.\n // Otherwise, Popper mixes up the width and height dimensions since the initial arrow style is for top placement\n this._getTipElement().setAttribute('data-popper-placement', data.state.placement);\n }\n }]\n };\n return {\n ...defaultBsPopperConfig,\n ...execute(this._config.popperConfig, [defaultBsPopperConfig])\n };\n }\n _setListeners() {\n const triggers = this._config.trigger.split(' ');\n for (const trigger of triggers) {\n if (trigger === 'click') {\n EventHandler.on(this._element, this.constructor.eventName(EVENT_CLICK$1), this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context.toggle();\n });\n } else if (trigger !== TRIGGER_MANUAL) {\n const eventIn = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSEENTER) : this.constructor.eventName(EVENT_FOCUSIN$1);\n const eventOut = trigger === TRIGGER_HOVER ? this.constructor.eventName(EVENT_MOUSELEAVE) : this.constructor.eventName(EVENT_FOCUSOUT$1);\n EventHandler.on(this._element, eventIn, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context._activeTrigger[event.type === 'focusin' ? TRIGGER_FOCUS : TRIGGER_HOVER] = true;\n context._enter();\n });\n EventHandler.on(this._element, eventOut, this._config.selector, event => {\n const context = this._initializeOnDelegatedTarget(event);\n context._activeTrigger[event.type === 'focusout' ? 
TRIGGER_FOCUS : TRIGGER_HOVER] = context._element.contains(event.relatedTarget);\n context._leave();\n });\n }\n }\n this._hideModalHandler = () => {\n if (this._element) {\n this.hide();\n }\n };\n EventHandler.on(this._element.closest(SELECTOR_MODAL), EVENT_MODAL_HIDE, this._hideModalHandler);\n }\n _fixTitle() {\n const title = this._element.getAttribute('title');\n if (!title) {\n return;\n }\n if (!this._element.getAttribute('aria-label') && !this._element.textContent.trim()) {\n this._element.setAttribute('aria-label', title);\n }\n this._element.setAttribute('data-bs-original-title', title); // DO NOT USE IT. Is only for backwards compatibility\n this._element.removeAttribute('title');\n }\n _enter() {\n if (this._isShown() || this._isHovered) {\n this._isHovered = true;\n return;\n }\n this._isHovered = true;\n this._setTimeout(() => {\n if (this._isHovered) {\n this.show();\n }\n }, this._config.delay.show);\n }\n _leave() {\n if (this._isWithActiveTrigger()) {\n return;\n }\n this._isHovered = false;\n this._setTimeout(() => {\n if (!this._isHovered) {\n this.hide();\n }\n }, this._config.delay.hide);\n }\n _setTimeout(handler, timeout) {\n clearTimeout(this._timeout);\n this._timeout = setTimeout(handler, timeout);\n }\n _isWithActiveTrigger() {\n return Object.values(this._activeTrigger).includes(true);\n }\n _getConfig(config) {\n const dataAttributes = Manipulator.getDataAttributes(this._element);\n for (const dataAttribute of Object.keys(dataAttributes)) {\n if (DISALLOWED_ATTRIBUTES.has(dataAttribute)) {\n delete dataAttributes[dataAttribute];\n }\n }\n config = {\n ...dataAttributes,\n ...(typeof config === 'object' && config ? config : {})\n };\n config = this._mergeConfigObj(config);\n config = this._configAfterMerge(config);\n this._typeCheckConfig(config);\n return config;\n }\n _configAfterMerge(config) {\n config.container = config.container === false ? 
document.body : getElement(config.container);\n if (typeof config.delay === 'number') {\n config.delay = {\n show: config.delay,\n hide: config.delay\n };\n }\n if (typeof config.title === 'number') {\n config.title = config.title.toString();\n }\n if (typeof config.content === 'number') {\n config.content = config.content.toString();\n }\n return config;\n }\n _getDelegateConfig() {\n const config = {};\n for (const [key, value] of Object.entries(this._config)) {\n if (this.constructor.Default[key] !== value) {\n config[key] = value;\n }\n }\n config.selector = false;\n config.trigger = 'manual';\n\n // In the future can be replaced with:\n // const keysWithDifferentValues = Object.entries(this._config).filter(entry => this.constructor.Default[entry[0]] !== this._config[entry[0]])\n // `Object.fromEntries(keysWithDifferentValues)`\n return config;\n }\n _disposePopper() {\n if (this._popper) {\n this._popper.destroy();\n this._popper = null;\n }\n if (this.tip) {\n this.tip.remove();\n this.tip = null;\n }\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Tooltip.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n}\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Tooltip);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap popover.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$3 = 'popover';\nconst SELECTOR_TITLE = '.popover-header';\nconst SELECTOR_CONTENT = '.popover-body';\nconst Default$2 = {\n ...Tooltip.Default,\n content: '',\n offset: [0, 8],\n placement: 'right',\n template: '
' + '
' + '

' + '
' + '
',\n trigger: 'click'\n};\nconst DefaultType$2 = {\n ...Tooltip.DefaultType,\n content: '(null|string|element|function)'\n};\n\n/**\n * Class definition\n */\n\nclass Popover extends Tooltip {\n // Getters\n static get Default() {\n return Default$2;\n }\n static get DefaultType() {\n return DefaultType$2;\n }\n static get NAME() {\n return NAME$3;\n }\n\n // Overrides\n _isWithContent() {\n return this._getTitle() || this._getContent();\n }\n\n // Private\n _getContentForTemplate() {\n return {\n [SELECTOR_TITLE]: this._getTitle(),\n [SELECTOR_CONTENT]: this._getContent()\n };\n }\n _getContent() {\n return this._resolvePossibleFunction(this._config.content);\n }\n\n // Static\n static jQueryInterface(config) {\n return this.each(function () {\n const data = Popover.getOrCreateInstance(this, config);\n if (typeof config !== 'string') {\n return;\n }\n if (typeof data[config] === 'undefined') {\n throw new TypeError(`No method named \"${config}\"`);\n }\n data[config]();\n });\n }\n}\n\n/**\n * jQuery\n */\n\ndefineJQueryPlugin(Popover);\n\n/**\n * --------------------------------------------------------------------------\n * Bootstrap scrollspy.js\n * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)\n * --------------------------------------------------------------------------\n */\n\n\n/**\n * Constants\n */\n\nconst NAME$2 = 'scrollspy';\nconst DATA_KEY$2 = 'bs.scrollspy';\nconst EVENT_KEY$2 = `.${DATA_KEY$2}`;\nconst DATA_API_KEY = '.data-api';\nconst EVENT_ACTIVATE = `activate${EVENT_KEY$2}`;\nconst EVENT_CLICK = `click${EVENT_KEY$2}`;\nconst EVENT_LOAD_DATA_API$1 = `load${EVENT_KEY$2}${DATA_API_KEY}`;\nconst CLASS_NAME_DROPDOWN_ITEM = 'dropdown-item';\nconst CLASS_NAME_ACTIVE$1 = 'active';\nconst SELECTOR_DATA_SPY = '[data-bs-spy=\"scroll\"]';\nconst SELECTOR_TARGET_LINKS = '[href]';\nconst SELECTOR_NAV_LIST_GROUP = '.nav, .list-group';\nconst SELECTOR_NAV_LINKS = '.nav-link';\nconst SELECTOR_NAV_ITEMS = '.nav-item';\nconst 
SELECTOR_LIST_ITEMS = '.list-group-item';\nconst SELECTOR_LINK_ITEMS = `${SELECTOR_NAV_LINKS}, ${SELECTOR_NAV_ITEMS} > ${SELECTOR_NAV_LINKS}, ${SELECTOR_LIST_ITEMS}`;\nconst SELECTOR_DROPDOWN = '.dropdown';\nconst SELECTOR_DROPDOWN_TOGGLE$1 = '.dropdown-toggle';\nconst Default$1 = {\n offset: null,\n // TODO: v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: '0px 0px -25%',\n smoothScroll: false,\n target: null,\n threshold: [0.1, 0.5, 1]\n};\nconst DefaultType$1 = {\n offset: '(number|null)',\n // TODO v6 @deprecated, keep it for backwards compatibility reasons\n rootMargin: 'string',\n smoothScroll: 'boolean',\n target: 'element',\n threshold: 'array'\n};\n\n/**\n * Class definition\n */\n\nclass ScrollSpy extends BaseComponent {\n constructor(element, config) {\n super(element, config);\n\n // this._element is the observablesContainer and config.target the menu links wrapper\n this._targetLinks = new Map();\n this._observableSections = new Map();\n this._rootElement = getComputedStyle(this._element).overflowY === 'visible' ? 
null : this._element;\n this._activeTarget = null;\n this._observer = null;\n this._previousScrollData = {\n visibleEntryTop: 0,\n parentScrollTop: 0\n };\n this.refresh(); // initialize\n }\n\n // Getters\n static get Default() {\n return Default$1;\n }\n static get DefaultType() {\n return DefaultType$1;\n }\n static get NAME() {\n return NAME$2;\n }\n\n // Public\n refresh() {\n this._initializeTargetsAndObservables();\n this._maybeEnableSmoothScroll();\n if (this._observer) {\n this._observer.disconnect();\n } else {\n this._observer = this._getNewObserver();\n }\n for (const section of this._observableSections.values()) {\n this._observer.observe(section);\n }\n }\n dispose() {\n this._observer.disconnect();\n super.dispose();\n }\n\n // Private\n _configAfterMerge(config) {\n // TODO: on v6 target should be given explicitly & remove the {target: 'ss-target'} case\n config.target = getElement(config.target) || document.body;\n\n // TODO: v6 Only for backwards compatibility reasons. Use rootMargin only\n config.rootMargin = config.offset ? 
`${config.offset}px 0px -30%` : config.rootMargin;\n if (typeof config.threshold === 'string') {\n config.threshold = config.threshold.split(',').map(value => Number.parseFloat(value));\n }\n return config;\n }\n _maybeEnableSmoothScroll() {\n if (!this._config.smoothScroll) {\n return;\n }\n\n // unregister any previous listeners\n EventHandler.off(this._config.target, EVENT_CLICK);\n EventHandler.on(this._config.target, EVENT_CLICK, SELECTOR_TARGET_LINKS, event => {\n const observableSection = this._observableSections.get(event.target.hash);\n if (observableSection) {\n event.preventDefault();\n const root = this._rootElement || window;\n const height = observableSection.offsetTop - this._element.offsetTop;\n if (root.scrollTo) {\n root.scrollTo({\n top: height,\n behavior: 'smooth'\n });\n return;\n }\n\n // Chrome 60 doesn't support `scrollTo`\n root.scrollTop = height;\n }\n });\n }\n _getNewObserver() {\n const options = {\n root: this._rootElement,\n threshold: this._config.threshold,\n rootMargin: this._config.rootMargin\n };\n return new IntersectionObserver(entries => this._observerCallback(entries), options);\n }\n\n // The logic of selection\n _observerCallback(entries) {\n const targetElement = entry => this._targetLinks.get(`#${entry.target.id}`);\n const activate = entry => {\n this._previousScrollData.visibleEntryTop = entry.target.offsetTop;\n this._process(targetElement(entry));\n };\n const parentScrollTop = (this._rootElement || document.documentElement).scrollTop;\n const userScrollsDown = parentScrollTop >= this._previousScrollData.parentScrollTop;\n this._previousScrollData.parentScrollTop = parentScrollTop;\n for (const entry of entries) {\n if (!entry.isIntersecting) {\n this._activeTarget = null;\n this._clearActiveClass(targetElement(entry));\n continue;\n }\n const entryIsLowerThanPrevious = entry.target.offsetTop >= this._previousScrollData.visibleEntryTop;\n // if we are scrolling down, pick the bigger offsetTop\n if (userScrollsDown 
&& entryIsLowerThanPrevious) {\n activate(entry);\n // if parent isn't scrolled, let's keep the first visible item, breaking the iteration\n if (!parentScrollTop) {\n return;\n }\n continue;\n }\n\n // if we are scrolling up, pick the smallest offsetTop\n if (!userScrollsDown && !entryIsLowerThanPrevious) {\n activate(entry);\n }\n }\n }\n _initializeTargetsAndObservables() {\n this._targetLinks = new Map();\n this._observableSections = new Map();\n const targetLinks = SelectorEngine.find(SELECTOR_TARGET_LINKS, this._config.target);\n for (const anchor of targetLinks) {\n // ensure that the anchor has an id and is not disabled\n if (!anchor.hash || isDisabled(anchor)) {\n continue;\n }\n const observableSection = SelectorEngine.findOne(decodeURI(anchor.hash), this._element);\n\n // ensure that the observableSection exists & is visible\n if (isVisible(observableSection)) {\n this._targetLinks.set(decodeURI(anchor.hash), anchor);\n this._observableSections.set(anchor.hash, observableSection);\n }\n }\n }\n _process(target) {\n if (this._activeTarget === target) {\n return;\n }\n this._clearActiveClass(this._config.target);\n this._activeTarget = target;\n target.classList.add(CLASS_NAME_ACTIVE$1);\n this._activateParents(target);\n EventHandler.trigger(this._element, EVENT_ACTIVATE, {\n relatedTarget: target\n });\n }\n _activateParents(target) {\n // Activate dropdown parents\n if (target.classList.contains(CLASS_NAME_DROPDOWN_ITEM)) {\n SelectorEngine.findOne(SELECTOR_DROPDOWN_TOGGLE$1, target.closest(SELECTOR_DROPDOWN)).classList.add(CLASS_NAME_ACTIVE$1);\n return;\n }\n for (const listGroup of SelectorEngine.parents(target, SELECTOR_NAV_LIST_GROUP)) {\n // Set triggered links parents as active\n // With both