diff --git a/changelog/2024-05-31-secondary-storage/index.md b/changelog/2024-05-31-secondary-storage/index.md index 910647641..92ce4af4e 100644 --- a/changelog/2024-05-31-secondary-storage/index.md +++ b/changelog/2024-05-31-secondary-storage/index.md @@ -7,8 +7,8 @@ image: ./secondary_storage.png description: Read and write from a storage that is not your main storage by specifying it in the S3 object as "secondary_storage" with the name of it. features: [ - 'Add additional storages from S3, Azure Blob, AWS OIDC or Azure Workload Identity.', + 'Add additional storages from S3, Azure Blob, AWS OIDC or Google Cloud Storage.', 'From script, specify the secondary storage with an object with properties `s3` (path to the file) and `storage` (name of the secondary storage).' ] docs: /docs/core_concepts/object_storage_in_windmill#secondary-storage ---- \ No newline at end of file +--- diff --git a/docs/advanced/18_instance_settings/index.mdx b/docs/advanced/18_instance_settings/index.mdx index c128f202e..f1a59f447 100644 --- a/docs/advanced/18_instance_settings/index.mdx +++ b/docs/advanced/18_instance_settings/index.mdx @@ -109,7 +109,7 @@ This setting is only available on [Enterprise Edition](/pricing). This feature has no overlap with the [Workspace object storage](../../core_concepts/38_object_storage_in_windmill/index.mdx#workspace-object-storage). -You can choose to use S3, Azure Blob Storage or AWS OIDC. For each you will find a button to test settings from a server or from a worker. +You can choose to use S3, Azure Blob Storage, AWS OIDC or Google Cloud Storage. For each you will find a button to test settings from a server or from a worker. ![S3/Azure for Python/Go cache & large logs](../../core_concepts/20_jobs/s3_azure_cache.png "S3/Azure for Python/Go cache & large logs") @@ -145,6 +145,14 @@ You can choose to use S3, Azure Blob Storage or AWS OIDC. For each you will find This setting is only available on [Enterprise Edition](/pricing). +#### Google Cloud Storage + +| Field | Description | +|-------|-------------| +| Bucket | The name of your Google Cloud Storage bucket | +| Service Account Key | The service account key for your Google Cloud Storage bucket in JSON format | + + ### Private Hub base url Base url of your [private Hub](../../core_concepts/32_private_hub/index.mdx) instance, without trailing slash. diff --git a/docs/core_concepts/11_persistent_storage/index.mdx b/docs/core_concepts/11_persistent_storage/index.mdx index 976748e50..c2bc9b48d 100644 --- a/docs/core_concepts/11_persistent_storage/index.mdx +++ b/docs/core_concepts/11_persistent_storage/index.mdx @@ -56,11 +56,11 @@ All details at: /> -## Large data: S3, R2, MinIO, Azure Blob +## Large data: S3, R2, MinIO, Azure Blob, Google Cloud Storage -On heavier data objects & unstructured data storage, [Amazon S3](https://aws.amazon.com/s3/) (Simple Storage Service) and its alternatives [Cloudflare R2](https://www.cloudflare.com/developer-platform/r2/) and [MinIO](https://min.io/) as well as [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) storage are highly scalable and durable object storage service that provides secure, reliable, and cost-effective storage for a wide range of data types and use cases. 
+For heavier data objects & unstructured data storage, [Amazon S3](https://aws.amazon.com/s3/) (Simple Storage Service) and its alternatives [Cloudflare R2](https://www.cloudflare.com/developer-platform/r2/) and [MinIO](https://min.io/), as well as [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) and [Google Cloud Storage](https://cloud.google.com/storage), are highly scalable and durable object storage services that provide secure, reliable, and cost-effective storage for a wide range of data types and use cases.
 
-Windmill comes with a [native integration with S3 and Azure Blob](./large_data_files.mdx), making it the recommended storage for large objects like files and binary data.
+Windmill comes with a [native integration with S3, Azure Blob, and Google Cloud Storage](./large_data_files.mdx), making them the recommended storage for large objects like files and binary data.
 
 ![Workspace object storage Infographic](./s3_infographics.png "Workspace object storage Infographic")
 
@@ -68,8 +68,8 @@ All details at:
diff --git a/docs/core_concepts/11_persistent_storage/large_data_files.mdx b/docs/core_concepts/11_persistent_storage/large_data_files.mdx
index 4ed5ddc9b..37c92aed0 100644
--- a/docs/core_concepts/11_persistent_storage/large_data_files.mdx
+++ b/docs/core_concepts/11_persistent_storage/large_data_files.mdx
@@ -2,17 +2,17 @@ import DocCard from '@site/src/components/DocCard';
 import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
 
-# Large data: S3, R2, MinIO, Azure Blob
+# Large data: S3, R2, MinIO, Azure Blob, Google Cloud Storage
 
 This page is part of our section on [Persistent storage & databases](./index.mdx) which covers where to effectively store and manage the data manipulated by Windmill. Check that page for more options on data storage.
 
-On heavier data objects & unstructured data storage, [Amazon S3](https://aws.amazon.com/s3/) (Simple Storage Service) and its alternatives [Cloudflare R2](https://www.cloudflare.com/developer-platform/r2/) and [MinIO](https://min.io/) as well as [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) are highly scalable and durable object storage service that provides secure, reliable, and cost-effective storage for a wide range of data types and use cases.
+For heavier data objects & unstructured data storage, [Amazon S3](https://aws.amazon.com/s3/) (Simple Storage Service) and its alternatives [Cloudflare R2](https://www.cloudflare.com/developer-platform/r2/) and [MinIO](https://min.io/), as well as [Azure Blob Storage](https://azure.microsoft.com/en-us/products/storage/blobs) and [Google Cloud Storage](https://cloud.google.com/storage), are highly scalable and durable object storage services that provide secure, reliable, and cost-effective storage for a wide range of data types and use cases.
 
-Windmill comes with a [native integration with S3 and Azure Blob](../38_object_storage_in_windmill/index.mdx), making it the recommended storage for large objects like files and binary data.
+Windmill comes with a [native integration with S3, Azure Blob, and Google Cloud Storage](../38_object_storage_in_windmill/index.mdx), making them the recommended storage for large objects like files and binary data.
 
 ## Workspace object storage
 
-Connect your Windmill workspace to your S3 bucket or your Azure Blob storage to enable users to read and write from S3 without having to have access to the credentials.
+Connect your Windmill workspace to your S3 bucket, Azure Blob storage, or Google Cloud Storage to enable users to read and write from S3 without needing access to the credentials.
 
 Windmill S3 bucket browser will not work for buckets containing more than 20 files and uploads are limited to files < 50MB. Consider upgrading to Windmill [Enterprise Edition](/pricing) to use this feature with large buckets.
 
@@ -21,7 +21,7 @@ Windmill S3 bucket browser will not work for buckets containing more than 20 fil
@@ -173,14 +173,14 @@ For more info on how Data pipelines in Windmill, see [Data pipelines](../27_data
 	/>
 
-## Use Amazon S3, R2, MinIO and Azure Blob directly
+## Use Amazon S3, R2, MinIO, Azure Blob, and Google Cloud Storage directly
 
-Amazon S3, Cloudflare R2 and MinIO all follow the same API schema and therefore have a [common Windmill resource type](https://hub.windmill.dev/resource_types/42/). Azure Blob has a slightly different API than S3 but works with Windmill as well using its dedicated [resource type](https://hub.windmill.dev/resource_types/137/)
+Amazon S3, Cloudflare R2 and MinIO all follow the same API schema and therefore have a [common Windmill resource type](https://hub.windmill.dev/resource_types/42/). Azure Blob and Google Cloud Storage have slightly different APIs than S3 but work with Windmill as well using their dedicated resource types ([Azure Blob](https://hub.windmill.dev/resource_types/137/), [Google Cloud Storage](https://hub.windmill.dev/resource_types/268)).
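For illustration, using a Google Cloud Storage resource directly from a Python script could look like the sketch below. It assumes the resource exposes `bucket` and `serviceAccountKey` fields (matching the resource table this patch adds in `docs/integrations/google-cloud-storage.mdx`) and relies on the `google-cloud-storage` client library; all names are illustrative, not part of the patch.

```python
# Minimal sketch: list objects in a bucket using a GCS resource passed to the script.
# Assumes the resource has the shape {"bucket": "...", "serviceAccountKey": {...}}.
from google.cloud import storage
from google.oauth2 import service_account


def main(gcs_resource: dict):
    # Build credentials from the service account key stored in the resource
    credentials = service_account.Credentials.from_service_account_info(
        gcs_resource["serviceAccountKey"]
    )
    client = storage.Client(credentials=credentials)

    # List the first few objects in the configured bucket
    bucket = client.bucket(gcs_resource["bucket"])
    return [blob.name for blob in bucket.list_blobs(max_results=10)]
```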
\ No newline at end of file
diff --git a/docs/core_concepts/18_files_binary_data/index.mdx b/docs/core_concepts/18_files_binary_data/index.mdx
index cd2b50b06..71f0bcd61 100644
--- a/docs/core_concepts/18_files_binary_data/index.mdx
+++ b/docs/core_concepts/18_files_binary_data/index.mdx
@@ -7,18 +7,18 @@ import TabItem from '@theme/TabItem';
 
 In Windmill, JSON is the primary data format used for representing information. Binary data, such as files, are not easy to handle. Windmill provides two options.
 
-1. Have a dedicated storage for binary data: S3 or Azure Blob. Windmill has a first class integration with S3 buckets or Azure Blob containers.
+1. Have a dedicated storage for binary data: S3, Azure Blob, or Google Cloud Storage. Windmill has a first-class integration with S3 buckets, Azure Blob containers, and Google Cloud Storage buckets.
 2. If the above is not an option, there's always the possibility to store the binary as base64 encoded string.
 
 ## Workspace object storage
 
-The recommended way to store binary data is to upload it to S3 or Azure Blob Storage leveraging [Windmill's workspace object storage](../38_object_storage_in_windmill/index.mdx).
+The recommended way to store binary data is to upload it to S3, Azure Blob Storage, or Google Cloud Storage leveraging [Windmill's workspace object storage](../38_object_storage_in_windmill/index.mdx).
 
 Instance and workspace object storage are different from using [S3 resources](../../integrations/s3.mdx) within scripts, flows, and apps, which is free and unlimited. What is exclusive to the [Enterprise](/pricing) version is using the integration of Windmill with S3 that is a major convenience layer to enable users to read and write from S3 without having to have access to the credentials.
 
 :::info
-Windmill's integration with S3 and Azure Blob Storage works exactly the same and the features described below works in both cases. The only difference is that you need to select an `azure_blob` resource when setting up the S3 storage in the Workspace settings.
+Windmill's integration with S3, Azure Blob Storage, and Google Cloud Storage works exactly the same way, and the features described below apply in all cases. The only difference is that you need to select an `azure_blob` resource for Azure Blob or a `gcloud_storage` resource for Google Cloud Storage when setting up the storage in the Workspace settings.
 :::
 
@@ -49,7 +49,7 @@ All details on Workspace object storage, and how to [read](../38_object_storage_
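Since the integration behaves the same across S3, Azure Blob, and Google Cloud Storage, scripts that go through the workspace object storage stay backend-agnostic. A minimal sketch using the `wmill` SDK's `load_s3_file` and `write_s3_file` helpers (the `output/copy.txt` path is illustrative):

```python
import wmill
from wmill import S3Object


def main(input_file: S3Object):
    # Read the whole object from the workspace object storage as bytes,
    # whatever backend (S3, Azure Blob, GCS) the workspace is configured with
    content = wmill.load_s3_file(input_file)
    print(content.decode("utf-8"))

    # Write a copy back and return the resulting S3Object
    return wmill.write_s3_file(S3Object(s3="output/copy.txt"), content)
```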
diff --git a/docs/core_concepts/19_rich_display_rendering/index.mdx b/docs/core_concepts/19_rich_display_rendering/index.mdx index 5ade25c2f..4b1ecceab 100644 --- a/docs/core_concepts/19_rich_display_rendering/index.mdx +++ b/docs/core_concepts/19_rich_display_rendering/index.mdx @@ -212,7 +212,7 @@ Learn more at:
diff --git a/docs/core_concepts/20_jobs/index.mdx b/docs/core_concepts/20_jobs/index.mdx index 3453db69c..8c2c02512 100644 --- a/docs/core_concepts/20_jobs/index.mdx +++ b/docs/core_concepts/20_jobs/index.mdx @@ -144,7 +144,7 @@ For large logs storage (and display) and cache for distributed Python jobs, you This feature has no overlap with the [Workspace object storage](../38_object_storage_in_windmill/index.mdx#workspace-object-storage). -You can choose to use S3, Azure Blob Storage or AWS OIDC. For each you will find a button to test settings from a server or from a worker. +You can choose to use S3, Azure Blob Storage, AWS OIDC or Google Cloud Storage. For each you will find a button to test settings from a server or from a worker.
@@ -167,6 +167,44 @@ def main(input_file: S3Object):
     return S3Object(s3=output_file)
 ```
+
+
+
+```python
+import wmill
+from wmill import S3Object
+import polars as pl
+
+
+def main(input_file: S3Object):
+    # this will default to the workspace Google Cloud Storage resource
+    endpoint_url = wmill.polars_connection_settings().s3fs_args["endpoint_url"]
+    storage_options = wmill.polars_connection_settings().storage_options
+
+    # this will use the designated resource
+    # storage_options = wmill.polars_connection_settings("<PATH_TO_GCS_RESOURCE>").storage_options
+
+    # input is a parquet file: read it with read_parquet, then convert it to a lazy frame.
+    # Polars can read various file types, see
+    # https://pola-rs.github.io/polars/py-polars/html/reference/io.html
+    input_uri = "{}/{}".format(endpoint_url, input_file["s3"])
+
+    input_df = pl.read_parquet(input_uri, storage_options=storage_options).lazy()
+
+    # process the Polars dataframe. See Polars docs:
+    # for dataframe: https://pola-rs.github.io/polars/py-polars/html/reference/dataframe/index.html
+    # for lazy dataframe: https://pola-rs.github.io/polars/py-polars/html/reference/lazyframe/index.html
+    output_df = input_df.collect()
+    print(output_df)
+
+    # To write back the result to Google Cloud Storage, Polars needs an s3fs connection
+    output_file = "output/result.parquet"
+    output_uri = "{}/{}".format(endpoint_url, output_file)
+    output_df.write_parquet(output_uri, storage_options=storage_options)
+
+    return S3Object(s3=output_file)
+```
+
diff --git a/docs/core_concepts/38_object_storage_in_windmill/index.mdx b/docs/core_concepts/38_object_storage_in_windmill/index.mdx
index 3bc397dff..603f2f98a 100644
--- a/docs/core_concepts/38_object_storage_in_windmill/index.mdx
+++ b/docs/core_concepts/38_object_storage_in_windmill/index.mdx
@@ -14,13 +14,13 @@ Additionally, for [instance integration](#instance-object-storage), the Enterpri
 
 ## Workspace object storage
 
-Connect your Windmill workspace to your S3 bucket or your Azure Blob storage to enable users to read and write from S3 without having to have access to the credentials.
+Connect your Windmill workspace to your S3 bucket, Azure Blob storage, or GCS bucket to enable users to read and write from S3 without needing access to the credentials.
 
 ![Workspace object storage infographic](../11_persistent_storage/s3_infographics.png 'Workspace object storage infographic')
 
 Windmill S3 bucket browser will not work for buckets containing more than 20 files and uploads are limited to files < 50MB. Consider upgrading to Windmill [Enterprise Edition](/pricing) to use this feature with large buckets.
 
-Once you've created an [S3 or Azure Blob resource](../../integrations/s3.mdx) in Windmill, go to the workspace settings > S3 Storage. Select the resource and click Save.
+Once you've created an [S3, Azure Blob, or Google Cloud Storage resource](../../integrations/s3.mdx) in Windmill, go to the workspace settings > S3 Storage. Select the resource and click Save.
 
 ![S3 storage workspace settings](../11_persistent_storage/workspace_settings.png)
 
@@ -314,7 +314,7 @@ For more info on how to use files and S3 files in Windmill, see [Handling files
 
 Read and write from a storage that is not your main storage by specifying it in the S3 object as "secondary_storage" with the name of it.
 
-From the workspace settings, in tab "S3 Storage", just click on "Add secondary storage", give it a name, and pick a resource from type "S3", "Azure Blob", "AWS OIDC" or "Azure Workload Identity". 
You can save as many additional storages as you want as long as you give them a different name.
+From the workspace settings, in the "S3 Storage" tab, just click on "Add secondary storage", give it a name, and pick a resource of type "S3", "Azure Blob", "Google Cloud Storage", "AWS OIDC" or "Azure Workload Identity". You can save as many additional storages as you want as long as you give each a different name.
 
 Then from script, you can specify the secondary storage with an object with properties `s3` (path to the file) and `storage` (name of the secondary storage).
 
@@ -377,7 +377,7 @@ Under [Enterprise Edition](/pricing), instance object storage offers advanced fe
 
 ![Instance object storage infographic](./instance_object_storage_infographic.png 'Instance object storage infographic')
 
-This can be configured from the [instance settings](../../advanced/18_instance_settings/index.mdx#instance-object-storage), with configuration options for S3, Azure Blob or AWS OIDC.
+This can be configured from the [instance settings](../../advanced/18_instance_settings/index.mdx#instance-object-storage), with configuration options for S3, Azure Blob, Google Cloud Storage, or AWS OIDC.
 
 ![S3/Azure for Python/Go cache & large logs](../../core_concepts/20_jobs/s3_azure_cache.png "S3/Azure for Python/Go cache & large logs")
 
diff --git a/docs/core_concepts/index.mdx b/docs/core_concepts/index.mdx
index c4f23676e..4b4f34d30 100644
--- a/docs/core_concepts/index.mdx
+++ b/docs/core_concepts/index.mdx
@@ -119,7 +119,7 @@ On top of its editors to build endpoints, flows and apps, Windmill comes with a
 	/>
@@ -87,7 +87,7 @@ Under [Enterprise Edition](/pricing), instance object storage offers advanced fe
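Concretely, targeting a secondary storage from a script only requires the extra `storage` property on the S3 object. A minimal sketch, assuming a secondary storage named `archive` has been configured in the workspace settings (the name and path are illustrative):

```python
import wmill


def main():
    # Same shape as a regular S3Object, plus `storage` to select the
    # secondary storage by its configured name
    file = {"s3": "data/input.csv", "storage": "archive"}
    content = wmill.load_s3_file(file)
    return content.decode("utf-8")
```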
\ No newline at end of file
diff --git a/docs/integrations/cloudflare-r2.mdx b/docs/integrations/cloudflare-r2.mdx
index 46a28ac02..90065ae47 100644
--- a/docs/integrations/cloudflare-r2.mdx
+++ b/docs/integrations/cloudflare-r2.mdx
@@ -59,7 +59,7 @@ Feel free to create your own S3 scripts on [Windmill](../getting_started/00_how_
 
 ## Workspace object storage
 
-Once you've created an S3 or Azure Blob resource in Windmill, you can use Windmill's native integration with S3 and Azure Blob, making it the recommended storage for large objects like files and binary data.
+Once you've created an S3, Azure Blob, or Google Cloud Storage resource in Windmill, you can use Windmill's native integration with S3, Azure Blob, or GCS, making them the recommended storage for large objects like files and binary data.
 
 ![Workspace object storage Infographic](../core_concepts/11_persistent_storage/s3_infographics.png "Workspace object storage")
 
@@ -68,7 +68,7 @@ The workspace object storage is exclusive to the [Enterprise](/pricing) edition.
@@ -82,7 +82,7 @@ Under [Enterprise Edition](/pricing), instance object storage offers advanced fe
\ No newline at end of file diff --git a/docs/integrations/excel.mdx b/docs/integrations/excel.mdx index 738920ea0..8c9cf5eb0 100644 --- a/docs/integrations/excel.mdx +++ b/docs/integrations/excel.mdx @@ -93,7 +93,7 @@ More details on how to use base64 encoded strings in scripts and flows can be fo
@@ -187,7 +187,7 @@ More details on how to use S3 object storage in scripts and flows can be found i
\ No newline at end of file
diff --git a/docs/integrations/google-cloud-storage.mdx b/docs/integrations/google-cloud-storage.mdx
new file mode 100644
index 000000000..954ee789d
--- /dev/null
+++ b/docs/integrations/google-cloud-storage.mdx
@@ -0,0 +1,20 @@
+# Google Cloud Storage integration
+
+[Google Cloud Storage](https://cloud.google.com/storage) is Google's object storage service and an alternative to S3.
+
+:::info Windmill for data pipelines
+
+You can link a Windmill workspace to a Google Cloud Storage bucket and use it seamlessly as the source and/or target of your processing steps, without any boilerplate.
+
+
+See [Windmill for data pipelines](../core_concepts/27_data_pipelines/index.mdx) for more details.
+
+:::
+
+To integrate Google Cloud Storage with Windmill, you need to save the following elements as a [resource](../core_concepts/3_resources_and_types/index.mdx).
+
+| Property          | Type     | Description                       | Default | Required | Where to Find                                          | Additional Details               |
+| ----------------- | -------- | --------------------------------- | ------- | -------- | ------------------------------------------------------ | -------------------------------- |
+| bucket            | string   | Google Cloud Storage bucket name  |         | true     | Google Cloud Console > Storage > Buckets               | Name of the GCS bucket           |
+| serviceAccountKey | resource | Service Account Key               |         | true     | Google Cloud Console > IAM & Admin > Service Accounts  | Reference to a `gcloud` resource |
\ No newline at end of file
diff --git a/docs/integrations/s3.mdx b/docs/integrations/s3.mdx
index 67883d103..d9a13392b 100644
--- a/docs/integrations/s3.mdx
+++ b/docs/integrations/s3.mdx
@@ -59,7 +59,7 @@ Feel free to create your own S3 scripts on [Windmill](../getting_started/00_how_
 
 ## Workspace object storage
 
-Once you've created an S3 or Azure Blob resource in Windmill, you can use Windmill's native integration with S3 and Azure Blob, making it the recommended storage for large objects like files and binary data.
+Once you've created an S3, Azure Blob, or Google Cloud Storage resource in Windmill, you can use Windmill's native integration with S3, Azure Blob, or GCS, making them the recommended storage for large objects like files and binary data.
 
 ![Workspace object storage Infographic](../core_concepts/11_persistent_storage/s3_infographics.png "Workspace object storage Infographic")
 
@@ -68,7 +68,7 @@ The workspace object storage is exclusive to the [Enterprise](/pricing) edition.
@@ -82,7 +82,7 @@ Under [Enterprise Edition](/pricing), instance object storage offers advanced fe
\ No newline at end of file diff --git a/sidebars.js b/sidebars.js index a689a63d6..1767bf52c 100644 --- a/sidebars.js +++ b/sidebars.js @@ -251,7 +251,7 @@ const sidebars = { label: 'Engine benchmarks', link: { type: 'doc', - id: 'misc/benchmarks/competitors/results/index', + id: 'misc/benchmarks/competitors/results/index' }, items: [ { @@ -346,7 +346,7 @@ const sidebars = { { type: 'doc', id: 'core_concepts/persistent_storage/large_data_files', - label: 'Large data (S3, R2, MinIO, Azure Blob)' + label: 'Large data (S3, R2, MinIO, Azure Blob, AWS OIDC, GCS)' }, { type: 'doc', @@ -454,6 +454,11 @@ const sidebars = { id: 'integrations/gcp', label: 'Google Cloud Platform' }, + { + type: 'doc', + id: 'integrations/google-cloud-storage', + label: 'Google Cloud Storage' + }, { type: 'doc', id: 'integrations/gdrive', @@ -858,7 +863,7 @@ const sidebars = { 'apps/app_configuration_settings/components_groups', 'apps/react_components', 'apps/default_app/index', - 'apps/app_e-commerce', + 'apps/app_e-commerce' ], collapsed: true diff --git a/src/pages/integrations/s3.tsx b/src/pages/integrations/s3.tsx index f0104af7e..bfc684a68 100644 --- a/src/pages/integrations/s3.tsx +++ b/src/pages/integrations/s3.tsx @@ -1,13 +1,6 @@ import React from 'react'; -import { - PlusCircle, - List, - Upload, - Eye, - Text, - Repeat2 -} from 'lucide-react'; +import { PlusCircle, List, Upload, Eye, Text, Repeat2 } from 'lucide-react'; import Solution from '../../components/Solutions'; export default function S3() { @@ -27,63 +20,70 @@ export default function S3() { {name} {' '} - is a scalable, high-speed, web-based cloud storage service designed for online backup and archiving of data and application programs. + is a scalable, high-speed, web-based cloud storage service designed for online backup and + archiving of data and application programs.

- Windmill comes with a + Windmill comes with a{' '} + - native integration with S3 and Azure Blob + + native integration with S3, Azure Blob and Google Cloud Storage + {' '} - to store large objects like files & binary data and use them in your scripts, flows and data pipelines. + to store large objects like files & binary data and use them in your scripts, flows and + data pipelines.

- You can also interact with S3 from scripts supported in multiple languages, build UIs and flows that you can monitor and - trigger on demand, by schedule or webhooks. + You can also interact with S3 from scripts supported in multiple languages, build UIs and + flows that you can monitor and trigger on demand, by schedule or webhooks.

	),
	integrations_sub_title: `Write any script, flow or app targeting ${name} API, in TypeScript, Python, Go, PHP, Bash and SQL. Windmill has a built-in workspace object storage to interact with S3 directly from your scripts.`,
	hubIntegrations: [
-		{
-			title: 'Read a file from S3',
-			link: 'https://hub.windmill.dev/scripts/s3/7111/read-a-file-from-s3-within-a-script-s3',
-			description: 'Use Windmill embedded workspace object storage integration to read a S3 file and explore bucket directly within script.',
-			icon: Eye
-		},
-		{
-			title: 'Create file',
-			link: 'https://hub.windmill.dev/scripts/s3/7117/create-a-file-in-s3-(bun)-s3',
-			description: 'Create a file in S3 and explore bucket thans to Windmill workspace object storage.',
-			icon: PlusCircle
-		},
-		{
-			title: 'List objects in a bucket',
-			link: 'https://hub.windmill.dev/scripts/s3/1319/list-objects-in-a-bucket-s3',
-			description: 'List all objects in a bucket and return them as a list.',
-			icon: List
-		},
-		{
-			title: 'Upload text in bucket',
-			link: 'https://hub.windmill.dev/scripts/s3/1320/upload-text-in-bucket-s3',
-			description: 'Upload text content at a given path in an S3 resource.',
-			icon: Upload
-		},
-		{
-			title: 'Get object in bucket as text',
-			link: 'https://hub.windmill.dev/scripts/s3/1321/get-object-in-bucket-as-text-s3',
-			description: 'Get object in bucket as text and return it as a string.',
-			icon: Text
-		},
-		{
-			title: 'Get recently updated objects',
-			link: 'https://hub.windmill.dev/scripts/s3/1459/get-recently-updated-objects-s3',
-			description: 'Trigger script to check for recently updated objects (to be used in a flow loop iterating on each new item).',
-			icon: Repeat2
-		}
-	]
-
+		{
+			title: 'Read a file from S3',
+			link: 'https://hub.windmill.dev/scripts/s3/7111/read-a-file-from-s3-within-a-script-s3',
+			description:
+				'Use Windmill embedded workspace object storage integration to read an S3 file and explore the bucket directly within a script.',
+			icon: Eye
+		},
+		{
+			title: 'Create file',
+			link: 'https://hub.windmill.dev/scripts/s3/7117/create-a-file-in-s3-(bun)-s3',
+			description:
+				'Create a file in S3 and explore the bucket thanks to Windmill workspace object storage.',
+			icon: PlusCircle
+		},
+		{
+			title: 'List objects in a bucket',
+			link: 'https://hub.windmill.dev/scripts/s3/1319/list-objects-in-a-bucket-s3',
+			description: 'List all objects in a bucket and return them as a list.',
+			icon: List
+		},
+		{
+			title: 'Upload text in bucket',
+			link: 'https://hub.windmill.dev/scripts/s3/1320/upload-text-in-bucket-s3',
+			description: 'Upload text content at a given path in an S3 resource.',
+			icon: Upload
+		},
+		{
+			title: 'Get object in bucket as text',
+			link: 'https://hub.windmill.dev/scripts/s3/1321/get-object-in-bucket-as-text-s3',
+			description: 'Get object in bucket as text and return it as a string.',
+			icon: Text
+		},
+		{
+			title: 'Get recently updated objects',
+			link: 'https://hub.windmill.dev/scripts/s3/1459/get-recently-updated-objects-s3',
+			description:
+				'Trigger script to check for recently updated objects (to be used in a flow loop iterating on each new item).',
+			icon: Repeat2
+		}
+	]
 };

-	return ;
+	return ;
 }