local_to_s3

Tasks: create_local_to_s3_job


Providers: Amazon (apache-airflow-providers-amazon)

Run this DAG

1. Install the Astronomer CLI (skip this step if you already have the CLI installed):
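A typical route, assuming macOS with Homebrew (Astronomer also provides a curl-based install script at install.astronomer.io):

brew install astro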

2. Initialize the project in a local directory:
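With the CLI installed, the project skeleton is created with:

astro dev init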

3. Copy and paste the code below into a file in the dags directory.
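For example, the file could be saved as dags/example_local_to_s3.py (the name is arbitrary; Airflow parses every .py file in the dags directory).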

4. Add the following to your requirements.txt file:
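The DAG imports the Amazon provider, so requirements.txt needs at least the provider package (pinning a specific version is optional):

apache-airflow-providers-amazon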

5. Run the DAG from the local directory where the project was initialized:
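With the Astro CLI this starts a local Airflow environment; once it is up, the example_local_to_s3 DAG can be unpaused and triggered from the Airflow UI (by default at http://localhost:8080):

astro dev start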

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os

from airflow import models
from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator
from airflow.utils.dates import datetime

# Bucket and key fall back to test values but can be overridden via environment variables
S3_BUCKET = os.environ.get("S3_BUCKET", "test-bucket")
S3_KEY = os.environ.get("S3_KEY", "key")

with models.DAG(
    "example_local_to_s3",
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),  # Override to match your needs
) as dag:
    # [START howto_local_transfer_data_to_s3]
    # Upload a file from the worker's local filesystem to the configured S3 bucket/key
    create_local_to_s3_job = LocalFilesystemToS3Operator(
        task_id="create_local_to_s3_job",
        filename="relative/path/to/file.csv",
        dest_key=S3_KEY,
        dest_bucket=S3_BUCKET,
    )
    create_local_to_s3_job  # single task, so no dependencies to declare
    # [END howto_local_transfer_data_to_s3]
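A note on configuration: the DAG reads S3_BUCKET and S3_KEY from environment variables, and LocalFilesystemToS3Operator resolves filename against the worker's local filesystem. The operator authenticates through an Airflow AWS connection (its aws_conn_id argument defaults to aws_default), so credentials must be supplied before the task will succeed. One way to do that locally, assuming connection configuration via environment variables and placeholder credentials, is:

export AIRFLOW_CONN_AWS_DEFAULT='aws://<aws_access_key_id>:<aws_secret_access_key>@'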