
s3.upload_fileobj gives error a bytes-like object is required


Going off this reference, it seems you'll need to wrap a gzip.GzipFile object around your BytesIO, which will then perform the compression for you.

import io
import gzip

buffer = io.BytesIO()
with gzip.GzipFile(fileobj=buffer, mode="wb") as f:
    f.write(df.to_csv().encode())
buffer.seek(0)  # rewind so upload_fileobj reads from the start, not the end
s3.upload_fileobj(buffer, bucket, key)

Minimal Verifiable Example

import io
import gzip
import zlib
import pandas as pd

# Encode
df = pd.DataFrame({'A':[1,2,3], 'B':[6,7,8]})
buffer = io.BytesIO()
with gzip.GzipFile(fileobj=buffer, mode="wb") as f:
    f.write(df.to_csv().encode())
buffer.getvalue()
# b'\x1f\x8b\x08\x00\xf0\x0b\x11]\x02\xff\xd3q\xd4q\xe22\xd01\xd41\xe32\xd41\xd21\xe72\xd21\xd6\xb1\xe0\x02\x00Td\xc2\xf5\x17\x00\x00\x00'

# Decode
print(zlib.decompress(buffer.getvalue(), 16 + zlib.MAX_WBITS).decode())
# ,A,B
# 0,1,6
# 1,2,7
# 2,3,8
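
The 16 + zlib.MAX_WBITS argument just tells zlib to expect a gzip header and trailer; if that flag looks opaque, gzip.decompress does the same job. A minimal sketch, reusing the buffer from the example above:

import gzip

# equivalent decode: gzip.decompress handles the gzip framing itself
print(gzip.decompress(buffer.getvalue()).decode())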


The only thing you need is a TextIOWrapper, as to_csv writes strings while upload_fileobj expects bytes.

import io
import boto3

def upload_file(dataframe, bucket, key):
    """dataframe=DataFrame, bucket=bucket name in AWS S3, key=key name in AWS S3"""
    s3 = boto3.client('s3')
    csv_buffer = io.BytesIO()
    w = io.TextIOWrapper(csv_buffer)   # text view over the bytes buffer
    dataframe.to_csv(w, compression='gzip')
    w.seek(0)                          # flushes the wrapper and rewinds the underlying buffer
    s3.upload_fileobj(csv_buffer, bucket, key)
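
Called with a sample frame (the bucket and key names here are placeholders):

import pandas as pd

df = pd.DataFrame({'A':[1,2,3], 'B':[6,7,8]})
upload_file(df, 'your-bucket', 'test.csv')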

And the file uploads fine:

$ cat test.csv
,A,B
0,1,6
1,2,7
2,3,8
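
To check the round trip from the S3 side as well, a minimal sketch that downloads the object and reads it back with pandas, assuming the same placeholder bucket/key names and that the object holds the plain CSV shown above:

import io
import boto3
import pandas as pd

s3 = boto3.client('s3')
buf = io.BytesIO()
s3.download_fileobj('your-bucket', 'test.csv', buf)  # placeholder names
buf.seek(0)
print(pd.read_csv(buf, index_col=0))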


You could try something like this.

import io
import boto3
import pandas as pd

df = pd.DataFrame({'A':[1,2,3], 'B':[6,7,8]})

def upload_file(dataframe, bucket, key):
    """dataframe=DataFrame, bucket=bucket name in AWS S3, key=key name in AWS S3"""
    s3 = boto3.client('s3')
    csv_buffer = io.StringIO()
    dataframe.to_csv(csv_buffer)  # compression='gzip' has no effect on a text buffer, so it is dropped here
    # upload_fileobj needs a binary file object, so encode the CSV text to bytes
    bytes_buffer = io.BytesIO(csv_buffer.getvalue().encode())
    s3.upload_fileobj(bytes_buffer, bucket, key)

upload_file(df, 'your-bucket', 'your-key')  # placeholder names
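
If gzip output is actually wanted here, note that on recent pandas (1.2+, which added binary file-handle support to to_csv) the compression step can be pushed into to_csv itself; a minimal sketch under that assumption:

import io
import boto3
import pandas as pd

df = pd.DataFrame({'A':[1,2,3], 'B':[6,7,8]})
s3 = boto3.client('s3')

buffer = io.BytesIO()
df.to_csv(buffer, mode='wb', compression='gzip')  # assumes pandas >= 1.2
buffer.seek(0)
s3.upload_fileobj(buffer, 'your-bucket', 'test.csv.gz')  # placeholder names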