Skip to content

Commit

Permalink
v1.0.0: Transfer files to and from Streamlit hosted filesystem (#4)
Browse files Browse the repository at this point in the history
* v0.2.0: Upload from streamlit hosted filesystem

* v1.0.0: Upload to signed URL from hosted fs

* v1.0.0: Download to streamlit hosted fs
  • Loading branch information
SiddhantSadangi authored Aug 13, 2023
1 parent 4bedd62 commit a11e211
Show file tree
Hide file tree
Showing 2 changed files with 155 additions and 74 deletions.
142 changes: 92 additions & 50 deletions demo/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
)

# ---------- INIT SESSION ----------
upsert = operators = bucket_id = file_size_limit = allowed_mime_types = None
upsert = operators = bucket_id = file_size_limit = allowed_mime_types = source = None
public = False

STORAGE_OPERATIONS = [
Expand Down Expand Up @@ -253,7 +253,7 @@
"Results cache duration",
value="",
placeholder="Optional",
help="This does not affect results caching. Leave blank to cache indefinitely",
help="Leave blank to cache indefinitely",
)
ttl = None if ttl == "" else ttl
constructed_storage_query = f"""st_supabase.{operation}("{bucket_id}", {ttl=})"""
Expand Down Expand Up @@ -335,30 +335,45 @@

elif operation == "upload":
destination_path = None
uploaded_file = st.file_uploader("Choose a file")
if uploaded_file:
destination_path = (
st.text_input(
"Enter destination path in the bucket",
placeholder=uploaded_file.name,
lcol, rcol = st.columns([1, 3])
source = lcol.selectbox(
label="Source filesystem",
options=["local", "hosted"],
help="Filesystem from where the file has to be uploaded",
)

if source == "local":
file = rcol.file_uploader("Choose a file")
if file:
destination_path = st.text_input(
"Destination path in the bucket",
value=file.name,
)
or uploaded_file.name
)

st.session_state["storage_disabled"] = (
False if all([bucket_id, uploaded_file]) else True
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", {source=}, file={file}, destination_path="{destination_path}")
# `UploadedFile` is the `BytesIO` object returned by `st.file_uploader()`
"""
else:
file = rcol.text_input(
"Source path",
placeholder="path/to/file.txt",
help="This is the path of the file on the Streamlit hosted filesystem",
)
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", file={uploaded_file}, destination_path="{destination_path}")
# `UploadedFile` is the `BytesIO` object returned by `st.file_uploader()`
"""

destination_path = st.text_input(
"Destination path in the bucket",
value=file,
)
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", {source=}, {file=}, destination_path="{destination_path}")
"""
st.session_state["storage_disabled"] = False if all([bucket_id, file]) else True
elif operation == "list_buckets":
ttl = st.text_input(
"Results cache duration",
value="",
placeholder="Optional",
help="This does not affect results caching. Leave blank to cache indefinitely",
help="Leave blank to cache indefinitely",
)
ttl = None if ttl == "" else ttl
constructed_storage_query = f"""st_supabase.{operation}({ttl=})"""
Expand All @@ -374,7 +389,7 @@
"Results cache duration",
value="",
placeholder="Optional",
help="This does not affect results caching. Leave blank to cache indefinitely",
help="Leave blank to cache indefinitely",
)
ttl = None if ttl == "" else ttl

Expand Down Expand Up @@ -419,7 +434,7 @@
"Results cache duration",
value="",
placeholder="Optional",
help="This does not affect results caching. Leave blank to cache indefinitely",
help="Leave blank to cache indefinitely",
)
ttl = None if ttl == "" else ttl

Expand Down Expand Up @@ -464,7 +479,7 @@
"Results cache duration",
value="",
placeholder="Optional",
help="This does not affect results caching. Leave blank to cache indefinitely",
help="Leave blank to cache indefinitely",
)
ttl = None if ttl == "" else ttl

Expand Down Expand Up @@ -505,26 +520,39 @@

elif operation == "upload_to_signed_url":
path = None
uploaded_file = st.file_uploader("Choose a file")
if uploaded_file:
path = st.text_input(
"Enter destination path in the bucket",
placeholder="/folder/subFolder/image.jpg",
)
path = st.text_input(
"Enter destination path in the bucket",
placeholder="/folder/subFolder/image.jpg",
)
token = st.text_input(
"Enter the token",
type="password",
help="This is generated by `.create_signed_url()`",
)
lcol, rcol = st.columns([1, 3])
source = lcol.selectbox(
label="Source filesystem",
options=["local", "hosted"],
help="Filesystem from where the file has to be uploaded",
)

token = st.text_input(
"Enter the token",
type="password",
help="This is generated by `.create_signed_url()`",
)
if source == "local":
file = rcol.file_uploader("Choose a file")

st.session_state["storage_disabled"] = (
False if all([bucket_id, token, path]) else True
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", {source=}, {path=}, token="***", file={file})
# `UploadedFile` is the `BytesIO` object returned by `st.file_uploader()`
"""
elif source == "hosted":
file = rcol.text_input(
"Source path",
placeholder="path/to/file.txt",
help="This is the path of the file on the Streamlit hosted filesystem",
)
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", {path=}, token="***", file={uploaded_file})
# `UploadedFile` is the `BytesIO` object returned by `st.file_uploader()`
"""
constructed_storage_query = f"""
st_supabase.{operation}("{bucket_id}", {source=}, {path=}, token="***", {file=})
"""
st.session_state["storage_disabled"] = False if all([bucket_id, token, path]) else True

st.write("**Constructed statement**")
if operation == "download":
Expand Down Expand Up @@ -558,20 +586,22 @@
):
try:
if operation == "upload":
response = st_supabase.upload(bucket_id, uploaded_file, destination_path)
response = st_supabase.upload(bucket_id, source, file, destination_path)
elif operation == "download":
file_name, mime, data = eval(constructed_storage_query)
st.success("Download ready 🎉🎉🎉")
st.success(
f"File **{file_name}** downloaded from Supabase to Streamlit hosted filesystem"
)
st.download_button(
"Download file ⏬",
"Download to local filesystem ⏬",
data=data,
file_name=file_name,
mime=mime,
use_container_width=True,
)
elif operation == "upload_to_signed_url":
response = st_supabase.upload_to_signed_url(
bucket_id, path, token, uploaded_file
bucket_id, source, path, token, file
)
else:
response = eval(constructed_storage_query)
Expand Down Expand Up @@ -602,10 +632,16 @@
operation == "upload"
and response["Key"] == f"{bucket_id}/{destination_path.lstrip('/')}"
):
st.success(
f"Uploaded **{uploaded_file.name}** to **{response['Key']}**",
icon="✅",
)
try:
st.success(
f"Uploaded **{file.name}** to **{response['Key']}**",
icon="✅",
)
except AttributeError:
st.success(
f"Uploaded **{file}** to **{response['Key']}**",
icon="✅",
)
elif operation == "remove":
st.info(f"Removed **{len(response)}** objects")
st.write(response)
Expand Down Expand Up @@ -638,10 +674,16 @@
st.code(response["path"])
elif operation == "upload_to_signed_url":
if response["Key"] == f"{bucket_id}/{path.lstrip('/')}":
st.success(
f"Uploaded **{uploaded_file.name}** to **{response['Key']}**",
icon="✅",
)
try:
st.success(
f"Uploaded **{file.name}** to **{response['Key']}**",
icon="✅",
)
except AttributeError:
st.success(
f"Uploaded **{file}** to **{response['Key']}**",
icon="✅",
)
else:
st.write(response)
except Exception as e:
Expand Down
87 changes: 63 additions & 24 deletions src/st_supabase_connection/__init__.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
import mimetypes
import os
import urllib
from datetime import timedelta
from io import BytesIO
from pathlib import Path
from typing import Literal, Optional, Tuple, Union

from postgrest import SyncSelectRequestBuilder, types
from streamlit import cache_data, cache_resource
from streamlit.connections import ExperimentalBaseConnection
from supabase import Client, create_client

__version__ = "1.0.0"


class SupabaseConnection(ExperimentalBaseConnection[Client]):
Expand Down Expand Up @@ -165,26 +167,47 @@ def create_bucket(
)
return response.json()

# TODO: Support overwriting existing files
def upload(
    self,
    bucket_id: str,
    source: Literal["local", "hosted"],
    file: Union[str, Path, BytesIO],
    destination_path: str,
) -> dict[str, str]:
    """Uploads a file to a Supabase bucket.

    Parameters
    ----------
    bucket_id : str
        Unique identifier of the bucket.
    source : str
        "local" to upload the in-memory file returned by
        `st.file_uploader()`, "hosted" to upload a file that already
        exists on the Streamlit hosted filesystem.
    file : str, Path, BytesIO
        File to upload. A filesystem path if `source="hosted"`, or the
        `UploadedFile` (`BytesIO`) returned by `st.file_uploader()` if
        `source="local"`.
    destination_path : str
        Path in the bucket where the file will be uploaded.
        Folders will be created as needed. Defaults to `/filename.fileext`.

    Raises
    ------
    ValueError
        If `source` is neither "local" nor "hosted".
    """
    if source == "local":
        # Upload the in-memory buffer directly. The previous version wrote
        # `file.name` into the current working directory and re-opened it,
        # leaving a stray copy of every upload on disk.
        response = self.client.storage.from_(bucket_id).upload(
            path=destination_path or f"/{file.name}",
            file=file.getvalue(),
            file_options={"content-type": file.type},
        )
    elif source == "hosted":
        with open(file, "rb") as f:
            response = self.client.storage.from_(bucket_id).upload(
                path=destination_path or f"/{os.path.basename(f.name)}",
                file=f,
                file_options={"content-type": mimetypes.guess_type(file)[0]},
            )
    else:
        # Without this guard an invalid `source` raised a confusing
        # NameError on `response` below.
        raise ValueError(f"source must be 'local' or 'hosted', got {source!r}")
    return response.json()

def download(
Expand Down Expand Up @@ -213,7 +236,6 @@ def download(
data : bytes
Downloaded bytes object
"""
import mimetypes

@cache_resource(ttl=ttl)
def _download(_self, bucket_id, source_path):
Expand Down Expand Up @@ -424,37 +446,54 @@ def create_signed_upload_url(self, bucket_id: str, path: str) -> dict[str, str]:
"path": path,
}

# TODO: Support overwriting existing files
def upload_to_signed_url(
    self,
    bucket_id: str,
    source: Literal["local", "hosted"],
    path: str,
    token: str,
    file: Union[str, Path, BytesIO],
) -> dict[str, str]:
    """Upload a file with a token generated from `.create_signed_url()`.

    Parameters
    ----------
    bucket_id : str
        Unique identifier of the bucket.
    source : str
        "local" to upload the in-memory file returned by
        `st.file_uploader()`, "hosted" to upload a file that already
        exists on the Streamlit hosted filesystem.
    path : str
        The file path, including the file name.
        This path will be created if it does not exist.
    token : str
        The token generated from `.create_signed_url()` for the specified `path`.
    file : str, Path, BytesIO
        File to upload. A filesystem path if `source="hosted"`, or the
        `UploadedFile` (`BytesIO`) returned by `st.file_uploader()` if
        `source="local"`.

    Raises
    ------
    ValueError
        If `source` is neither "local" nor "hosted".
    """
    # Build the signed-upload URL the same way the storage client does
    # internally, appending the signed token as a query parameter.
    _path = self.client.storage.from_(bucket_id)._get_final_path(path)
    _url = urllib.parse.urlparse(f"/object/upload/sign/{_path}")
    query_params = urllib.parse.urlencode({"token": token})
    final_url = f"{_url.geturl()}?{query_params}"

    # The object is stored under `path`; use its basename as the part name.
    filename = path.rsplit("/", maxsplit=1)[-1]

    if source == "local":
        payload = {"file": (filename, file, file.type)}
    elif source == "hosted":
        # Read the whole file into memory before issuing the request.
        # Passing the open file handle was observed to upload a 0-byte
        # object (original FIXME); presumably the handle was exhausted or
        # closed before the request body was built — TODO(review): confirm
        # this fix against a live bucket.
        payload = {
            "file": (filename, Path(file).read_bytes(), mimetypes.guess_type(file)[0])
        }
    else:
        raise ValueError(f"source must be 'local' or 'hosted', got {source!r}")

    response = self.client.storage.from_(bucket_id)._request(
        "PUT",
        final_url,
        files=payload,
    )
    return response.json()

0 comments on commit a11e211

Please sign in to comment.