Commit bb98157

Merge pull request #31 from supabase/j0_add_storage_file_api
Add Storage File API
2 parents 08d5fe4 + 3070b5b commit bb98157

5 files changed: +223 -25 lines

supabase_py/client.py (+1 -1)

@@ -5,7 +5,7 @@
 from supabase_py.lib.auth_client import SupabaseAuthClient
 from supabase_py.lib.query_builder import SupabaseQueryBuilder
 from supabase_py.lib.realtime_client import SupabaseRealtimeClient
-from supabase_py.lib.supabase_storage_client import SupabaseStorageClient
+from supabase_py.lib.storage_client import SupabaseStorageClient

 DEFAULT_OPTIONS = {
     "schema": "public",

supabase_py/lib/storage/__init__.py

Whitespace-only changes.
supabase_py/lib/storage/storage_file_api.py (new file, +201 lines)

import requests
from requests import HTTPError


class StorageFileAPI:
    DEFAULT_SEARCH_OPTIONS = {
        "limit": 100,
        "offset": 0,
        "sortBy": {
            "column": "name",
            "order": "asc",
        },
    }
    DEFAULT_FILE_OPTIONS = {
        "cacheControl": "3600",
        "contentType": "text/plain;charset=UTF-8",
        "upsert": "False",
    }

    def __init__(self, url: str, headers: dict, bucket_id: str):
        """
        Parameters
        ----------
        url
            base URL for all the operations
        headers
            the base authentication headers
        bucket_id
            the id of the bucket to access; the list of buckets is available via SupabaseStorageClient.list_buckets()
        """
        self.url = url
        self.headers = headers
        self.bucket_id = bucket_id
        # self.loop = asyncio.get_event_loop()
        # self.replace = replace

    def create_signed_url(self, path: str, expires_in: int):
        """
        Parameters
        ----------
        path
            file path to be downloaded, including the current file name.
        expires_in
            number of seconds until the signed URL expires.
        """
        try:
            _path = self._get_final_path(path)
            response = requests.post(
                f"{self.url}/object/sign/{_path}",
                json={"expiresIn": str(expires_in)},
                headers=self.headers,
            )
            data = response.json()
            data["signedURL"] = f"{self.url}{data['signedURL']}"
            response.raise_for_status()
        except HTTPError as http_err:
            print(f"HTTP error occurred: {http_err}")  # Python 3.6
        except Exception as err:
            print(f"Other error occurred: {err}")  # Python 3.6
        else:
            return data

    def get_public_url(self, path: str):
        """
        Parameters
        ----------
        path
            file path to be downloaded, including the path and file name. For example `folder/image.png`.
        """
        try:
            _path = self._get_final_path(path)
            public_url = f"{self.url}/object/public/{_path}"
            return public_url
        except:
            print("Public URL not found")

    def move(self, from_path: str, to_path: str):
        """
        Moves an existing file, optionally renaming it at the same time.
        Parameters
        ----------
        from_path
            The original file path, including the current file name. For example `folder/image.png`.
        to_path
            The new file path, including the new file name. For example `folder/image-copy.png`.
        """
        try:
            response = requests.post(
                f"{self.url}/object/move",
                json={
                    "bucketId": self.bucket_id,
                    "sourceKey": from_path,
                    "destinationKey": to_path,
                },
                headers=self.headers,
            )
            response.raise_for_status()
        except HTTPError as http_err:
            print(f"HTTP error occurred: {http_err}")  # Python 3.6
        except Exception as err:
            print(f"Other error occurred: {err}")  # Python 3.6
        else:
            return response.json()

    def remove(self, paths: list):
        """
        Deletes files within the same bucket.
        Parameters
        ----------
        paths
            A list of files to be deleted, including the path and file name. For example [`folder/image.png`].
        """
        try:
            response = requests.delete(
                f"{self.url}/object/{self.bucket_id}",
                data={"prefixes": paths},
                headers=self.headers,
            )
            response.raise_for_status()
        except HTTPError as http_err:
            print(f"HTTP error occurred: {http_err}")  # Python 3.6
        except Exception as err:
            raise err  # Python 3.6
        else:
            return response.json()

    def list(self, path: str = None, options: dict = {}):
        """
        Lists all the files within a bucket.
        Parameters
        ----------
        path
            The folder path.
        options
            Search options, including `limit`, `offset`, and `sortBy`.
        """
        try:
            body = dict(self.DEFAULT_SEARCH_OPTIONS, **options)
            headers = dict(self.headers, **{"Content-Type": "application/json"})
            body["prefix"] = path if path else ""

            getdata = requests.post(
                f"{self.url}/object/list/{self.bucket_id}", json=body, headers=headers
            )
            getdata.raise_for_status()
        except HTTPError as http_err:
            print(f"HTTP error occurred: {http_err}")  # Python 3.6
        except Exception as err:
            raise err  # Python 3.6
        else:
            return getdata.json()

    def download(self, path: str):
        """
        Downloads a file.
        Parameters
        ----------
        path
            The file path to be downloaded, including the path and file name. For example `folder/image.png`.
        """
        try:
            _path = self._get_final_path(path)
            response = requests.get(f"{self.url}/object/{_path}", headers=self.headers)

        except HTTPError as http_err:
            print(f"HTTP error occurred: {http_err}")  # Python 3.6
        except Exception as err:
            raise err  # Python 3.6
        else:
            return response.content

    def upload(self, path: str, file: any, file_options: dict = None):
        """
        Uploads a file to an existing bucket.
        Parameters
        ----------
        path
            The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.
        file
            The file object to be stored in the bucket, or an async generator of chunks.
        file_options
            HTTP headers. For example `cacheControl`.
        """
        if file_options is None:
            file_options = {}
        headers = dict(self.headers, **file_options)
        headers.update(self.DEFAULT_FILE_OPTIONS)
        files = {"file": open(file, "rb")}
        _path = self._get_final_path(path)
        try:
            resp = requests.post(
                f"{self.url}/object/{_path}", data=files, headers=headers
            )
        except HTTPError as http_err:
            print(f"HTTP error occurred: {http_err}")  # Python 3.6
        except Exception as err:
            raise err  # Python 3.6
        else:
            return resp

    def _get_final_path(self, path: str):
        return f"{self.bucket_id}/{path}"
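
Taken together, StorageFileAPI is a thin wrapper around the storage /object endpoints using synchronous requests calls. A rough usage sketch, reusing the placeholder storage_url and headers from the earlier sketch and a hypothetical existing bucket id "avatars" (all values below are illustrative, not part of this commit):

# Hypothetical values; the bucket must already exist.
files = StorageFileAPI(storage_url, headers, "avatars")

signed = files.create_signed_url("folder/image.png", 60)   # POST /object/sign/..., dict containing "signedURL"
public = files.get_public_url("folder/image.png")          # builds the /object/public/... URL locally, no request
listing = files.list("folder")                             # POST /object/list/<bucket_id> with the search options
content = files.download("folder/image.png")               # GET /object/..., returns the raw response bytes
files.move("folder/image.png", "archive/image.png")        # POST /object/move
files.remove(["archive/image.png"])                        # DELETE /object/<bucket_id>
resp = files.upload("folder/new.png", "local/new.png")     # `file` is treated as a path and opened with open(file, "rb")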

supabase_py/lib/storage_client.py (new file, +21 lines)

from supabase_py.lib.storage.storage_bucket_api import StorageBucketAPI
from supabase_py.lib.storage.storage_file_api import StorageFileAPI


class SupabaseStorageClient(StorageBucketAPI):
    """
    Manage storage buckets and files.

    Examples
    --------
    >>> url = storage_file.create_signed_url("something/test2.txt", 80)  # signed url
    >>> loop.run_until_complete(storage_file.download("something/test2.txt"))  # upload or download
    >>> loop.run_until_complete(storage_file.upload("something/test2.txt", "path_file_upload"))
    >>> list_buckets = storage.list_buckets()
    >>> list_files = storage_file.list("something")
    """

    def __init__(self, url, headers):
        super().__init__(url, headers)

    def StorageFileAPI(self, id_):
        return StorageFileAPI(self.url, self.headers, id_)
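
Since download and upload are plain synchronous requests calls in this version, the loop.run_until_complete wrappers shown in the docstring are not strictly needed. A minimal end-to-end sketch, again reusing the placeholder storage_url and headers and a hypothetical "avatars" bucket:

storage = SupabaseStorageClient(storage_url, headers)
buckets = storage.list_buckets()                      # inherited from StorageBucketAPI
storage_file = storage.StorageFileAPI("avatars")      # per-bucket file API
url = storage_file.create_signed_url("something/test2.txt", 80)
files = storage_file.list("something")
data = storage_file.download("something/test2.txt")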

supabase_py/lib/supabase_storage_client.py (deleted, -24 lines)

This file was deleted.

0 commit comments
