5 | 5 | API_URL = "https://api.loading.se" |
6 | 6 | API_VERSION = "v1" |
7 | 7 | USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0" |
| 8 | +EDITORIAL_POST_TYPES = [ |
| 9 | + "neRegular", |
| 10 | + "review", |
| 11 | + "opinion", |
| 12 | + "update", |
| 13 | + "podcast", |
| 14 | + "conversation", |
| 15 | +] |
| 16 | + |
| 17 | +EDITORIAL_SORT = ["title"] |
8 | 18 |
9 | 19 |
10 | 20 | class LoadingApiWrapper: |
@@ -162,3 +172,34 @@ def get_other(self, page=None): |
162 | 172 | thread_data = self._get_threads_in_forum_category(category_name, page) |
163 | 173 |
164 | 174 | return thread_data |
| 175 | + |
| 176 | + def get_editorials(self, page=None, post_type=None, sort=None): |
| 177 | + url = f"{API_URL}/{API_VERSION}/posts/" |
| 178 | + headers = { |
| 179 | + "User-Agent": USER_AGENT, |
| 180 | + "texts": "texts", |
| 181 | + "post-type": "neRegular", |
| 182 | + } |
| 183 | + |
| 184 | + if post_type and post_type in EDITORIAL_POST_TYPES: |
| 185 | + headers["post-type"] = post_type |
| 186 | + |
| 187 | + if sort and sort in EDITORIAL_SORT: |
| 188 | + headers["sort"] = sort |
| 189 | + |
| 190 | + # Request a specific page; the API returns the first page by default.
| 191 | + if page and page > 1:
| 192 | + headers["page"] = str(page)
| 193 | +
| 194 | + # Page numbers below 1 can never exist, so return a 404-style response without calling the API.
| 195 | + if page is not None and page < 1:
| 196 | + return {"code": 404, "post": {"posts": [], "users": []}}
| 197 | + |
| 198 | + response = requests.get(url, headers=headers) |
| 199 | + data = response.json() |
| 200 | + |
| 201 | + # Page out of range. |
| 202 | + if not data["posts"]:
| 203 | + return {"code": 404, "post": data} |
| 204 | + |
| 205 | + return {"code": 200, "post": data} |
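
For reference, a minimal usage sketch of the new method, assuming LoadingApiWrapper can be constructed without arguments (its constructor is outside this diff) and that the live API accepts the listed post types and sort keys:

    # Hypothetical usage; network access and live data are required.
    api = LoadingApiWrapper()

    # Default call: first page of "neRegular" editorial posts.
    result = api.get_editorials()

    # Filtered call: reviews sorted by title, fetched from page 2.
    reviews = api.get_editorials(page=2, post_type="review", sort="title")

    if reviews["code"] == 200:
        for post in reviews["post"]["posts"]:
            print(post)

Both outcomes come back as plain dicts, so callers branch on the "code" field: 200 wraps the raw API payload under "post", while an invalid or out-of-range page yields a 404 code with an empty "posts" list.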