Add a JSON API endpoint for getting items

This commit is contained in:
parent b424490003
commit a4adbff8f6

@ -415,3 +415,92 @@ def _get_ttx_for_date(dt: datetime) -> int:
def wsgi():
|
def wsgi():
|
||||||
app.config["INTAKE_DATA"] = intake_data_dir()
|
app.config["INTAKE_DATA"] = intake_data_dir()
|
||||||
return app
|
return app
|
||||||
#
# Experimental API endpoints for the React frontend
#
|
@app.get("/api/v1/items")
# @auth_check
def items():
    """
    Get multiple items according to a filter.

    Supported filters:
    - &channel=<channel>
    - &source=<source>
    - &hidden=<true|false>
    - TODO &tags=<+tag,-tag,...>
    - &count= and &page=

    Returns a JSON response with
    - count: number of items
    - items: items as JSON
    - prev: if there are previous pages, the previous page number
    - next: if there are further pages, the next page number

    Responds 400 when both &channel and &source are given, or when
    &count/&page are not non-negative integers; 404 when the requested
    channel does not exist.
    """
    data_path: Path = current_app.config["INTAKE_DATA"]

    # &channel and &source may not both be specified
    filter_channel = request.args.get("channel")
    filter_source = request.args.get("source")
    if filter_channel and filter_source:
        response = jsonify({"error": "One of channel and source may be specified"})
        response.status_code = 400
        return response

    source_names = []

    # If the channel was specified, load the channel defs to get the sources
    if filter_channel:
        channels_config_path = data_path / "channels.json"
        if not channels_config_path.exists():
            abort(404)
        channels = json.loads(channels_config_path.read_text(encoding="utf8"))
        if filter_channel not in channels:
            abort(404)
        source_names = channels[filter_channel]

    # If a source was specified, use that source
    elif filter_source:
        source_names = [filter_source]

    # If neither was specified, use all sources (any child dir with an intake.json)
    else:
        source_names = [
            child.name
            for child in data_path.iterdir()
            if (child / "intake.json").exists()
        ]

    sources = [LocalSource(data_path, name) for name in source_names]

    # Get the items, applying the hidden filter
    show_hidden = request.args.get("hidden") == "true"
    all_items = sorted(
        [
            item
            for source in sources
            for item in source.get_all_items()
            if not item.is_hidden or show_hidden
        ],
        key=item_sort_key,
    )

    # Apply paging filters. Previously a non-numeric &count/&page raised an
    # uncaught ValueError (HTTP 500); validate and respond 400 instead.
    try:
        count = int(request.args.get("count", "100"))
        page = int(request.args.get("page", "0"))
    except ValueError:
        response = jsonify({"error": "count and page must be integers"})
        response.status_code = 400
        return response
    if count < 0 or page < 0:
        response = jsonify({"error": "count and page must be non-negative"})
        response.status_code = 400
        return response
    paged_items = all_items[count * page : count * page + count]

    # Return the result set
    response_params = {
        "count": len(paged_items),
        "items": [item.as_json() for item in paged_items],
    }
    if page > 0:
        response_params["prev"] = page - 1
    if (count * page + count) < len(all_items):
        response_params["next"] = page + 1
    return jsonify(response_params)
|
||||||
|
|
|
@ -109,6 +109,9 @@ class Item:
|
||||||
def serialize(self, indent=True):
    """Dump this item's underlying dict as a JSON string.

    When *indent* is truthy the output is pretty-printed with a
    two-space indent; otherwise it is compact single-line JSON.
    """
    json_indent = 2 if indent else None
    return json.dumps(self._item, indent=json_indent)
|
||||||
|
|
||||||
|
def as_json(self):
    """Return a JSON-compatible dict of this item, tagged with its source name.

    Keys present in the item's own data take precedence over the
    injected "source" key.
    """
    payload = {"source": self.source.source_name}
    payload.update(self._item)
    return payload
|
||||||
|
|
||||||
def update_from(self, updated: "Item") -> None:
|
def update_from(self, updated: "Item") -> None:
|
||||||
for field in (
|
for field in (
|
||||||
"title",
|
"title",
|
||||||
|
|
Loading…
Reference in New Issue