Compare commits
66 Commits
| SHA1 |
| --- |
| 7bea8c247a |
| 647584e55b |
| c4d53eb993 |
| 3519517b96 |
| 7477504508 |
| 3118758f1d |
| 6ef51b7286 |
| 680d8db6bb |
| 7ca6ccfaf3 |
| f804299180 |
| d23efdf00b |
| 453bc9d601 |
| 421271e2c3 |
| 6c312a1aae |
| 9dacdb987a |
| 18dd930579 |
| fc2fadedd3 |
| 186f24e486 |
| 76449d814f |
| af77322755 |
| 565522535f |
| 1057b54b3d |
| c49b6c9088 |
| c18cc73496 |
| ab58837b5d |
| fcea58148e |
| f153263bc4 |
| 79dbea50c2 |
| f89d5f5d05 |
| d71334cda7 |
| 13c2c64583 |
| a84a464901 |
| 1fb9e5853c |
| 4a75e8e814 |
| 9a77beb582 |
| b7683f6805 |
| dde799ff8e |
| 675cb64f47 |
| 2d7d48846d |
| 0fa79abdfd |
| 7b8d3796bd |
| 14df3cac03 |
| 4355a79ec0 |
| cb161b4f91 |
| fb0d4e9aee |
| 2a58c01319 |
| 1468c3adc4 |
| bd488d7b47 |
| cd00c0fedc |
| 82a2b5eab9 |
| f540ebcb4d |
| 10f4294328 |
| 4b93a258a6 |
| c040f97680 |
| 6bd9449baf |
| a67b21bf41 |
| 43fb2c3917 |
| 96ab254812 |
| 0c1b978264 |
| 5798190254 |
| dc92eb6738 |
| 390f972b0e |
| a47c1f1bfb |
| a3d898aa50 |
| 7aae56415d |
| b399bd62ce |
.gitignore (vendored), 171 changed lines
@@ -1,169 +1,2 @@

```
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# nixos-shell
nixos.qcow2

# nix-build
result

# test sources
tests/**/*.item
.direnv
tmp/
```
LICENSE, 674 deleted lines
@@ -1,674 +0,0 @@
(The deleted file contained the complete, unmodified text of the GNU General Public License, version 3, 29 June 2007.)
Makefile (new file), 10 lines
@@ -0,0 +1,10 @@

```
.PHONY: help serve test-data

help: ## display this help
	@awk 'BEGIN{FS = ":.*##"; printf "\033[1m\nUsage\n \033[1;92m make\033[0;36m <target>\033[0m\n"} /^[a-zA-Z0-9_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } ' $(MAKEFILE_LIST)

serve: ## Run "intake serve" with live reload
	@air -build.cmd "go build -o tmp/intake" -build.bin tmp/intake -build.args_bin serve,--data-dir,tmp

test-data: ## Recreate test data in tmp/
	@test/test_items.sh
```
README.md, 191 changed lines
@@ -1,89 +1,140 @@
# intake

Intake is an arbitrary feed aggregator that generalizes the concept of a feed. Rather than being restricted to parsing items out of an RSS feed, Intake provides a middle layer of executing arbitrary programs that conform to a JSON-based specification. An Intake source can parse an RSS feed, but it can also scrape a website without a feed, provide additional logic to filter or annotate feed items, or integrate with an API.
Intake is an arbitrary feed aggregator that generalizes the concept of a feed.
Rather than being restricted to parsing items out of an RSS feed, Intake provides a middle layer of executing arbitrary commands that conform to a JSON-based specification.
An Intake source can parse an RSS feed, but it can also scrape a website without a feed, provide additional logic to filter or annotate feed items, or integrate with an API.

A basic demonstration in a VM can be run with `nixos-shell` using the `#demo` flake attribute.

## Development

## Feed source definitions

Parity with existing Python version

The base Intake directory is `$XDG_DATA_HOME/intake`. Each feed source's data is contained within a subdirectory of the base directory. The name of the feed source is the name of the subdirectory.

* [x] create sources
* [ ] rename sources
* fetch sources
* [x] create and delete items
* [x] update existing items
* [ ] support item TTL and TTD
* [x] on_create triggers
* [ ] on_delete triggers
* [x] dry-run
* item actions
* [x] create
* [x] edit
* [ ] rename
* [x] delete
* [x] execute
* [x] require items to declare action support
* [ ] state files
* [ ] source environment
* [ ] working directory set
* [ ] update web UI credentials
* [ ] automatic crontab integration
* [ ] feed supports item TTS
* [x] data directory from envvars
* [ ] source-level tt{s,d,l}
* [ ] source batching
* channels
* [ ] create
* [ ] edit
* [ ] rename
* [ ] delete
* feeds
* [x] show items
* [x] deactivate items
* [x] mass deactivate
* [ ] punt
* [x] trigger actions
* [x] add ad-hoc items
* [ ] show/hide deactivated items
* [ ] show/hide tts items
* [x] sort by time ?? created
* [ ] paging
* [ ] NixOS module
* [ ] NixOS module demo

Feed source directories have the following structure:

Additional features

```
intake
|- <source name>
|  |- intake.json
|  |- state
|  |- <item id>.item
|  |- <item id>.item
|  |- ...
|- <source name>
|  | ...
| ...
```

* [ ] metric reporting
* [ ] on action failure, create an error item with logs
* [ ] first-party password handling instead of basic auth and htpasswd
* [ ] items gracefully add new fields and `action` keys
* [ ] arbitrary date punt
* [ ] HTTP edit item
* [ ] sort crontab entries
* [ ] TUI feed view

`intake.json` must be present; the other files are optional. Each `.item` file contains the data for one feed item. `state` provides a file for the feed source to write arbitrary data, e.g. JSON or binary data.

## Overview

`intake.json` has the following structure:

In Intake, a _source_ represents a single content feed of discrete _items_, such as a blog and its posts or a website and its pages.
Each source has associated _actions_, which are executable commands.
The `fetch` action checks the feed and returns the items in a JSON format.
Each item returned by a fetch is stored by Intake and appears in that feed's source.
When you have read an item, you can deactivate it, which hides it from your feed.
When a deactivated item is no longer returned by `fetch`, it is deleted.
This allows you to consume feed content at your own pace without missing anything.
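As a concrete illustration of that flow, a `fetch` for a hypothetical blog source might emit items like these (the ids, links, and the `archive` action are invented for the example; the full field reference is given below):

```
{"id": "post-1000", "title": "First post", "link": "https://blog.example.com/1000", "action": {"archive": {}}}
{"id": "post-1001", "title": "Second post", "link": "https://blog.example.com/1001", "action": {"archive": {}}}
```

Each line is one item, and `id` is the only required field.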
```json
{
  "action": {
    "fetch": {
      "exe": "<absolute path to program or name on intake's PATH>",
      "args": ["list", "of", "program", "arguments"]
    },
    "<action name>": {
      "exe": "...",
      "args": "..."
    }
  },
  "env": {
    "...": "..."
  },
  "cron": "* * * * *"
}
```

Intake stores all its data in a SQLite database.
This database is stored in `$INTAKE_DATA_DIR`, `$XDG_DATA_HOME/intake`, or `$HOME/.local/share/intake`, whichever is resolved first.
The database can also be specified on the command line via `--data-dir`/`-d` instead of the environment.
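A minimal sketch of that lookup order in Go, assuming only what the paragraph above states (an illustration, not the project's actual resolution code):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// resolveDataDir picks the data directory: an explicit --data-dir value wins,
// then $INTAKE_DATA_DIR, then $XDG_DATA_HOME/intake, then ~/.local/share/intake.
func resolveDataDir(flagValue string) string {
	if flagValue != "" {
		return flagValue
	}
	if dir := os.Getenv("INTAKE_DATA_DIR"); dir != "" {
		return dir
	}
	if xdg := os.Getenv("XDG_DATA_HOME"); xdg != "" {
		return filepath.Join(xdg, "intake")
	}
	home, _ := os.UserHomeDir()
	return filepath.Join(home, ".local", "share", "intake")
}

func main() {
	fmt.Println(resolveDataDir(""))
}
```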
Each key under `action` defines an action that can be taken for the source. An action must contain `exe` and may contain `args`. A source must have a `fetch` action.

### Items

Each key under `env` defines an environment variable that will be set when actions are executed.

If `cron` is present, it must define a crontab schedule. Intake will automatically create crontab entries to update each source according to its cron schedule.
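For example, a source that polls an RSS feed hourly with a hypothetical helper script could use a config like the following (the paths, script name, and `archive` action are illustrative, not part of Intake):

```json
{
  "action": {
    "fetch": {
      "exe": "/opt/intake-sources/fetch_rss.py",
      "args": ["https://blog.example.com/feed.xml"]
    },
    "archive": {
      "exe": "/opt/intake-sources/archive.sh"
    }
  },
  "env": {
    "FETCH_TIMEOUT": "30"
  },
  "cron": "0 * * * *"
}
```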
## Interface for source programs

Intake interacts with sources by executing the actions defined in the source's `intake.json`. The `fetch` action is required and used to check for new feed items when `intake update` is executed.

To execute an action, intake executes the `exe` program for the action with the corresponding `args` (if present) as arguments. The process's working directory is set to the source's folder, i.e. the folder containing `intake.json`. The process's environment is as follows:

* intake's environment is inherited.
* `STATE_PATH` is set to the absolute path of `state`.
* Each key in `env` in `config.json` is passed with its value.

Anything written to `stderr` by the process will be captured and logged by Intake.

The `fetch` action is used to fetch the current state of the feed source. It receives no input and should write feed items to `stdout` as JSON objects, each on one line. All other actions are taken in the context of a single item. These actions receive the item as a JSON object on the first line of `stdin`. The process should write the item back to `stdout` with any changes as a result of the action.

An item must have a key under `action` with that action's name to support executing that action for that item. The value under that key may be any JSON structure used to manage the item-specific state.

All input and output is treated as UTF-8. If an item cannot be parsed or the exit code of the process is nonzero, Intake will consider the action to be a failure. No items or other feed changes will happen as a result of a failed action, except for changes to `state` done by the action process.

## Top-level item fields

Items are passed between Intake and sources as JSON objects.
Only the `id` field is required.
Any unspecified field is equivalent to the empty string, object, or 0, depending on the field's type.

| Field name | Specification | Description |
| ---------- | ------------- | ----------- |
| `id` | **Required** | A unique identifier within the scope of the feed source. |
| `created` | **Automatic** | The Unix timestamp at which intake first processed the item. |
| `active` | **Automatic** | Whether the item is active. Inactive items are not displayed in channels. |
| `id` | **Required** | A unique identifier within the source. |
| `source` | **Automatic** | The source that produced the item. |
| `created` | **Automatic** | The Unix timestamp at which Intake first processed the item. |
| `active` | **Automatic** | Whether the item is active and displayed in feeds. |
| `title` | Optional | The title of the item. If an item has no title, `id` is used as a fallback title. |
| `author` | Optional | An author name associated with the item. Displayed in the item footer. |
| `body` | Optional | Body text of the item as raw HTML. This will be displayed in the item without further processing! Consider your sources' threat models against injection attacks. |
| `link` | Optional | A hyperlink associated with the item. |
| `time` | Optional | A time associated with the item, not necessarily when the item was created. Feeds sort by `time` when it is defined and fall back to `created`. Displayed in the item footer. |
| `tags` | Optional | A list of tags that describe the item. Tags help filter feeds that contain different kinds of content. |
| `tts` | Optional | The time-to-show of the item. An item with `tts` defined is hidden from channel feeds until the current time is after `created + tts`. |
| `ttl` | Optional | The time-to-live of the item. An item with `ttl` defined is not deleted by feed updates as long as `created + ttl` is in the future, even if it is inactive. |
| `ttd` | Optional | The time-to-die of the item. An item with `ttd` defined is deleted by feed updates if `created + ttd` is in the past, even if it is active. |
| `action` | Optional | An object with keys for all supported actions. The schema of the values depends on the source. |
| `time` | Optional | A Unix timestamp associated with the item, not necessarily when the item was created. Items sort by `time` when it is defined and fall back to `created`. Displayed in the item footer. |
| `action` | Optional | A JSON object with keys for all supported actions. No schema is imposed on the values. |

Existing items are updated with new values when a fetch or action produces them, with some exceptions:

* Automatic fields cannot be changed.
* If a field's previous value is non-empty and the new value is empty, the old value is kept.
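Putting the field reference together, a stored item might look like the following (the values are illustrative; only `id` comes from the source as a hard requirement, and `source`, `created`, and `active` are set automatically):

```json
{
  "id": "post-1000",
  "source": "exampleblog",
  "created": 1700000000,
  "active": true,
  "title": "First post",
  "author": "Jane Doe",
  "body": "<p>Hello, world!</p>",
  "link": "https://blog.example.com/1000",
  "time": 1699990000,
  "tags": ["blog"],
  "ttl": 604800,
  "action": {"archive": {}}
}
```

With `ttl` set to 604800 seconds (one week), this item survives feed updates until a week after `created`, even if it has been deactivated.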
### Sources

A source is identified by its name. A minimally functional source requires a `fetch` action that returns items.

### Action API

The Intake action API defines how programs should behave to be used with Intake sources.

To execute an action, Intake executes the command specified by that action's `argv`.
The process's environment is as follows:

* `intake`'s environment is inherited.
* `STATE_PATH` is set to the absolute path of a file containing the source's persistent state.

When an action receives an item as input, that item's JSON representation is written to that action's `stdin`.
When an action outputs an item, it should write the item's JSON representation to `stdout` on one line.
All input and output is assumed to be UTF-8.
If an item cannot be parsed or the exit code of the process is nonzero, Intake will consider the action to be a failure.
No items will be created or updated as a result of the failed action.
Anything written to `stderr` by the action will be captured and logged by Intake.

The `fetch` action receives no input and outputs multiple items.
This action is executed when a source is updated.
The `fetch` action is the core of an Intake source.
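To make the contract concrete, here is a minimal fetch command written in Go; the feed contents are hard-coded purely for illustration, where a real fetch would scrape a site or call an API:

```go
// A minimal "fetch": write one JSON item per line to stdout and exit 0.
package main

import (
	"encoding/json"
	"os"
)

type item struct {
	ID    string `json:"id"`
	Title string `json:"title,omitempty"`
	Link  string `json:"link,omitempty"`
}

func main() {
	enc := json.NewEncoder(os.Stdout) // Encode emits one value per line
	items := []item{
		{ID: "post-1000", Title: "First post", Link: "https://blog.example.com/1000"},
		{ID: "post-1001", Title: "Second post", Link: "https://blog.example.com/1001"},
	}
	for _, it := range items {
		if err := enc.Encode(it); err != nil {
			os.Exit(1) // a nonzero exit marks the action as failed
		}
	}
}
```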
||||

All other actions take an item as input and should output the same item with any modifications made by the action.
Actions can only be executed for an item if that item has a key with the same name in its `action` field.
The value of that key may be any non-null JSON value used to pass state to the action.
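
Continuing the sketch above, an item-level action could read the item from `stdin`, modify it, and write it back on one line (the field change here is arbitrary):

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	// Read the item JSON that Intake writes to stdin.
	var item map[string]any
	if err := json.NewDecoder(os.Stdin).Decode(&item); err != nil {
		fmt.Fprintln(os.Stderr, "could not decode item:", err)
		os.Exit(1)
	}
	// Make an arbitrary modification; unchanged fields pass through as-is.
	item["title"] = "updated title"
	line, err := json.Marshal(item)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(string(line)) // the modified item, one line on stdout
}
```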

The special action `on_create` is always run when an item is first returned by a fetch.
The item does not need to declare support for `on_create`.
This action is not accessible through the web interface, so if you need to retry the action, you should create another action with the same command as `on_create`.
If an item's `on_create` fails, the item is still created, but without any changes the action would have made.
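
For example, to keep the `on_create` logic retryable by hand, the same command could also be registered under an ordinary action name (assuming `on_create` is registered like any other source action; the command and names here are placeholders):

```sh
intake action add --source demo --action on_create -- ./annotate-item
intake action add --source demo --action annotate -- ./annotate-item
```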

The special action `on_delete` is like `on_create`, except it runs right before an item is deleted.
It does not require explicit support and is not accessible in the web interface.
The output of `on_delete` is ignored; it is primarily for causing side effects like managing state.
|
34
cmd/action.go
Normal file
34
cmd/action.go
Normal file
@ -0,0 +1,34 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var actionCmd = &cobra.Command{
|
||||
Use: "action",
|
||||
Short: "Manage and run source actions",
|
||||
Long: `Add, edit, delete, and run source actions on items.
|
||||
|
||||
A feed source is updated by the "fetch" action, which receives no input and
|
||||
returns one JSON item per line on stdout. Other source actions are run on a
|
||||
specific item, receiving that item on stdin and expecting that item, with any
|
||||
modifications made by the action, on stdout.
|
||||
|
||||
Items declare support for an action by having an "action" key containing an
|
||||
object with a key for every supported action. The value of that key may be
|
||||
any arbitrary JSON value. Use --force to execute an unsupported action anyway,
|
||||
though the action may fail if it operates on the item's action data.
|
||||
|
||||
The special action "on_create" is always run when an item is first returned
|
||||
by a fetch. The item does not need to declare support for "on_create". This
|
||||
action is not accessible through the web interface, so if you need to retry
|
||||
the action, you need another action with the same command as "on_create".
|
||||
If an item's "on_create" fails, the item is still created, but without any
|
||||
changes from the "on_create", if any.
|
||||
|
||||
To execute the "fetch" action, use "intake source fetch".`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(actionCmd)
|
||||
}
|
52
cmd/actionAdd.go
Normal file
52
cmd/actionAdd.go
Normal file
@ -0,0 +1,52 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var actionAddCmd = &cobra.Command{
|
||||
Use: "add [flags] -- argv...",
|
||||
Short: "Add an action to a source",
|
||||
Long: `Add an action to a source.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
actionAdd(getArgv(cmd, args))
|
||||
},
|
||||
}
|
||||
|
||||
var actionAddSource string
|
||||
var actionAddAction string
|
||||
|
||||
func init() {
|
||||
actionCmd.AddCommand(actionAddCmd)
|
||||
|
||||
actionAddCmd.Flags().StringVarP(&actionAddSource, "source", "s", "", "Source to add action")
|
||||
actionAddCmd.MarkFlagRequired("source")
|
||||
|
||||
actionAddCmd.Flags().StringVarP(&actionAddAction, "action", "a", "", "Action name")
|
||||
actionAddCmd.MarkFlagRequired("action")
|
||||
}
|
||||
|
||||
func actionAdd(argv []string) {
|
||||
if actionAddSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
if actionAddAction == "" {
|
||||
log.Fatal("error: --action is empty")
|
||||
}
|
||||
if len(argv) == 0 {
|
||||
log.Fatal("error: no argv provided")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
err := core.AddAction(db, actionAddSource, actionAddAction, argv)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to add action: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Added action %s to source %s", actionAddAction, actionAddSource)
|
||||
}
|
50
cmd/actionDelete.go
Normal file
50
cmd/actionDelete.go
Normal file
@ -0,0 +1,50 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var actionDeleteCmd = &cobra.Command{
|
||||
Use: "delete",
|
||||
Aliases: []string{"rm"},
|
||||
Short: "Delete an action from a source",
|
||||
Long: `Delete an action from a source.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
actionDelete()
|
||||
},
|
||||
}
|
||||
|
||||
var actionDeleteSource string
|
||||
var actionDeleteAction string
|
||||
|
||||
func init() {
|
||||
actionCmd.AddCommand(actionDeleteCmd)
|
||||
|
||||
actionDeleteCmd.Flags().StringVarP(&actionDeleteSource, "source", "s", "", "Source to delete action from")
|
||||
actionDeleteCmd.MarkFlagRequired("source")
|
||||
|
||||
actionDeleteCmd.Flags().StringVarP(&actionDeleteAction, "action", "a", "", "Action name")
|
||||
actionDeleteCmd.MarkFlagRequired("action")
|
||||
}
|
||||
|
||||
func actionDelete() {
|
||||
if actionDeleteSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
if actionDeleteAction == "" {
|
||||
log.Fatal("error: --action is empty")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
err := core.DeleteAction(db, actionDeleteSource, actionDeleteAction)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to delete action: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Deleted action %s from source %s", actionDeleteAction, actionDeleteSource)
|
||||
}
|
52
cmd/actionEdit.go
Normal file
52
cmd/actionEdit.go
Normal file
@ -0,0 +1,52 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var actionEditCmd = &cobra.Command{
|
||||
Use: "edit",
|
||||
Short: "Edit an action on a source",
|
||||
Long: `Edit an action on a source.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
actionEdit(getArgv(cmd, args))
|
||||
},
|
||||
}
|
||||
|
||||
var actionEditSource string
|
||||
var actionEditAction string
|
||||
|
||||
func init() {
|
||||
actionCmd.AddCommand(actionEditCmd)
|
||||
|
||||
actionEditCmd.Flags().StringVarP(&actionEditSource, "source", "s", "", "Source to edit action")
|
||||
actionEditCmd.MarkFlagRequired("source")
|
||||
|
||||
actionEditCmd.Flags().StringVarP(&actionEditAction, "action", "a", "", "Action name")
|
||||
actionEditCmd.MarkFlagRequired("action")
|
||||
}
|
||||
|
||||
func actionEdit(argv []string) {
|
||||
if actionEditSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
if actionEditAction == "" {
|
||||
log.Fatal("error: --action is empty")
|
||||
}
|
||||
if len(argv) == 0 {
|
||||
log.Fatal("error: no argv provided")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
err := core.UpdateAction(db, actionEditSource, actionEditAction, argv)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to update action: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Updated action %s on source %s", actionEditAction, actionEditSource)
|
||||
}
|
138
cmd/actionExecute.go
Normal file
138
cmd/actionExecute.go
Normal file
@ -0,0 +1,138 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var actionExecuteCmd = &cobra.Command{
|
||||
Use: "execute",
|
||||
Aliases: []string{"exec"},
|
||||
Short: "Run a source action for an item",
|
||||
Long: fmt.Sprintf(`Execute a source action for an item.
|
||||
|
||||
The item must declare support for the action by having the action's name
|
||||
in its "action" field. Use --force to execute the action anyway.
|
||||
|
||||
The "fetch" action is special and does not execute for any specific item.
|
||||
Use "intake source fetch" to run the fetch action.
|
||||
|
||||
In a dry run, the item will be printed in the chosen format and not updated.
|
||||
|
||||
%s`, makeFormatHelpText()),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
actionExecute()
|
||||
},
|
||||
}
|
||||
|
||||
var actionExecuteSource string
|
||||
var actionExecuteAction string
|
||||
var actionExecuteItem string
|
||||
var actionExecuteFormat string
|
||||
var actionExecuteDryRun bool
|
||||
var actionExecuteDiff bool
|
||||
var actionExecuteForce bool
|
||||
|
||||
func init() {
|
||||
actionCmd.AddCommand(actionExecuteCmd)
|
||||
|
||||
actionExecuteCmd.PersistentFlags().StringVarP(&actionExecuteSource, "source", "s", "", "Source of the item")
|
||||
actionExecuteCmd.MarkFlagRequired("source")
|
||||
|
||||
actionExecuteCmd.PersistentFlags().StringVarP(&actionExecuteItem, "item", "i", "", "Item to run action on")
|
||||
actionExecuteCmd.MarkFlagRequired("item")
|
||||
|
||||
actionExecuteCmd.PersistentFlags().StringVarP(&actionExecuteAction, "action", "a", "", "Action to run")
|
||||
actionExecuteCmd.MarkFlagRequired("action")
|
||||
|
||||
actionExecuteCmd.Flags().StringVarP(&actionExecuteFormat, "format", "f", "headlines", "Feed format for returned items")
|
||||
actionExecuteCmd.Flags().BoolVar(&actionExecuteDryRun, "dry-run", false, "Instead of updating the item, print it")
|
||||
|
||||
actionExecuteCmd.Flags().BoolVar(&actionExecuteDiff, "diff", false, "Show which fields of the item changed")
|
||||
|
||||
actionExecuteCmd.Flags().BoolVar(&actionExecuteForce, "force", false, "Execute the action even if the item does not support it")
|
||||
}
|
||||
|
||||
func actionExecute() {
|
||||
formatter := formatAs(actionExecuteFormat)
|
||||
|
||||
if actionExecuteSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
if actionExecuteAction == "" {
|
||||
log.Fatal("error: --action is empty")
|
||||
}
|
||||
if actionExecuteItem == "" {
|
||||
log.Fatal("error: --item is empty")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
item, err := core.GetItem(db, actionExecuteSource, actionExecuteItem)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to get item: %v", err)
|
||||
}
|
||||
|
||||
if item.Action[actionExecuteAction] == nil {
|
||||
if actionExecuteForce {
|
||||
log.Printf("warning: force-executing %s on %s/%s", actionExecuteAction, actionExecuteSource, actionExecuteItem)
|
||||
} else {
|
||||
log.Fatalf("error: %s/%s does not support %s", actionExecuteSource, actionExecuteItem, actionExecuteAction)
|
||||
}
|
||||
}
|
||||
|
||||
argv, err := core.GetArgvForAction(db, actionExecuteSource, actionExecuteAction)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to get action: %v", err)
|
||||
}
|
||||
|
||||
itemJson, err := json.Marshal(item)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to serialize item: %v", err)
|
||||
}
|
||||
|
||||
res, err := core.Execute(actionExecuteSource, argv, nil, string(itemJson), time.Minute)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to execute action: %v", err)
|
||||
}
|
||||
if len(res) != 1 {
|
||||
log.Fatalf("error: expected action to produce exactly one item, got %d", len(res))
|
||||
}
|
||||
newItem := res[0]
|
||||
core.BackfillItem(&newItem, &item)
|
||||
|
||||
if actionExecuteDiff {
|
||||
if item.Title != newItem.Title {
|
||||
log.Printf("title: %s => %s", item.Title, newItem.Title)
|
||||
}
|
||||
if item.Author != newItem.Author {
|
||||
log.Printf("author: %s => %s", item.Author, newItem.Author)
|
||||
}
|
||||
if item.Body != newItem.Body {
|
||||
log.Printf("body: %s => %s", item.Body, newItem.Body)
|
||||
}
|
||||
if item.Link != newItem.Link {
|
||||
log.Printf("link: %s => %s", item.Link, newItem.Link)
|
||||
}
|
||||
if item.Time != newItem.Time {
|
||||
log.Printf("time: %d => %d", item.Time, newItem.Time)
|
||||
}
|
||||
if core.ItemsAreEqual(item, newItem) {
|
||||
log.Printf("no changes\n")
|
||||
}
|
||||
}
|
||||
|
||||
if actionExecuteDryRun {
|
||||
fmt.Println(formatter(res[0]))
|
||||
return
|
||||
}
|
||||
|
||||
if err = core.UpdateItems(db, []core.Item{newItem}); err != nil {
|
||||
log.Fatalf("error: failed to update item: %v", err)
|
||||
}
|
||||
}
|
66
cmd/actionList.go
Normal file
66
cmd/actionList.go
Normal file
@ -0,0 +1,66 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"slices"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var actionListCmd = &cobra.Command{
|
||||
Use: "list",
|
||||
Aliases: []string{"ls"},
|
||||
Short: "List actions on a source",
|
||||
Long: `List actions on a source.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
actionList()
|
||||
},
|
||||
}
|
||||
|
||||
var actionListSource string
|
||||
var actionListArgv bool
|
||||
|
||||
func init() {
|
||||
actionCmd.AddCommand(actionListCmd)
|
||||
|
||||
actionListCmd.Flags().StringVarP(&actionListSource, "source", "s", "", "Source to list actions")
|
||||
actionListCmd.MarkFlagRequired("source")
|
||||
|
||||
actionListCmd.Flags().BoolVarP(&actionListArgv, "argv", "a", false, "Include action command")
|
||||
}
|
||||
|
||||
func actionList() {
|
||||
if actionListSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
actions, err := core.GetActionsForSource(db, actionListSource)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
slices.SortFunc(actions, actionSort)
|
||||
|
||||
if actionListArgv {
|
||||
actionArgv := make(map[string][]string)
|
||||
for _, name := range actions {
|
||||
argv, err := core.GetArgvForAction(db, actionListSource, name)
|
||||
if err != nil {
|
||||
log.Fatalf("error: could not get argv for source %s action %s: %v", actionListSource, name, err)
|
||||
}
|
||||
actionArgv[name] = argv
|
||||
}
|
||||
for _, name := range actions {
|
||||
fmt.Printf("%s %v\n", name, actionArgv[name])
|
||||
}
|
||||
|
||||
} else {
|
||||
for _, action := range actions {
|
||||
fmt.Println(action)
|
||||
}
|
||||
}
|
||||
}
|
16
cmd/channel.go
Normal file
16
cmd/channel.go
Normal file
@ -0,0 +1,16 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var channelCmd = &cobra.Command{
|
||||
Use: "channel",
|
||||
Short: "Manage channels",
|
||||
Long: `
|
||||
`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(channelCmd)
|
||||
}
|
21
cmd/channelAdd.go
Normal file
21
cmd/channelAdd.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var channelAddCmd = &cobra.Command{
|
||||
Use: "add",
|
||||
Short: "Create a channel",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
channelCmd.AddCommand(channelAddCmd)
|
||||
}
|
21
cmd/channelDelete.go
Normal file
21
cmd/channelDelete.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var channelDeleteCmd = &cobra.Command{
|
||||
Use: "delete",
|
||||
Short: "Delete a channel",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
channelCmd.AddCommand(channelDeleteCmd)
|
||||
}
|
21
cmd/channelEdit.go
Normal file
21
cmd/channelEdit.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var channelEditCmd = &cobra.Command{
|
||||
Use: "edit",
|
||||
Short: "Edit a channel",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
channelCmd.AddCommand(channelEditCmd)
|
||||
}
|
70
cmd/feed.go
Normal file
70
cmd/feed.go
Normal file
@ -0,0 +1,70 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var feedCmd = &cobra.Command{
|
||||
Use: "feed",
|
||||
Short: "Display the item feed",
|
||||
Long: fmt.Sprintf(`Display the intake item feed in various formats.
|
||||
The default format is "headlines".
|
||||
|
||||
%s`, makeFormatHelpText()),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
feed()
|
||||
},
|
||||
}
|
||||
|
||||
var feedFormat string
|
||||
var feedSource string
|
||||
var feedChannel string
|
||||
var feedShowInactive bool
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(feedCmd)
|
||||
|
||||
feedCmd.Flags().StringVarP(&feedFormat, "format", "f", "headlines", "Feed format")
|
||||
feedCmd.Flags().StringVarP(&feedSource, "source", "s", "", "Limit to items from source")
|
||||
feedCmd.Flags().StringVarP(&feedChannel, "channel", "c", "", "Limit to items from channel")
|
||||
feedCmd.MarkFlagsMutuallyExclusive("source", "channel")
|
||||
feedCmd.Flags().BoolVar(&feedShowInactive, "all", false, "Show inactive items")
|
||||
}
|
||||
|
||||
func feed() {
|
||||
formatter := formatAs(feedFormat)
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
var items []core.Item
|
||||
var err error
|
||||
if feedSource != "" {
|
||||
if feedShowInactive {
|
||||
items, err = core.GetAllItemsForSource(db, feedSource)
|
||||
} else {
|
||||
items, err = core.GetActiveItemsForSource(db, feedSource)
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to fetch items from %s:, %v", feedSource, err)
|
||||
}
|
||||
} else if feedChannel != "" {
|
||||
log.Fatal("error: unimplemented")
|
||||
} else {
|
||||
if feedShowInactive {
|
||||
items, err = core.GetAllItems(db)
|
||||
} else {
|
||||
items, err = core.GetAllActiveItems(db)
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to fetch items: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, item := range items {
|
||||
fmt.Println(formatter(item))
|
||||
}
|
||||
}
|
16
cmd/item.go
Normal file
16
cmd/item.go
Normal file
@ -0,0 +1,16 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var itemCmd = &cobra.Command{
|
||||
Use: "item",
|
||||
Short: "Manage items",
|
||||
Long: `Add, edit, or deactivate items.
|
||||
`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(itemCmd)
|
||||
}
|
85
cmd/itemAdd.go
Normal file
85
cmd/itemAdd.go
Normal file
@ -0,0 +1,85 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var itemAddCmd = &cobra.Command{
|
||||
Use: "add",
|
||||
Short: "Add an item",
|
||||
Long: `Create an ad-hoc item in a source.
|
||||
|
||||
By default, the item is created in the "default" source, which is created
|
||||
if it doesn't exist, with a random id.`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
itemAdd()
|
||||
},
|
||||
}
|
||||
|
||||
var addItemSource string
|
||||
var addItemId string
|
||||
var addItemTitle string
|
||||
var addItemAuthor string
|
||||
var addItemBody string
|
||||
var addItemLink string
|
||||
var addItemTime int
|
||||
var addItemActions string
|
||||
|
||||
func init() {
|
||||
itemCmd.AddCommand(itemAddCmd)
|
||||
|
||||
itemAddCmd.Flags().StringVarP(&addItemSource, "source", "s", "", "Source in which to create the item (default: default)")
|
||||
itemAddCmd.Flags().StringVarP(&addItemId, "id", "i", "", "Item id (default: random hex)")
|
||||
itemAddCmd.Flags().StringVarP(&addItemTitle, "title", "t", "", "Item title")
|
||||
itemAddCmd.Flags().StringVarP(&addItemAuthor, "author", "a", "", "Item author")
|
||||
itemAddCmd.Flags().StringVarP(&addItemBody, "body", "b", "", "Item body")
|
||||
itemAddCmd.Flags().StringVarP(&addItemLink, "link", "l", "", "Item link")
|
||||
itemAddCmd.Flags().IntVarP(&addItemTime, "time", "m", 0, "Item time as a Unix timestamp")
|
||||
itemAddCmd.Flags().StringVarP(&addItemActions, "action", "x", "", "Item actions as a JSON object")
|
||||
}
|
||||
|
||||
func itemAdd() {
|
||||
// Default to "default" source
|
||||
if addItemSource == "" {
|
||||
addItemSource = "default"
|
||||
}
|
||||
// Default id to random hex string
|
||||
if addItemId == "" {
|
||||
bytes := make([]byte, 16)
|
||||
if _, err := rand.Read(bytes); err != nil {
|
||||
log.Fatalf("error: failed to generate id: %v", err)
|
||||
}
|
||||
addItemId = hex.EncodeToString(bytes)
|
||||
}
|
||||
|
||||
var actions core.Actions
|
||||
if addItemActions != "" {
|
||||
if err := json.Unmarshal([]byte(addItemActions), &actions); err != nil {
|
||||
log.Fatalf("error: could not parse actions: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
if err := core.AddItems(db, []core.Item{{
|
||||
Source: addItemSource,
|
||||
Id: addItemId,
|
||||
Title: addItemTitle,
|
||||
Author: addItemAuthor,
|
||||
Body: addItemBody,
|
||||
Link: addItemLink,
|
||||
Time: addItemTime,
|
||||
Action: actions,
|
||||
}}); err != nil {
|
||||
log.Fatalf("error: failed to add item: %s", err)
|
||||
}
|
||||
|
||||
log.Printf("Added %s/%s\n", addItemSource, addItemId)
|
||||
}
|
46
cmd/itemDeactivate.go
Normal file
46
cmd/itemDeactivate.go
Normal file
@ -0,0 +1,46 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var itemDeactivateCmd = &cobra.Command{
|
||||
Use: "deactivate",
|
||||
Aliases: []string{"deac"},
|
||||
Short: "Deactivate an item",
|
||||
Long: `Deactivate items, hiding them from feeds and marking them for deletion.
|
||||
|
||||
Deactivation is idempotent.`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
itemDeactivate()
|
||||
},
|
||||
}
|
||||
|
||||
var deacSource string
|
||||
var deacItem string
|
||||
|
||||
func init() {
|
||||
itemCmd.AddCommand(itemDeactivateCmd)
|
||||
|
||||
itemDeactivateCmd.Flags().StringVarP(&deacSource, "source", "s", "", "Source of the item")
|
||||
itemDeactivateCmd.MarkFlagRequired("source")
|
||||
itemDeactivateCmd.Flags().StringVarP(&deacItem, "item", "i", "", "Item id")
|
||||
itemDeactivateCmd.MarkFlagRequired("item")
|
||||
}
|
||||
|
||||
func itemDeactivate() {
|
||||
db := openAndMigrateDb()
|
||||
|
||||
active, err := core.DeactivateItem(db, deacSource, deacItem)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to deactivate item: %s", err)
|
||||
}
|
||||
if active {
|
||||
fmt.Printf("Deactivated %s/%s\n", deacSource, deacItem)
|
||||
}
|
||||
}
|
21
cmd/itemEdit.go
Normal file
21
cmd/itemEdit.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var itemEditCmd = &cobra.Command{
|
||||
Use: "edit",
|
||||
Short: "Edit an item",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
itemCmd.AddCommand(itemEditCmd)
|
||||
}
|
50
cmd/migrate.go
Normal file
50
cmd/migrate.go
Normal file
@ -0,0 +1,50 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var migrateCmd = &cobra.Command{
|
||||
Use: "migrate",
|
||||
Short: "Migrate an intake database to the latest version",
|
||||
Long: `Migrate an intake database to the latest version.
|
||||
|
||||
Note that the database will be created if it does not exist, even with --list.`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
migrate()
|
||||
},
|
||||
}
|
||||
|
||||
var migrateListOnly bool
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(migrateCmd)
|
||||
|
||||
migrateCmd.Flags().BoolVarP(&migrateListOnly, "list", "l", false, "Show the list of migrations")
|
||||
}
|
||||
|
||||
func migrate() {
|
||||
db := openDb()
|
||||
|
||||
core.InitDatabase(db)
|
||||
if migrateListOnly {
|
||||
pending, err := core.GetPendingMigrations(db)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
for name, complete := range pending {
|
||||
if complete {
|
||||
fmt.Printf("[x] %s\n", name)
|
||||
} else {
|
||||
fmt.Printf("[ ] %s\n", name)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
core.MigrateDatabase(db)
|
||||
}
|
||||
}
|
21
cmd/passwd.go
Normal file
21
cmd/passwd.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var passwdCmd = &cobra.Command{
|
||||
Use: "passwd",
|
||||
Short: "Set the password for the web interface",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(passwdCmd)
|
||||
}
|
109
cmd/root.go
Normal file
109
cmd/root.go
Normal file
@ -0,0 +1,109 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "intake",
|
||||
Short: "Universal and extensible feed aggregator",
|
||||
Long: `intake, the universal and extensible feed aggregator`,
|
||||
}
|
||||
|
||||
func Execute() {
|
||||
err := rootCmd.Execute()
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
var dataPath string
|
||||
|
||||
func init() {
|
||||
// Disable the automatic help command
|
||||
rootCmd.SetHelpCommand(&cobra.Command{Hidden: true})
|
||||
|
||||
// All commands need to operate on a database
|
||||
rootCmd.PersistentFlags().StringVarP(&dataPath, "data-dir", "d", "", "Path to the intake data directory containing the database")
|
||||
}
|
||||
|
||||
//
|
||||
// Common logic shared by multiple commands
|
||||
//
|
||||
|
||||
func getDbPath() string {
|
||||
if dataPath != "" {
|
||||
return core.DatabasePath(dataPath)
|
||||
}
|
||||
if dataDir := core.ResolveDataDir(); dataDir != "" {
|
||||
return core.DatabasePath(dataDir)
|
||||
}
|
||||
fmt.Println("error: no database specified")
|
||||
fmt.Println("One of --data-dir, INTAKE_DATA_DIR, XDG_DATA_HOME, or HOME must be defined.")
|
||||
os.Exit(1)
|
||||
return ""
|
||||
}
|
||||
|
||||
// Attempt to open the specified database and exit with an error if it fails.
|
||||
func openDb() *core.DB {
|
||||
dbPath := getDbPath()
|
||||
db, err := core.OpenDb(dbPath)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to open %s", dbPath)
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
// Attempt to open and migrate the specified database and exit with an error if it fails.
|
||||
func openAndMigrateDb() *core.DB {
|
||||
db := openDb()
|
||||
if err := core.InitDatabase(db); err != nil {
|
||||
log.Fatalf("error: failed to init database: %v", err)
|
||||
}
|
||||
if err := core.MigrateDatabase(db); err != nil {
|
||||
log.Fatalf("error: failed to migrate database: %v", err)
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
func getArgv(cmd *cobra.Command, args []string) []string {
|
||||
lenAtDash := cmd.Flags().ArgsLenAtDash()
|
||||
if lenAtDash == -1 {
|
||||
return nil
|
||||
} else {
|
||||
return args[lenAtDash:]
|
||||
}
|
||||
}
|
||||
|
||||
// Sort "fetch" action ahead of other actions
|
||||
func actionSort(a string, b string) int {
|
||||
if a == "fetch" {
|
||||
return -1
|
||||
}
|
||||
if b == "fetch" {
|
||||
return 1
|
||||
}
|
||||
return strings.Compare(a, b)
|
||||
}
|
||||
|
||||
func makeFormatHelpText() string {
|
||||
text := "Available formats:\n"
|
||||
for format, desc := range core.AvailableFormats {
|
||||
text += fmt.Sprintf(" %-13s %s\n", format, desc)
|
||||
}
|
||||
return text
|
||||
}
|
||||
|
||||
func formatAs(format string) func(item core.Item) string {
|
||||
formatter, err := core.FormatAs(format)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
return formatter
|
||||
}
|
31
cmd/serve.go
Normal file
31
cmd/serve.go
Normal file
@ -0,0 +1,31 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/Jaculabilis/intake/web"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var serveCmd = &cobra.Command{
|
||||
Use: "serve",
|
||||
Short: "Serve the web interface",
|
||||
Long: `Serve the intake web interface.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
serve()
|
||||
},
|
||||
}
|
||||
|
||||
var serveAddr string
|
||||
var servePort string
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(serveCmd)
|
||||
|
||||
serveCmd.Flags().StringVarP(&serveAddr, "addr", "a", "localhost", "Address to bind to")
|
||||
serveCmd.Flags().StringVarP(&servePort, "port", "p", "8081", "Port to bind to")
|
||||
}
|
||||
|
||||
func serve() {
|
||||
db := openAndMigrateDb()
|
||||
web.RunServer(db, serveAddr, servePort)
|
||||
}
|
23
cmd/source.go
Normal file
23
cmd/source.go
Normal file
@ -0,0 +1,23 @@
|
||||
/*
|
||||
Copyright © 2025 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceCmd = &cobra.Command{
|
||||
Use: "source",
|
||||
Short: "Manage sources",
|
||||
Long: `Manage sources.
|
||||
|
||||
A source represents a single content feed that generates discrete feed items.
|
||||
The command defined in the "fetch" action is used to check for new items to
|
||||
update the feed.
|
||||
`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(sourceCmd)
|
||||
}
|
41
cmd/sourceAdd.go
Normal file
41
cmd/sourceAdd.go
Normal file
@ -0,0 +1,41 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceAddCmd = &cobra.Command{
|
||||
Use: "add",
|
||||
Short: "Create a source",
|
||||
Long: `Create a source.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
sourceAdd()
|
||||
},
|
||||
}
|
||||
|
||||
var sourceAddSource string
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceAddCmd)
|
||||
|
||||
sourceAddCmd.Flags().StringVarP(&sourceAddSource, "source", "s", "", "Source name")
|
||||
sourceAddCmd.MarkFlagRequired("source")
|
||||
}
|
||||
|
||||
func sourceAdd() {
|
||||
if sourceAddSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
if err := core.AddSource(db, sourceAddSource); err != nil {
|
||||
log.Fatalf("error: failed to add source: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Added source %s", sourceAddSource)
|
||||
}
|
21
cmd/sourceDeactivate.go
Normal file
21
cmd/sourceDeactivate.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceDeactivateCmd = &cobra.Command{
|
||||
Use: "deactivate",
|
||||
Short: "Deactivate all items in a source",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceDeactivateCmd)
|
||||
}
|
41
cmd/sourceDelete.go
Normal file
41
cmd/sourceDelete.go
Normal file
@ -0,0 +1,41 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceDeleteCmd = &cobra.Command{
|
||||
Use: "delete",
|
||||
Aliases: []string{"rm"},
|
||||
Short: "Delete a source",
|
||||
Long: `Delete a source.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
sourceDelete()
|
||||
},
|
||||
}
|
||||
|
||||
var sourceDeleteSource string
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceDeleteCmd)
|
||||
|
||||
sourceDeleteCmd.Flags().StringVarP(&sourceDeleteSource, "source", "s", "", "Source to delete")
|
||||
}
|
||||
|
||||
func sourceDelete() {
|
||||
if sourceDeleteSource == "" {
|
||||
log.Fatal("error: --source is empty")
|
||||
}
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
if err := core.DeleteSource(db, sourceDeleteSource); err != nil {
|
||||
log.Fatalf("error: failed to delete source: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Deleted source %s", sourceDeleteSource)
|
||||
}
|
21
cmd/sourceEdit.go
Normal file
21
cmd/sourceEdit.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceEditCmd = &cobra.Command{
|
||||
Use: "edit",
|
||||
Short: "Edit a source",
|
||||
Long: `
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
log.Fatal("not implemented")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceEditCmd)
|
||||
}
|
72
cmd/sourceFetch.go
Normal file
72
cmd/sourceFetch.go
Normal file
@ -0,0 +1,72 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceFetchCmd = &cobra.Command{
|
||||
Use: "fetch",
|
||||
Short: "Fetch items for a source and update the feed",
|
||||
Long: fmt.Sprintf(`Fetch items from a feed source using the configured "fetch" action.
|
||||
Items returned by a successful fetch will be used to update the source.
|
||||
A fetch is successful if all items output by the fetch are parsed successfully
|
||||
and the exit code is 0. No changes will be made to the source if the fetch
|
||||
does not succeed.
|
||||
|
||||
In a dry run, the items will be printed according to the chosen format and
|
||||
the source will not be updated with the fetch result.
|
||||
|
||||
%s`, makeFormatHelpText()),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
sourceFetch()
|
||||
},
|
||||
}
|
||||
|
||||
var sourceFetchSource string
|
||||
var sourceFetchFormat string
|
||||
var sourceFetchDryRun bool
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceFetchCmd)
|
||||
|
||||
sourceFetchCmd.Flags().StringVarP(&sourceFetchSource, "source", "s", "", "Source name to fetch (required)")
|
||||
sourceFetchCmd.MarkFlagRequired("source")
|
||||
|
||||
sourceFetchCmd.Flags().StringVarP(&sourceFetchFormat, "format", "f", "headlines", "Feed format for returned items.")
|
||||
sourceFetchCmd.Flags().BoolVar(&sourceFetchDryRun, "dry-run", false, "Instead of updating the source, print the fetched items")
|
||||
}
|
||||
|
||||
func sourceFetch() {
|
||||
formatter := formatAs(sourceFetchFormat)
|
||||
|
||||
db := openAndMigrateDb()
|
||||
|
||||
argv, err := core.GetArgvForAction(db, sourceFetchSource, "fetch")
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to get fetch action: %v", err)
|
||||
}
|
||||
|
||||
items, err := core.Execute(sourceFetchSource, argv, nil, "", time.Minute)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to execute fetch: %v", err)
|
||||
}
|
||||
|
||||
if sourceFetchDryRun {
|
||||
log.Printf("Fetch returned %d items", len(items))
|
||||
for _, item := range items {
|
||||
fmt.Println(formatter(item))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
added, deleted, err := core.UpdateWithFetchedItems(db, sourceFetchSource, items)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to update: %v", err)
|
||||
}
|
||||
log.Printf("%s added %d items, updated %d items, and deleted %d items", sourceFetchSource, added, len(items)-added, deleted)
|
||||
}
|
58
cmd/sourceList.go
Normal file
58
cmd/sourceList.go
Normal file
@ -0,0 +1,58 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"slices"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceListCmd = &cobra.Command{
|
||||
Use: "list",
|
||||
Aliases: []string{"ls"},
|
||||
Short: "List sources",
|
||||
Long: `Print the list of sources.
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
sourceList()
|
||||
},
|
||||
}
|
||||
|
||||
var sourceListShowActions bool
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceListCmd)
|
||||
|
||||
sourceListCmd.Flags().BoolVarP(&sourceListShowActions, "actions", "a", false, "Include source actions")
|
||||
}
|
||||
|
||||
func sourceList() {
|
||||
db := openAndMigrateDb()
|
||||
|
||||
names, err := core.GetSources(db)
|
||||
if err != nil {
|
||||
log.Fatalf("error: failed to get sources: %v", err)
|
||||
}
|
||||
slices.Sort(names)
|
||||
|
||||
if sourceListShowActions {
|
||||
sourceActions := make(map[string][]string)
|
||||
for _, name := range names {
|
||||
actions, err := core.GetActionsForSource(db, name)
|
||||
if err != nil {
|
||||
log.Fatalf("error: could not get actions for source %s: %v", name, err)
|
||||
}
|
||||
slices.SortFunc(actions, actionSort)
|
||||
sourceActions[name] = actions
|
||||
}
|
||||
for _, name := range names {
|
||||
fmt.Printf("%s %v\n", name, sourceActions[name])
|
||||
}
|
||||
} else {
|
||||
for _, name := range names {
|
||||
fmt.Println(name)
|
||||
}
|
||||
}
|
||||
}
|
49
cmd/sourceTest.go
Normal file
49
cmd/sourceTest.go
Normal file
@ -0,0 +1,49 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var sourceTestCmd = &cobra.Command{
|
||||
Use: "test [flags] -- argv",
|
||||
Short: "Test a fetch action",
|
||||
Long: fmt.Sprintf(`Execute a command as if it were a feed source's fetch action.
|
||||
|
||||
%s`, makeFormatHelpText()),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
l := cmd.Flags().ArgsLenAtDash()
|
||||
if l == -1 {
|
||||
sourceTest(nil)
|
||||
} else {
|
||||
sourceTest(args[l:])
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
var sourceTestEnv []string
|
||||
var sourceTestFormat string
|
||||
|
||||
func init() {
|
||||
sourceCmd.AddCommand(sourceTestCmd)
|
||||
|
||||
sourceTestCmd.Flags().StringArrayVarP(&sourceTestEnv, "env", "e", nil, "Environment variables to set, in the form KEY=VAL")
|
||||
sourceTestCmd.Flags().StringVarP(&sourceTestFormat, "format", "f", "headlines", "Feed format for returned items.")
|
||||
}
|
||||
|
||||
func sourceTest(cmd []string) {
|
||||
formatter := formatAs(sourceTestFormat)
|
||||
|
||||
items, err := core.Execute("", cmd, sourceTestEnv, "", time.Minute)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
log.Printf("Returned %d items", len(items))
|
||||
for _, item := range items {
|
||||
fmt.Println(formatter(item))
|
||||
}
|
||||
}
|
208
core/action.go
Normal file
208
core/action.go
Normal file
@ -0,0 +1,208 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Type alias for storing string array as jsonb
|
||||
type argList []string
|
||||
|
||||
func (a argList) Value() (driver.Value, error) {
|
||||
return json.Marshal(a)
|
||||
}
|
||||
|
||||
func (a *argList) Scan(value interface{}) error {
|
||||
return json.Unmarshal([]byte(value.(string)), a)
|
||||
}
|
||||
|
||||
func AddAction(db *DB, source string, name string, argv []string) error {
|
||||
_, err := db.Exec(`
|
||||
insert into actions (source, name, argv)
|
||||
values (?, ?, jsonb(?))
|
||||
`, source, name, argList(argv))
|
||||
return err
|
||||
}
|
||||
|
||||
func UpdateAction(db *DB, source string, name string, argv []string) error {
|
||||
_, err := db.Exec(`
|
||||
update actions
|
||||
set argv = jsonb(?)
|
||||
where source = ? and name = ?
|
||||
`, argList(argv), source, name)
|
||||
return err
|
||||
}
|
||||
|
||||
func GetActionsForSource(db *DB, source string) ([]string, error) {
|
||||
rows, err := db.Query(`
|
||||
select name
|
||||
from actions
|
||||
where source = ?
|
||||
`, source)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var names []string
|
||||
for rows.Next() {
|
||||
var name string
|
||||
err = rows.Scan(&name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
names = append(names, name)
|
||||
}
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func GetArgvForAction(db *DB, source string, name string) ([]string, error) {
|
||||
rows := db.QueryRow(`
|
||||
select json(argv)
|
||||
from actions
|
||||
where source = ? and name = ?
|
||||
`, source, name)
|
||||
var argv argList
|
||||
err := rows.Scan(&argv)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return argv, nil
|
||||
}
|
||||
|
||||
func DeleteAction(db *DB, source string, name string) error {
|
||||
_, err := db.Exec(`
|
||||
delete from actions
|
||||
where source = ? and name = ?
|
||||
`, source, name)
|
||||
return err
|
||||
}
|
||||
|
||||
func readStdout(stdout io.ReadCloser, source string, items chan Item, cparse chan bool) {
|
||||
var item Item
|
||||
parseError := false
|
||||
scanout := bufio.NewScanner(stdout)
|
||||
for scanout.Scan() {
|
||||
data := scanout.Bytes()
|
||||
err := json.Unmarshal(data, &item)
|
||||
if err != nil || item.Id == "" {
|
||||
log.Printf("[%s: stdout] %s\n", source, strings.TrimSpace(string(data)))
|
||||
parseError = true
|
||||
} else {
|
||||
item.Active = true // These fields aren't up to
|
||||
item.Created = 0 // the action to set and
|
||||
item.Source = source // shouldn't be overrideable
|
||||
log.Printf("[%s: item] %s\n", source, item.Id)
|
||||
items <- item
|
||||
}
|
||||
}
|
||||
// Only send the parsing result at the end, to block main until stdout is drained
|
||||
cparse <- parseError
|
||||
close(items)
|
||||
}
|
||||
|
||||
func readStderr(stderr io.ReadCloser, source string, done chan bool) {
|
||||
scanerr := bufio.NewScanner(stderr)
|
||||
for scanerr.Scan() {
|
||||
text := strings.TrimSpace(scanerr.Text())
|
||||
log.Printf("[%s: stderr] %s\n", source, text)
|
||||
}
|
||||
done <- true
|
||||
}
|
||||
|
||||
func writeStdin(stdin io.WriteCloser, text string) {
|
||||
defer stdin.Close()
|
||||
io.WriteString(stdin, text)
|
||||
}
|
||||
|
||||
func Execute(
|
||||
source string,
|
||||
argv []string,
|
||||
env []string,
|
||||
input string,
|
||||
timeout time.Duration,
|
||||
) ([]Item, error) {
|
||||
log.Printf("Executing %v", argv)
|
||||
|
||||
if len(argv) == 0 {
|
||||
return nil, errors.New("empty argv")
|
||||
}
|
||||
if source == "" {
|
||||
return nil, errors.New("empty source")
|
||||
}
|
||||
|
||||
env = append(env, "STATE_PATH=")
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), timeout)
|
||||
defer cancel()
|
||||
cmd := exec.CommandContext(ctx, argv[0], argv[1:]...)
|
||||
cmd.Env = append(os.Environ(), env...)
|
||||
cmd.WaitDelay = time.Second * 5
|
||||
|
||||
// Open pipes to the command
|
||||
stdin, err := cmd.StdinPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stdout, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cout := make(chan Item)
|
||||
cparse := make(chan bool)
|
||||
cerr := make(chan bool)
|
||||
|
||||
// Sink routine for items produced
|
||||
var items []Item
|
||||
go func() {
|
||||
for item := range cout {
|
||||
items = append(items, item)
|
||||
}
|
||||
}()
|
||||
|
||||
// Routines handling the process i/o
|
||||
go writeStdin(stdin, input)
|
||||
go readStdout(stdout, source, cout, cparse)
|
||||
go readStderr(stderr, source, cerr)
|
||||
|
||||
// Kick off the command
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Block until std{out,err} close
|
||||
<-cerr
|
||||
parseError := <-cparse
|
||||
|
||||
err = cmd.Wait()
|
||||
if ctx.Err() == context.DeadlineExceeded {
|
||||
log.Printf("Timed out after %v\n", timeout)
|
||||
return nil, err
|
||||
} else if exiterr, ok := err.(*exec.ExitError); ok {
|
||||
log.Printf("error: %s failed with exit code %d\n", argv[0], exiterr.ExitCode())
|
||||
return nil, err
|
||||
} else if err != nil {
|
||||
log.Printf("error: %s failed with error: %s\n", argv[0], err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if parseError {
|
||||
log.Printf("error: could not parse item\n")
|
||||
return nil, errors.New("invalid JSON")
|
||||
}
|
||||
|
||||
return items, nil
|
||||
}
|
182
core/action_test.go
Normal file
182
core/action_test.go
Normal file
@ -0,0 +1,182 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestActionCreate(t *testing.T) {
|
||||
db := EphemeralDb(t)
|
||||
|
||||
if err := AddAction(db, "test", "hello", []string{"echo", "hello"}); err == nil {
|
||||
t.Fatal("Action created for nonexistent source")
|
||||
}
|
||||
|
||||
if err := AddSource(db, "test"); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if err := AddAction(db, "test", "hello", []string{"echo", "hello"}); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if err := AddAction(db, "test", "goodbye", []string{"exit", "1"}); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if err := UpdateAction(db, "test", "goodbye", []string{"echo", "goodbye"}); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
actions, err := GetActionsForSource(db, "test")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(actions) != 2 {
|
||||
t.Fatal("expected 2 actions")
|
||||
}
|
||||
found := make(map[string]bool)
|
||||
for _, action := range actions {
|
||||
found[action] = true
|
||||
}
|
||||
if !found["hello"] || !found["goodbye"] {
|
||||
t.Fatalf("missing hello and/or goodbye, got: %v", actions)
|
||||
}
|
||||
|
||||
argv, err := GetArgvForAction(db, "test", "goodbye")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if len(argv) != 2 || argv[0] != "echo" || argv[1] != "goodbye" {
|
||||
t.Fatalf("expected [echo goodbye], got: %v", argv)
|
||||
}
|
||||
|
||||
err = DeleteAction(db, "test", "hello")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExecute(t *testing.T) {
|
||||
assertLen := func(items []Item, length int) {
|
||||
if len(items) != length {
|
||||
t.Fatalf("Expected %d items, got %d", length, len(items))
|
||||
}
|
||||
}
|
||||
assertNil := func(err error) {
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
assertNotNil := func(err error) {
|
||||
if err == nil {
|
||||
t.Fatal("expected err")
|
||||
}
|
||||
}
|
||||
execute := func(argv []string) ([]Item, error) {
|
||||
return Execute("_", argv, nil, "", time.Minute)
|
||||
}
|
||||
|
||||
res, err := execute([]string{"true"})
|
||||
assertNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
// Exit with error code
|
||||
res, err = execute([]string{"false"})
|
||||
assertNotNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
res, err = execute([]string{"sh", "-c", "exit 22"})
|
||||
assertNotNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
// Timeout
|
||||
res, err = Execute("_", []string{"sleep", "10"}, nil, "", time.Millisecond)
|
||||
assertNotNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
// Returning items
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "foo"}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Id != "foo" {
|
||||
t.Fatal("jq -cn test failed")
|
||||
}
|
||||
|
||||
// Read from stdin
|
||||
res, err = Execute("_", []string{"jq", "-cR", `{id: .}`}, nil, "bar", time.Minute)
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Id != "bar" {
|
||||
t.Fatal("jq -cR test failed")
|
||||
}
|
||||
|
||||
// Set env
|
||||
res, err = Execute("_", []string{"jq", "-cn", `{id: env.HELLO}`}, []string{"HELLO=baz"}, "", time.Minute)
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Id != "baz" {
|
||||
t.Fatal("jq -cn env test failed")
|
||||
}
|
||||
|
||||
// With logging on stderr
|
||||
res, err = execute([]string{"sh", "-c", `echo 1>&2 Hello; jq -cn '{id: "box"}'; echo 1>&2 World`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Id != "box" {
|
||||
t.Fatal("stderr test failed")
|
||||
}
|
||||
|
||||
// Unsupported item field is silently discarded
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "test", unknownField: "what is this"}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
|
||||
// Field with incorrect type fails
|
||||
res, err = execute([]string{"jq", "-cn", `{id: ["list"]}`})
|
||||
assertNotNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "test", time: "0"}`})
|
||||
assertNotNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
res, err = execute([]string{"jq", "-cn", `{id: null}`})
|
||||
assertNotNil(err)
|
||||
assertLen(res, 0)
|
||||
|
||||
// Items with duplicate ids are not a fetch error, but they will fail to update
|
||||
res, err = execute([]string{"jq", "-cn", `["a", "a"] | .[] | {id: .}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 2)
|
||||
|
||||
// Action keys are detected even with empty values
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "test", action: {"hello": null}}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Action["hello"] == nil {
|
||||
t.Fatal("missing hello action")
|
||||
}
|
||||
if res[0].Action["goodbye"] != nil {
|
||||
t.Fatal("nonexistent action should key to nil in Action")
|
||||
}
|
||||
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "test", action: {"hello": ""}}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Action["hello"] == nil {
|
||||
t.Fatal("missing hello action")
|
||||
}
|
||||
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "test", action: {"hello": []}}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Action["hello"] == nil {
|
||||
t.Fatal("missing hello action")
|
||||
}
|
||||
|
||||
res, err = execute([]string{"jq", "-cn", `{id: "test", action: {"hello": {}}}`})
|
||||
assertNil(err)
|
||||
assertLen(res, 1)
|
||||
if res[0].Action["hello"] == nil {
|
||||
t.Fatal("missing hello action")
|
||||
}
|
||||
}
|
21
core/data.go
Normal file
21
core/data.go
Normal file
@ -0,0 +1,21 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
func ResolveDataDir() string {
|
||||
if intakeData := os.Getenv("INTAKE_DATA_DIR"); intakeData != "" {
|
||||
return intakeData
|
||||
} else if xdgData := os.Getenv("XDG_DATA_HOME"); xdgData != "" {
|
||||
return filepath.Join(xdgData, "intake")
|
||||
} else if home := os.Getenv("HOME"); home != "" {
|
||||
return filepath.Join(home, ".local", "share", "intake")
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func DatabasePath(dataDir string) string {
|
||||
return filepath.Join(dataDir, "intake.db")
|
||||
}
|
89
core/db.go
Normal file
89
core/db.go
Normal file
@ -0,0 +1,89 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"runtime"
|
||||
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
type DB struct {
|
||||
ro *sql.DB
|
||||
rw *sql.DB
|
||||
}
|
||||
|
||||
func (db *DB) Query(query string, args ...any) (*sql.Rows, error) {
|
||||
return db.ro.Query(query, args...)
|
||||
}
|
||||
|
||||
func (db *DB) QueryRow(query string, args ...any) *sql.Row {
|
||||
return db.ro.QueryRow(query, args...)
|
||||
}
|
||||
|
||||
func (db *DB) Exec(query string, args ...any) (sql.Result, error) {
|
||||
return db.rw.Exec(query, args...)
|
||||
}
|
||||
|
||||
func (db *DB) Transact(transaction func(*sql.Tx) error) error {
|
||||
tx, err := db.rw.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
_, err = tx.Exec("rollback; begin immediate")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = transaction(tx); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = tx.Commit(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func defaultPragma(db *sql.DB) (sql.Result, error) {
|
||||
return db.Exec(`
|
||||
pragma journal_mode = WAL;
|
||||
pragma busy_timeout = 5000;
|
||||
pragma synchronous = NORMAL;
|
||||
pragma cache_size = 1000000000;
|
||||
pragma foreign_keys = true;
|
||||
pragma temp_store = memory;
|
||||
pragma mmap_size = 3000000000;
|
||||
`)
|
||||
}
|
||||
|
||||
func OpenDb(dataSourceName string) (*DB, error) {
|
||||
ro, err := sql.Open("sqlite3", dataSourceName)
|
||||
if err != nil {
|
||||
defer ro.Close()
|
||||
return nil, err
|
||||
}
|
||||
ro.SetMaxOpenConns(max(4, runtime.NumCPU()))
|
||||
_, err = defaultPragma(ro)
|
||||
if err != nil {
|
||||
defer ro.Close()
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rw, err := sql.Open("sqlite3", dataSourceName)
|
||||
if err != nil {
|
||||
defer ro.Close()
|
||||
defer rw.Close()
|
||||
return nil, err
|
||||
}
|
||||
rw.SetMaxOpenConns(1)
|
||||
_, err = defaultPragma(rw)
|
||||
if err != nil {
|
||||
defer ro.Close()
|
||||
defer rw.Close()
|
||||
return nil, err
|
||||
}
|
||||
|
||||
wrapper := new(DB)
|
||||
wrapper.ro = ro
|
||||
wrapper.rw = rw
|
||||
return wrapper, nil
|
||||
}
|
116
core/db_test.go
Normal file
116
core/db_test.go
Normal file
@ -0,0 +1,116 @@
package core

import (
	"database/sql"
	"testing"

	_ "github.com/mattn/go-sqlite3"
)

func TestDeleteSourceCascade(t *testing.T) {
	db := EphemeralDb(t)

	if err := AddSource(db, "source1"); err != nil {
		t.Fatalf("failed to add source1: %v", err)
	}
	if err := AddSource(db, "source2"); err != nil {
		t.Fatalf("failed to add source2: %v", err)
	}
	if err := AddItems(db, []Item{
		{"source1", "item1", 0, true, "", "", "", "", 0, nil},
		{"source2", "item2", 0, true, "", "", "", "", 0, nil},
	}); err != nil {
		t.Fatalf("failed to add items: %v", err)
	}

	items, err := GetAllActiveItems(db)
	if err != nil {
		t.Fatalf("failed to get active items: %v", err)
	}
	if len(items) != 2 {
		t.Fatal("Expected 2 items")
	}

	if err := DeleteSource(db, "source1"); err != nil {
		t.Fatal(err)
	}
	items, err = GetAllActiveItems(db)
	if err != nil {
		t.Fatal(err)
	}
	if len(items) != 1 {
		t.Fatalf("Expected only 1 item after source delete, got %d", len(items))
	}

	err = AddItems(db, []Item{{"source1", "item3", 0, true, "", "", "", "", 0, nil}})
	if err == nil {
		t.Fatal("Unexpected success adding item for nonexistent source")
	}
}

func TestTransaction(t *testing.T) {
	db := EphemeralDb(t)
	if _, err := db.Exec("create table planets (name text) strict"); err != nil {
		t.Fatal(err)
	}

	// A transaction that should succeed
	err := db.Transact(func(tx *sql.Tx) error {
		if _, err := tx.Exec("insert into planets (name) values (?)", "mercury"); err != nil {
			t.Fatal(err)
		}
		if _, err := tx.Exec("insert into planets (name) values (?)", "venus"); err != nil {
			t.Fatal(err)
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}

	// Check both rows were inserted
	rows, err := db.Query("select name from planets")
	if err != nil {
		t.Fatal(err)
	}
	found := map[string]bool{}
	for rows.Next() {
		var name string
		if err = rows.Scan(&name); err != nil {
			t.Fatal(err)
		}
		found[name] = true
	}
	if !found["mercury"] || !found["venus"] {
		t.Fatal("transaction failed to insert rows")
	}

	// A transaction that should fail
	err = db.Transact(func(tx *sql.Tx) error {
		if _, err := tx.Exec("insert into planets (name) values (?)", "earth"); err != nil {
			t.Fatal(err)
		}
		_, err := tx.Exec("insert into planets (name) values (?, ?)", "moon", "surprise asteroid!")
		return err
	})
	if err == nil {
		t.Fatal("expected error")
	}

	// Check the third insert was rolled back by the error
	rows, err = db.Query("select name from planets")
	if err != nil {
		t.Fatal(err)
	}
	found = map[string]bool{}
	for rows.Next() {
		var name string
		if err = rows.Scan(&name); err != nil {
			t.Fatal(err)
		}
		found[name] = true
	}
	if found["earth"] {
		t.Fatal("transaction failed to roll back insert")
	}
}
80
core/item.go
Normal file
80
core/item.go
Normal file
@ -0,0 +1,80 @@
package core

import (
	"database/sql/driver"
	"encoding/json"
	"fmt"
	"log"
)

type Actions map[string]json.RawMessage

func (a Actions) Value() (driver.Value, error) {
	return json.Marshal(a)
}

func (a *Actions) Scan(value interface{}) error {
	return json.Unmarshal([]byte(value.(string)), a)
}

type Item struct {
	Source  string  `json:"source"`
	Id      string  `json:"id"`
	Created int     `json:"created"`
	Active  bool    `json:"active"`
	Title   string  `json:"title"`
	Author  string  `json:"author"`
	Body    string  `json:"body"`
	Link    string  `json:"link"`
	Time    int     `json:"time"`
	Action  Actions `json:"action"`
}

// Whether an item that no longer appears in a fetch can be deleted.
func (item Item) Deletable() bool {
	return !item.Active
}

func ItemsAreEqual(first Item, second Item) bool {
	// Hacky but easy to use
	return fmt.Sprintf("%#v", first) == fmt.Sprintf("%#v", second)
}

func FormatAsHeadline(item Item) string {
	title := item.Title
	if title == "" {
		title = item.Id
	}
	return title
}

func FormatAsJson(item Item) string {
	data, err := json.Marshal(item)
	if err != nil {
		log.Fatalf("error: failed to serialize %s/%s: %v", item.Source, item.Id, err)
	}
	return string(data)
}

func FormatAsShort(item Item) string {
	return fmt.Sprintf("%s/%s", item.Source, item.Id)
}

func FormatAs(format string) (func(item Item) string, error) {
	switch format {
	case "headlines":
		return FormatAsHeadline, nil
	case "json":
		return FormatAsJson, nil
	case "short":
		return FormatAsShort, nil
	default:
		return nil, fmt.Errorf("invalid format '%s'", format)
	}
}

var AvailableFormats = map[string]string{
	"headlines": "Only item titles",
	"json":      "Full item JSON",
	"short":     "Item source and id",
}
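
Illustrative sketch (not part of the changeset): the FormatAs/AvailableFormats pair above suggests a lookup-then-apply pattern for CLI output. A minimal caller in the same package might look like the following; printItems, format, and items are hypothetical names.

func printItems(format string, items []Item) error {
	formatter, err := FormatAs(format) // "headlines", "json", or "short"
	if err != nil {
		return err // unknown name; AvailableFormats lists the valid ones
	}
	for _, item := range items {
		fmt.Println(formatter(item))
	}
	return nil
}
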
51
core/item_test.go
Normal file
51
core/item_test.go
Normal file
@ -0,0 +1,51 @@
package core

import (
	"encoding/json"
	"testing"
)

func TestItemFormatsExist(t *testing.T) {
	for name := range AvailableFormats {
		formatter, err := FormatAs(name)
		if err != nil {
			t.Fatalf("error getting formatter for available format %s: %v", name, err)
		}
		if formatter == nil {
			t.Fatalf("formatter %s is nil", name)
		}
	}
}

func TestItemRoundTrip(t *testing.T) {
	db := EphemeralDb(t)
	if err := AddSource(db, "_"); err != nil {
		t.Fatalf("failed to create source: %v", err)
	}

	item1 := Item{
		Source:  "_",
		Id:      "a",
		Created: 0,
		Active:  true,
		Title:   "title",
		Author:  "author",
		Body:    "body",
		Link:    "link",
		Time:    123456,
		Action: map[string]json.RawMessage{
			"hello": json.RawMessage(`"world"`),
		},
	}
	if err := AddItems(db, []Item{item1}); err != nil {
		t.Fatalf("update failed: %v", err)
	}
	item2, err := GetItem(db, item1.Source, item1.Id)
	if err != nil {
		t.Fatalf("could not get item: %v", err)
	}
	item2.Created = 0 // automatically set by db
	if !ItemsAreEqual(item1, item2) {
		t.Fatalf("items are not equal, err %v", err)
	}
}
101
core/migrations.go
Normal file
101
core/migrations.go
Normal file
@ -0,0 +1,101 @@
package core

import (
	"embed"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

//go:embed sql/*.sql
var migrations embed.FS

// Idempotently initialize the database. Safe to call unconditionally.
func InitDatabase(db *DB) error {
	rows, err := db.Query(`
		select exists (
			select 1
			from sqlite_master
			where type = 'table'
			and name = 'migrations'
		)
	`)
	if err != nil {
		return err
	}

	var exists bool
	for rows.Next() {
		err = rows.Scan(&exists)
		if err != nil {
			return err
		}
	}

	if exists {
		return nil
	}

	err = ApplyMigration(db, "0000_baseline.sql")
	return err
}

// Get a map of migration names to whether the migration has been applied.
func GetPendingMigrations(db *DB) (map[string]bool, error) {
	allMigrations, err := migrations.ReadDir("sql")
	if err != nil {
		return nil, err
	}

	complete := map[string]bool{}
	for _, mig := range allMigrations {
		complete[mig.Name()] = false
	}

	rows, err := db.Query("select name from migrations")
	if err != nil {
		return nil, err
	}
	for rows.Next() {
		var name string
		err = rows.Scan(&name)
		if err != nil {
			return nil, err
		}
		complete[name] = true
	}

	return complete, nil
}

// Apply a migration by name.
func ApplyMigration(db *DB, name string) error {
	data, err := migrations.ReadFile("sql/" + name)
	if err != nil {
		log.Fatalf("Missing migration %s", name)
	}
	log.Printf("Applying migration %s", name)
	_, err = db.Exec(string(data))
	if err != nil {
		return err
	}
	_, err = db.Exec("insert into migrations (name) values (?)", name)
	return err
}

// Apply all pending migrations.
func MigrateDatabase(db *DB) error {
	pending, err := GetPendingMigrations(db)
	if err != nil {
		return err
	}
	for name, complete := range pending {
		if !complete {
			err = ApplyMigration(db, name)
			if err != nil {
				return err
			}
		}
	}
	return nil
}
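
Illustrative sketch (not part of the changeset): the doc comments above imply that a caller first runs the idempotent InitDatabase and then MigrateDatabase to bring the schema up to date. A hedged example of that startup sequence, assuming OpenDb from core/db.go takes a file path; openAndMigrate and path are hypothetical names.

func openAndMigrate(path string) (*DB, error) {
	db, err := OpenDb(path) // assumed constructor signature
	if err != nil {
		return nil, err
	}
	if err := InitDatabase(db); err != nil { // creates the migrations table on first run
		return nil, err
	}
	if err := MigrateDatabase(db); err != nil { // applies any pending sql/*.sql files
		return nil, err
	}
	return db, nil
}
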
67
core/migrations_test.go
Normal file
67
core/migrations_test.go
Normal file
@ -0,0 +1,67 @@
package core

import (
	"database/sql"
	"testing"

	_ "github.com/mattn/go-sqlite3"
)

func EphemeralDb(t *testing.T) *DB {
	// We don't use OpenDb here because you can't open two connections to the same in-memory database
	mem, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		t.Fatal(err)
	}
	if _, err = defaultPragma(mem); err != nil {
		t.Fatal(err)
	}
	db := new(DB)
	db.ro = mem
	db.rw = mem
	if err = InitDatabase(db); err != nil {
		t.Fatal(err)
	}
	if err = MigrateDatabase(db); err != nil {
		t.Fatal(err)
	}
	return db
}

func TestInitIdempotency(t *testing.T) {
	mem, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		t.Fatal(err)
	}
	db := new(DB)
	db.ro = mem
	db.rw = mem
	if err = InitDatabase(db); err != nil {
		t.Fatal(err)
	}
	if err = InitDatabase(db); err != nil {
		t.Fatal(err)
	}
}

func TestMigrations(t *testing.T) {
	db := EphemeralDb(t)

	allMigrations, err := migrations.ReadDir("sql")
	if err != nil {
		t.Fatal(err)
	}

	rows, err := db.Query("select name from migrations")
	if err != nil {
		t.Fatal(err)
	}
	count := 0
	for rows.Next() {
		count += 1
	}

	if count != len(allMigrations) {
		t.Fatalf("Expected %d migrations, got %d", len(allMigrations), count)
	}
}
332
core/source.go
Normal file
332
core/source.go
Normal file
@ -0,0 +1,332 @@
package core

import (
	"database/sql"
	"encoding/json"
	"errors"
	"fmt"
	"log"
	"time"

	_ "github.com/mattn/go-sqlite3"
)

func AddSource(db *DB, name string) error {
	_, err := db.Exec(`
		insert into sources (name)
		values (?)
	`, name)

	return err
}

func GetSources(db *DB) ([]string, error) {
	rows, err := db.Query(`
		select name
		from sources
	`)
	if err != nil {
		return nil, err
	}
	var names []string
	for rows.Next() {
		var name string
		if err = rows.Scan(&name); err != nil {
			return nil, err
		}
		names = append(names, name)
	}
	return names, nil
}

func DeleteSource(db *DB, name string) error {
	_, err := db.Exec(`
		delete from sources
		where name = ?
	`, name)

	return err
}

func AddItems(db *DB, items []Item) error {
	return db.Transact(func(tx *sql.Tx) error {
		stmt, err := tx.Prepare(`
			insert into items (source, id, active, title, author, body, link, time, action)
			values (?, ?, ?, ?, ?, ?, ?, ?, jsonb(?))
		`)
		if err != nil {
			return fmt.Errorf("failed to prepare insert: %v", err)
		}
		for _, item := range items {
			actions, err := json.Marshal(item.Action)
			if err != nil {
				return fmt.Errorf("failed to marshal actions for %s/%s: %v", item.Source, item.Id, err)
			}
			_, err = stmt.Exec(item.Source, item.Id, true, item.Title, item.Author, item.Body, item.Link, item.Time, actions)
			if err != nil {
				return fmt.Errorf("failed to insert %s/%s: %v", item.Source, item.Id, err)
			}
		}
		return nil
	})
}

// Set fields in the new item to match the old item where the new item's fields are zero-valued.
// This allows sources to omit fields and let an action set them without a later fetch overwriting
// the value from the action, e.g. an on-create action archiving a page and setting the link to
// point to the archive.
func BackfillItem(new *Item, old *Item) {
	new.Active = old.Active
	new.Created = old.Created
	if new.Author == "" {
		new.Author = old.Author
	}
	if new.Body == "" {
		new.Body = old.Body
	}
	if new.Link == "" {
		new.Link = old.Link
	}
	if new.Time == 0 {
		new.Time = old.Time
	}
	if new.Title == "" {
		new.Title = old.Title
	}
}

func UpdateItems(db *DB, items []Item) error {
	return db.Transact(func(tx *sql.Tx) error {
		stmt, err := tx.Prepare(`
			update items
			set
				title = ?,
				author = ?,
				body = ?,
				link = ?,
				time = ?,
				action = jsonb(?)
			where source = ?
			and id = ?
		`)
		if err != nil {
			return err
		}
		for _, item := range items {
			actions, err := json.Marshal(item.Action)
			if err != nil {
				return fmt.Errorf("failed to marshal actions for %s/%s: %v", item.Source, item.Id, err)
			}
			_, err = stmt.Exec(item.Title, item.Author, item.Body, item.Link, item.Time, actions, item.Source, item.Id)
			if err != nil {
				return err
			}
		}
		return nil
	})
}

// Deactivate an item, returning its previous active state.
func DeactivateItem(db *DB, source string, id string) (bool, error) {
	row := db.QueryRow(`
		select active
		from items
		where source = ? and id = ?
	`, source, id)
	var active bool
	err := row.Scan(&active)
	if err != nil && errors.Is(err, sql.ErrNoRows) {
		return false, fmt.Errorf("item %s/%s not found", source, id)
	}

	_, err = db.Exec(`
		update items
		set active = 0
		where source = ? and id = ?
	`, source, id)
	if err != nil {
		return false, err
	}
	return active, nil
}

func DeleteItem(db *DB, source string, id string) (int64, error) {
	res, err := db.Exec(`
		delete from items
		where source = ?
		and id = ?
	`, source, id)
	if err != nil {
		return 0, err
	}
	return res.RowsAffected()
}

func getItems(db *DB, query string, args ...any) ([]Item, error) {
	rows, err := db.Query(query, args...)
	if err != nil {
		return nil, err
	}
	var items []Item
	for rows.Next() {
		var item Item
		err = rows.Scan(&item.Source, &item.Id, &item.Created, &item.Active, &item.Title, &item.Author, &item.Body, &item.Link, &item.Time, &item.Action)
		if err != nil {
			return nil, err
		}
		items = append(items, item)
	}
	return items, nil
}

func GetItem(db *DB, source string, id string) (Item, error) {
	items, err := getItems(db, `
		select source, id, created, active, title, author, body, link, time, json(action)
		from items
		where source = ?
		and id = ?
		order by case when time = 0 then created else time end, id
	`, source, id)
	if err != nil {
		return Item{}, err
	}
	if len(items) == 0 {
		return Item{}, fmt.Errorf("no item in %s with id %s", source, id)
	}
	return items[0], nil
}

func GetAllActiveItems(db *DB) ([]Item, error) {
	return getItems(db, `
		select
			source, id, created, active, title, author, body, link, time, json(action)
		from items
		where active <> 0
		order by case when time = 0 then created else time end, id
	`)
}

func GetAllItems(db *DB) ([]Item, error) {
	return getItems(db, `
		select
			source, id, created, active, title, author, body, link, time, json(action)
		from items
		order by case when time = 0 then created else time end, id
	`)
}

func GetActiveItemsForSource(db *DB, source string) ([]Item, error) {
	return getItems(db, `
		select
			source, id, created, active, title, author, body, link, time, json(action)
		from items
		where
			source = ?
			and active <> 0
		order by case when time = 0 then created else time end, id
	`, source)
}

func GetAllItemsForSource(db *DB, source string) ([]Item, error) {
	return getItems(db, `
		select
			source, id, created, active, title, author, body, link, time, json(action)
		from items
		where
			source = ?
		order by case when time = 0 then created else time end, id
	`, source)
}

// Given the results of a fetch, add new items, update existing items, and delete expired items.
//
// Returns the number of new and deleted items on success.
func UpdateWithFetchedItems(db *DB, source string, items []Item) (int, int, error) {
	// Get the existing items
	existingItems, err := GetAllItemsForSource(db, source)
	if err != nil {
		return 0, 0, err
	}
	existingIds := map[string]bool{}
	existingItemsById := map[string]*Item{}
	for _, item := range existingItems {
		existingIds[item.Id] = true
		existingItemsById[item.Id] = &item
	}

	// Split the fetch into adds and updates
	var newItems []Item
	var updatedItems []Item
	for _, item := range items {
		if existingIds[item.Id] {
			updatedItems = append(updatedItems, item)
		} else {
			newItems = append(newItems, item)
		}
	}

	// Bulk insert the new items
	if err = AddItems(db, newItems); err != nil {
		return 0, 0, err
	}

	// Bulk update the existing items
	for _, item := range updatedItems {
		BackfillItem(&item, existingItemsById[item.Id])
	}
	if err = UpdateItems(db, updatedItems); err != nil {
		return 0, 0, err
	}

	// If the source has an on-create trigger, run it for each new item
	// On-create errors are ignored to avoid failing the fetch
	onCreateArgv, err := GetArgvForAction(db, source, "on_create")
	if err == nil {
		var updatedNewItems []Item
		for _, item := range newItems {
			itemJson, err := json.Marshal(item)
			if err != nil {
				log.Fatalf("error: failed to serialize item: %v", err)
			}
			res, err := Execute(source, onCreateArgv, nil, string(itemJson), time.Minute)
			if err != nil {
				log.Printf("error: failed to execute on_create for %s/%s: %v", item.Source, item.Id, err)
				continue
			}
			if len(res) != 1 {
				log.Printf("error: expected on_create for %s/%s to produce exactly one item, got %d", item.Source, item.Id, len(res))
			}
			updatedItem := res[0]
			BackfillItem(&updatedItem, &item)
			updatedNewItems = append(updatedNewItems, updatedItem)
		}
		UpdateItems(db, updatedNewItems)
	}

	// Get the list of expired items
	fetchedIds := map[string]bool{}
	for _, item := range items {
		fetchedIds[item.Id] = true
	}
	expiredIds := map[string]bool{}
	for id := range existingIds {
		expiredIds[id] = !fetchedIds[id]
	}

	// Check expired items for deletion
	idsToDelete := map[string]bool{}
	for _, item := range existingItems {
		if expiredIds[item.Id] && item.Deletable() {
			idsToDelete[item.Id] = true
		}
	}

	// Delete each item to be deleted
	for id := range idsToDelete {
		if _, err = DeleteItem(db, source, id); err != nil {
			return 0, 0, err
		}
	}

	return len(newItems), len(idsToDelete), nil
}
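
Illustrative sketch (not part of the changeset): UpdateWithFetchedItems above is the reconciliation step between one fetch and the stored feed, returning counts of added and deleted items. A minimal caller, assuming fetched came from executing the source's fetch action; reconcile and fetched are hypothetical names.

func reconcile(db *DB, source string, fetched []Item) {
	added, deleted, err := UpdateWithFetchedItems(db, source, fetched)
	if err != nil {
		log.Printf("error: update failed for %s: %v", source, err)
		return
	}
	log.Printf("%s: %d new items, %d deleted", source, added, deleted)
}
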
293
core/source_test.go
Normal file
293
core/source_test.go
Normal file
@ -0,0 +1,293 @@
package core

import (
	"fmt"
	"slices"
	"testing"
	"time"

	_ "github.com/mattn/go-sqlite3"
)

func TestCreateSource(t *testing.T) {
	db := EphemeralDb(t)

	if err := AddSource(db, "one"); err != nil {
		t.Fatal(err)
	}
	if err := AddSource(db, "two"); err != nil {
		t.Fatal(err)
	}
	if err := AddSource(db, "three"); err != nil {
		t.Fatal(err)
	}
	if err := DeleteSource(db, "two"); err != nil {
		t.Fatal(err)
	}

	names, err := GetSources(db)
	if err != nil {
		t.Fatal(err)
	}
	expected := []string{"one", "three"}
	for i := 0; i < len(expected); i += 1 {
		if !slices.Contains(names, expected[i]) {
			t.Fatalf("missing %s, have: %v", expected[i], names)
		}
	}
}

func AssertItemIs(t *testing.T, item Item, expected string) {
	actual := fmt.Sprintf(
		"%s/%s/%t/%s/%s/%s/%s/%d",
		item.Source,
		item.Id,
		item.Active,
		item.Title,
		item.Author,
		item.Body,
		item.Link,
		item.Time,
	)
	if actual != expected {
		t.Fatalf("expected %s, got %s", expected, actual)
	}
}

func TestAddItem(t *testing.T) {
	db := EphemeralDb(t)
	if err := AddSource(db, "test"); err != nil {
		t.Fatalf("failed to add source: %v", err)
	}

	if err := AddItems(db, []Item{
		{"test", "one", 0, true, "", "", "", "", 0, nil},
		{"test", "two", 0, true, "title", "author", "body", "link", 123456, nil},
	}); err != nil {
		t.Fatalf("failed to add items: %v", err)
	}
	items, err := GetActiveItemsForSource(db, "test")
	if err != nil {
		t.Fatalf("failed to get active items: %v", err)
	}
	if len(items) != 2 {
		t.Fatal("should get two items")
	}
	// order is by (time ?? created) so this ordering is correct as long as you don't run it in early 1970
	AssertItemIs(t, items[0], "test/two/true/title/author/body/link/123456")
	AssertItemIs(t, items[1], "test/one/true/////0")

	if _, err = DeactivateItem(db, "test", "one"); err != nil {
		t.Fatal(err)
	}
	items, err = GetActiveItemsForSource(db, "test")
	if err != nil {
		t.Fatal(err)
	}
	if len(items) != 1 {
		t.Fatal("should get one item")
	}

	items, err = GetAllItemsForSource(db, "test")
	if err != nil {
		t.Fatal(err)
	}
	if len(items) != 2 {
		t.Fatal("should get two items")
	}

	deleted, err := DeleteItem(db, "test", "one")
	if err != nil {
		t.Fatal(err)
	}
	if deleted != 1 {
		t.Fatal("expected one deletion")
	}

	deleted, err = DeleteItem(db, "test", "one")
	if err != nil {
		t.Fatal(err)
	}
	if deleted != 0 {
		t.Fatal("expected no deletion")
	}

	items, err = GetAllItemsForSource(db, "test")
	if err != nil {
		t.Fatal(err)
	}
	if len(items) != 1 {
		t.Fatal("should get one item")
	}
}

func TestUpdateSourceAddAndDelete(t *testing.T) {
	db := EphemeralDb(t)
	if err := AddSource(db, "test"); err != nil {
		t.Fatal(err)
	}

	a := Item{Source: "test", Id: "a"}
	add, del, err := UpdateWithFetchedItems(db, "test", []Item{a})
	if add != 1 || del != 0 || err != nil {
		t.Fatalf("update failed: add %d, del %d, err %v", add, del, err)
	}

	add, del, err = UpdateWithFetchedItems(db, "test", []Item{a})
	if add != 0 || del != 0 || err != nil {
		t.Fatalf("update failed: add %d, del %d, err %v", add, del, err)
	}

	b := Item{Source: "test", Id: "b"}
	add, del, err = UpdateWithFetchedItems(db, "test", []Item{a, b})
	if add != 1 || del != 0 || err != nil {
		t.Fatalf("update failed: add %d, del %d, err %v", add, del, err)
	}

	if _, err = DeactivateItem(db, "test", "a"); err != nil {
		t.Fatal(err)
	}
	add, del, err = UpdateWithFetchedItems(db, "test", []Item{a, b})
	if add != 0 || del != 0 || err != nil {
		t.Fatalf("update failed: add %d, del %d, err %v", add, del, err)
	}

	add, del, err = UpdateWithFetchedItems(db, "test", []Item{b})
	if add != 0 || del != 1 || err != nil {
		t.Fatalf("update failed: add %d, del %d, err %v", add, del, err)
	}

	add, del, err = UpdateWithFetchedItems(db, "test", []Item{b})
	if add != 0 || del != 0 || err != nil {
		t.Fatalf("update failed: add %d, del %d, err %v", add, del, err)
	}
}

func TestOnCreateAction(t *testing.T) {
	db := EphemeralDb(t)
	if err := AddSource(db, "test"); err != nil {
		t.Fatal(err)
	}
	if err := AddAction(db, "test", "on_create", []string{"true"}); err != nil {
		t.Fatal(err)
	}

	execute := func(argv []string) []Item {
		items, err := Execute("test", argv, nil, "", time.Minute)
		if err != nil {
			t.Fatal("unexpected error executing test fetch")
		}
		if len(items) != 1 {
			t.Fatalf("expected only one item, got %d", len(items))
		}
		return items
	}

	onCreate := func(argv []string) {
		if err := UpdateAction(db, "test", "on_create", argv); err != nil {
			t.Fatal(err)
		}
	}

	getItem := func(id string) Item {
		item, err := GetItem(db, "test", id)
		if err != nil {
			t.Fatal(err)
		}
		return item
	}

	// Noop on_create works
	onCreate([]string{"tee"})
	items := execute([]string{"jq", "-cn", `{id: "one"}`})
	add, _, err := UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with noop oncreate")
	}
	updated := getItem("one")
	updated.Created = 0 // zero out for comparison with pre-insert item
	if !ItemsAreEqual(updated, items[0]) {
		t.Fatalf("expected no change: %#v != %#v", updated, items[0])
	}

	// on_create can change a field
	onCreate([]string{"jq", "-c", `.title = "Goodbye, World"`})
	items = execute([]string{"jq", "-cn", `{id: "two", title: "Hello, World"}`})
	if items[0].Title != "Hello, World" {
		t.Fatal("unexpected title")
	}
	add, _, err = UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with alter oncreate")
	}
	two := getItem("two")
	if two.Title != "Goodbye, World" {
		t.Fatalf("title not updated, is: %s", two.Title)
	}

	// on_create can add a field
	onCreate([]string{"jq", "-c", `.link = "gopher://go.dev"`})
	items = execute([]string{"jq", "-cn", `{id: "three"}`})
	if items[0].Link != "" {
		t.Fatal("unexpected link")
	}
	add, _, err = UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with augment oncreate")
	}
	if getItem("three").Link != "gopher://go.dev" {
		t.Fatal("link not added")
	}

	// on_create can't delete a field using a zero value
	// due to zero values preserving prior field values
	onCreate([]string{"jq", "-c", `del(.link)`})
	items = execute([]string{"jq", "-cn", `{id: "four", link: "gopher://go.dev"}`})
	if items[0].Link != "gopher://go.dev" {
		t.Fatal("missing link")
	}
	add, _, err = UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with attempted deletion oncreate")
	}
	if getItem("four").Link != "gopher://go.dev" {
		t.Fatal("link unexpectedly removed")
	}

	// item is created if on_create fails
	onCreate([]string{"false"})
	items = execute([]string{"jq", "-cn", `{id: "five"}`})
	add, _, err = UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with failing oncreate")
	}
	if getItem("five").Id != "five" {
		t.Fatal("item not created")
	}

	// item isn't updated if on_create has valid output but a bad exit code
	onCreate([]string{"sh", "-c", `jq -cn '{id: "six", title: "after"}'; exit 1`})
	items = execute([]string{"jq", "-cn", `{id: "six", title: "before"}`})
	if items[0].Title != "before" {
		t.Fatal("unexpected title")
	}
	add, _, err = UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with bad exit code oncreate")
	}
	if getItem("six").Title != "before" {
		t.Fatal("update applied after oncreate failed")
	}

	// on_create can't change id, active, or created
	onCreate([]string{"jq", "-c", `.id = "seven"; .active = false; .created = 123456`})
	items = execute([]string{"jq", "-cn", `{id: "seven"}`})
	add, _, err = UpdateWithFetchedItems(db, "test", items)
	if add != 1 || err != nil {
		t.Fatal("failed update with invalid field changes oncreate")
	}
	updated = getItem("seven")
	if updated.Id != "seven" || !updated.Active || updated.Created == 123456 {
		t.Fatal("unexpected changes to id, active, or created fields")
	}
}
1
core/sql/0000_baseline.sql
Normal file
1
core/sql/0000_baseline.sql
Normal file
@ -0,0 +1 @@
create table migrations (name text) strict;
25
core/sql/0001_initial_schema.sql
Normal file
25
core/sql/0001_initial_schema.sql
Normal file
@ -0,0 +1,25 @@
create table sources(
	name text not null,
	primary key (name)
) strict;
create table actions(
	source text not null,
	name text not null,
	argv blob not null,
	primary key (source, name),
	foreign key (source) references sources (name) on delete cascade
) strict;
create table items(
	source text not null,
	id text not null,
	created int not null default (unixepoch()),
	active int not null,
	title text,
	author text,
	body text,
	link text,
	time int,
	action blob,
	primary key (source, id),
	foreign key (source) references sources (name) on delete cascade
) strict;
10
default.nix
10
default.nix
@ -1,10 +0,0 @@
(import
  (
    let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
    fetchTarball {
      url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
      sha256 = lock.nodes.flake-compat.locked.narHash;
    }
  )
  { src = ./.; }
).defaultNix
@ -1,9 +0,0 @@
{
  "action": {
    "fetch": {
      "exe": "currenttime.sh",
      "args": []
    }
  },
  "cron": "* * * * *"
}
@ -1,11 +0,0 @@
{
  "action": {
    "fetch": {
      "exe": "echo.py",
      "args": []
    }
  },
  "env": {
    "MESSAGE": "Hello, Alice!"
  }
}
@ -1,11 +0,0 @@
{
  "action": {
    "fetch": {
      "exe": "echo.py",
      "args": []
    }
  },
  "env": {
    "MESSAGE": "Hello, Bob!"
  }
}
@ -1,90 +0,0 @@
|
||||
{ pkgs, ... }:
|
||||
|
||||
{
|
||||
system.stateVersion = "22.11";
|
||||
|
||||
# Set up two users to demonstrate the user separation
|
||||
users.users.alice = {
|
||||
isNormalUser = true;
|
||||
password = "alpha";
|
||||
uid = 1000;
|
||||
packages = [ pkgs.intake ];
|
||||
};
|
||||
|
||||
users.users.bob = {
|
||||
isNormalUser = true;
|
||||
password = "beta";
|
||||
uid = 1001;
|
||||
packages = [ pkgs.intake ];
|
||||
};
|
||||
|
||||
# Set up intake for both users with an entry point at port 8080
|
||||
services.intake = {
|
||||
listen.port = 8080;
|
||||
users.alice.enable = true;
|
||||
users.bob.enable = true;
|
||||
};
|
||||
|
||||
# Expose the vm's intake revproxy at host port 5234
|
||||
virtualisation.forwardPorts = [{
|
||||
from = "host";
|
||||
host.port = 5234;
|
||||
guest.port = 8080;
|
||||
}];
|
||||
|
||||
# Mount the demo content for both users
|
||||
nixos-shell.mounts = {
|
||||
mountHome = false;
|
||||
mountNixProfile = false;
|
||||
cache = "none";
|
||||
|
||||
extraMounts = {
|
||||
"/mnt/alice" = ./alice;
|
||||
"/mnt/bob" = ./bob;
|
||||
"/mnt/sources" = ./sources;
|
||||
};
|
||||
};
|
||||
|
||||
# Create an activation script that copies and chowns the demo content
|
||||
# chmod 777 because the users may not exist when the activation script runs
|
||||
system.activationScripts =
|
||||
let
|
||||
userSetup = name: uid: ''
|
||||
${pkgs.coreutils}/bin/mkdir -p /home/${name}/.local/share/intake
|
||||
${pkgs.coreutils}/bin/cp -r /mnt/${name}/* /home/${name}/.local/share/intake/
|
||||
${pkgs.coreutils}/bin/chown -R ${uid} /home/${name}
|
||||
${pkgs.findutils}/bin/find /home/${name} -type d -exec ${pkgs.coreutils}/bin/chmod 755 {} \;
|
||||
${pkgs.findutils}/bin/find /home/${name} -type f -exec ${pkgs.coreutils}/bin/chmod 644 {} \;
|
||||
'';
|
||||
in
|
||||
{
|
||||
aliceSetup = userSetup "alice" "1000";
|
||||
bobSetup = userSetup "bob" "1001";
|
||||
};
|
||||
|
||||
# Put the demo sources on the global PATH
|
||||
environment.variables.PATH = "/mnt/sources";
|
||||
|
||||
# Include some demo instructions
|
||||
environment.etc.issue.text = ''
|
||||
###
|
||||
# Welcome to the intake demo! Log in as `alice` with password `alpha` to begin.
|
||||
#
|
||||
# Exit the VM with ctrl+a x, or switch to the qemu console with ctrl+a c and `quit`.
|
||||
###
|
||||
|
||||
'';
|
||||
users.motd = ''
|
||||
|
||||
###
|
||||
# To set a password for the web interface, run `intake passwd` and set a password.
|
||||
#
|
||||
# Within this demo VM, the main intake entry point can be found at localhost:8080. This is also exposed on the host machine at localhost:5234. After you set a password, navigate to localhost:5234 on your host machine and log in to see the web interface.
|
||||
#
|
||||
# Try updating the `echo` source by running `intake update -s echo`. You should see a new item after refreshing the source's feed. This source uses `env` source configuration, so use `intake edit -s echo` or the web interface to change the message, then update the source again.
|
||||
#
|
||||
# Updating a source will also trigger intake to update the user crontab. If you run `crontab -l`, you should see that the `currenttime` source has a crontab entry. You can change this source's cron schedule in the source config.
|
||||
###
|
||||
|
||||
'';
|
||||
}
|
@ -1,2 +0,0 @@
#!/bin/sh
echo {\"id\": \"$(date +%Y-%m-%d-%H-%M)\"}
@ -1,10 +0,0 @@
#!/usr/bin/env python3

import hashlib, json, os, sys

echo = os.environ.get("MESSAGE", "Hello, world!")
item = {
    "id": hashlib.md5(echo.encode("utf8")).hexdigest(),
    "title": echo,
}
print(json.dumps(item), file=sys.stdout)
59
flake.lock
generated
59
flake.lock
generated
@ -1,61 +1,54 @@
|
||||
{
|
||||
"nodes": {
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1673956053,
|
||||
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixos-shell": {
|
||||
"flake-parts": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
"nixpkgs-lib": "nixpkgs-lib"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1686216465,
|
||||
"narHash": "sha256-0A4K6xVIyxUi2YZu4+156WwzAO1GDWGcKiMvsXpBQDQ=",
|
||||
"owner": "Mic92",
|
||||
"repo": "nixos-shell",
|
||||
"rev": "65489e7eeef8eeea43e1e4218ad1b99d58852c7c",
|
||||
"lastModified": 1736143030,
|
||||
"narHash": "sha256-+hu54pAoLDEZT9pjHlqL9DNzWz0NbUn8NEAHP7PQPzU=",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "flake-parts",
|
||||
"rev": "b905f6fc23a9051a6e1b741e1438dbfc0634c6de",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "Mic92",
|
||||
"repo": "nixos-shell",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "flake-parts",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1717179513,
|
||||
"narHash": "sha256-vboIEwIQojofItm2xGCdZCzW96U85l9nDW3ifMuAIdM=",
|
||||
"lastModified": 1736798957,
|
||||
"narHash": "sha256-qwpCtZhSsSNQtK4xYGzMiyEDhkNzOCz/Vfu4oL2ETsQ=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "63dacb46bf939521bdc93981b4cbb7ecb58427a0",
|
||||
"rev": "9abb87b552b7f55ac8916b6fc9e5cb486656a2f3",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "24.05",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-lib": {
|
||||
"locked": {
|
||||
"lastModified": 1735774519,
|
||||
"narHash": "sha256-CewEm1o2eVAnoqb6Ml+Qi9Gg/EfNAxbRx1lANGVyoLI=",
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/NixOS/nixpkgs/archive/e9b51731911566bbf7e4895475a87fe06961de0b.tar.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "tarball",
|
||||
"url": "https://github.com/NixOS/nixpkgs/archive/e9b51731911566bbf7e4895475a87fe06961de0b.tar.gz"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-compat": "flake-compat",
|
||||
"nixos-shell": "nixos-shell",
|
||||
"flake-parts": "flake-parts",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
}
|
||||
|
97
flake.nix
97
flake.nix
@ -1,73 +1,40 @@
|
||||
{
|
||||
description = "A personal feed aggregator";
|
||||
description = "Universal and extensible feed aggregator";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/24.05";
|
||||
# Included to support default.nix and shell.nix
|
||||
flake-compat = {
|
||||
url = "github:edolstra/flake-compat";
|
||||
flake = false;
|
||||
};
|
||||
# Included to support the integration test in tests/demo.nix
|
||||
nixos-shell.url = "github:Mic92/nixos-shell";
|
||||
nixos-shell.inputs.nixpkgs.follows = "nixpkgs";
|
||||
flake-parts.url = "github:hercules-ci/flake-parts";
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, flake-compat, nixos-shell }:
|
||||
let
|
||||
inherit (nixpkgs.lib) makeOverridable nixosSystem;
|
||||
system = "x86_64-linux";
|
||||
in {
|
||||
packages.${system} = let
|
||||
pkgs = (import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [ self.overlays.default ];
|
||||
});
|
||||
in {
|
||||
default = self.packages.${system}.intake;
|
||||
inherit (pkgs) intake;
|
||||
};
|
||||
|
||||
devShells.${system} = {
|
||||
default = let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
pythonEnv = pkgs.python3.withPackages (pypkgs: with pypkgs; [ flask black pytest ]);
|
||||
in pkgs.mkShell {
|
||||
packages = [
|
||||
pythonEnv
|
||||
pkgs.nixos-shell
|
||||
# We only take this dependency for htpasswd, which is a little unfortunate
|
||||
pkgs.apacheHttpd
|
||||
];
|
||||
shellHook = ''
|
||||
PS1="(develop) $PS1"
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
overlays.default = final: prev: {
|
||||
intake = final.python3Packages.buildPythonPackage {
|
||||
name = "intake";
|
||||
src = builtins.path { path = ./.; name = "intake"; };
|
||||
format = "pyproject";
|
||||
propagatedBuildInputs = with final.python3Packages; [ flask setuptools ];
|
||||
};
|
||||
};
|
||||
|
||||
templates.source = {
|
||||
path = builtins.path { path = ./template; name = "source"; };
|
||||
description = "A basic intake source config";
|
||||
};
|
||||
|
||||
nixosModules.default = import ./module.nix self;
|
||||
|
||||
nixosConfigurations."demo" = makeOverridable nixosSystem {
|
||||
inherit system;
|
||||
modules = [
|
||||
nixos-shell.nixosModules.nixos-shell
|
||||
self.nixosModules.default
|
||||
./demo
|
||||
outputs =
|
||||
inputs@{ flake-parts, ... }:
|
||||
flake-parts.lib.mkFlake { inherit inputs; } {
|
||||
systems = [
|
||||
"x86_64-linux"
|
||||
"aarch64-linux"
|
||||
"aarch64-darwin"
|
||||
"x86_64-darwin"
|
||||
];
|
||||
perSystem =
|
||||
{
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
formatter = pkgs.nixfmt-rfc-style;
|
||||
|
||||
devShells.default = pkgs.mkShell {
|
||||
packages = [
|
||||
pkgs.go
|
||||
pkgs.gopls
|
||||
pkgs.go-tools
|
||||
pkgs.gotools
|
||||
pkgs.cobra-cli
|
||||
pkgs.air
|
||||
];
|
||||
};
|
||||
};
|
||||
flake = {
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
12
go.mod
Normal file
12
go.mod
Normal file
@ -0,0 +1,12 @@
module github.com/Jaculabilis/intake

go 1.23.4

require github.com/spf13/cobra v1.8.1

require github.com/mattn/go-sqlite3 v1.14.24

require (
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/spf13/pflag v1.0.5 // indirect
)
12
go.sum
Normal file
12
go.sum
Normal file
@ -0,0 +1,12 @@
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@ -1,3 +0,0 @@
from .cli import main

main()
417
intake/app.py
417
intake/app.py
@ -1,417 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
from functools import wraps
|
||||
from pathlib import Path
|
||||
from random import getrandbits
|
||||
from typing import List
|
||||
import json
|
||||
import sys
|
||||
import time
|
||||
|
||||
from flask import (
|
||||
Flask,
|
||||
render_template,
|
||||
request,
|
||||
jsonify,
|
||||
abort,
|
||||
redirect,
|
||||
url_for,
|
||||
current_app,
|
||||
)
|
||||
|
||||
from intake.core import intake_data_dir
|
||||
from intake.crontab import update_crontab_entries
|
||||
from intake.source import LocalSource, execute_action, Item
|
||||
|
||||
# Globals
|
||||
app = Flask(__name__)
|
||||
|
||||
|
||||
CRON_HELPTEXT = """cron spec:
|
||||
* * * * *
|
||||
+-------------- minute (0 - 59)
|
||||
+----------- hour (0 - 23)
|
||||
+-------- day of month (1 - 31)
|
||||
+----- month (1 - 12)
|
||||
+-- day of week (0 Sun - 6 Sat)"""
|
||||
|
||||
|
||||
def item_sort_key(item: Item):
|
||||
return item.sort_key
|
||||
|
||||
|
||||
def get_show_hidden(default: bool):
|
||||
"""
|
||||
Get the value of the ?hidden query parameter, with a default value if it is
|
||||
absent or set to an unnown value.
|
||||
"""
|
||||
hidden = request.args.get("hidden")
|
||||
if hidden == "true":
|
||||
return True
|
||||
if hidden == "false":
|
||||
return False
|
||||
return default
|
||||
|
||||
|
||||
@app.template_filter("datetimeformat")
|
||||
def datetimeformat(value):
|
||||
if not value:
|
||||
return ""
|
||||
dt = datetime.fromtimestamp(value)
|
||||
return dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
@app.template_global()
|
||||
def set_query(**kwargs):
|
||||
"""
|
||||
Helper function to create a URL plus or minus some query parameters.
|
||||
"""
|
||||
args = request.args.copy()
|
||||
for key, val in kwargs.items():
|
||||
if val is None and key in args:
|
||||
del args[key]
|
||||
else:
|
||||
args[key] = val
|
||||
return url_for(request.endpoint, **request.view_args, **args)
|
||||
|
||||
|
||||
def auth_check(route):
|
||||
"""
|
||||
Checks the HTTP Basic Auth header against the stored credential.
|
||||
"""
|
||||
|
||||
@wraps(route)
|
||||
def _route(*args, **kwargs):
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
auth_path = data_path / "credentials.json"
|
||||
if auth_path.exists():
|
||||
if not request.authorization:
|
||||
abort(401)
|
||||
auth = json.load(auth_path.open(encoding="utf8"))
|
||||
if request.authorization.username != auth["username"]:
|
||||
abort(403)
|
||||
if request.authorization.password != auth["secret"]:
|
||||
abort(403)
|
||||
return route(*args, **kwargs)
|
||||
|
||||
return _route
|
||||
|
||||
|
||||
@app.get("/")
|
||||
@auth_check
|
||||
def root():
|
||||
"""
|
||||
Navigation home page.
|
||||
"""
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
|
||||
sources = []
|
||||
for child in data_path.iterdir():
|
||||
if (child / "intake.json").exists():
|
||||
sources.append(LocalSource(data_path, child.name))
|
||||
sources.sort(key=lambda s: s.source_name)
|
||||
|
||||
channels = {}
|
||||
channels_config_path = data_path / "channels.json"
|
||||
if channels_config_path.exists():
|
||||
channels = json.loads(channels_config_path.read_text(encoding="utf8"))
|
||||
|
||||
return render_template(
|
||||
"home.jinja2",
|
||||
sources=sources,
|
||||
channels=channels,
|
||||
)
|
||||
|
||||
|
||||
@app.get("/source/<string:name>")
|
||||
@auth_check
|
||||
def source_feed(name):
|
||||
"""
|
||||
Feed view for a single source.
|
||||
"""
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
source = LocalSource(data_path, name)
|
||||
if not source.source_path.exists():
|
||||
abort(404)
|
||||
|
||||
return _sources_feed(name, [source], show_hidden=get_show_hidden(True))
|
||||
|
||||
|
||||
@app.get("/channel/<string:name>")
|
||||
@auth_check
|
||||
def channel_feed(name):
|
||||
"""
|
||||
Feed view for a channel.
|
||||
"""
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
channels_config_path = data_path / "channels.json"
|
||||
if not channels_config_path.exists():
|
||||
abort(404)
|
||||
channels = json.loads(channels_config_path.read_text(encoding="utf8"))
|
||||
if name not in channels:
|
||||
abort(404)
|
||||
sources = [LocalSource(data_path, name) for name in channels[name]]
|
||||
|
||||
return _sources_feed(name, sources, show_hidden=get_show_hidden(False))
|
||||
|
||||
|
||||
def _sources_feed(name: str, sources: List[LocalSource], show_hidden: bool):
|
||||
"""
|
||||
Feed view for multiple sources.
|
||||
"""
|
||||
# Get all items
|
||||
all_items = sorted(
|
||||
[
|
||||
item
|
||||
for source in sources
|
||||
for item in source.get_all_items()
|
||||
if not item.is_hidden or show_hidden
|
||||
],
|
||||
key=item_sort_key,
|
||||
)
|
||||
|
||||
# Apply paging parameters
|
||||
count = int(request.args.get("count", "100"))
|
||||
page = int(request.args.get("page", "0"))
|
||||
paged_items = all_items[count * page : count * page + count]
|
||||
pager_prev = (
|
||||
None
|
||||
if page <= 0
|
||||
else url_for(request.endpoint, name=name, count=count, page=page - 1)
|
||||
)
|
||||
pager_next = (
|
||||
None
|
||||
if (count * page + count) > len(all_items)
|
||||
else url_for(request.endpoint, name=name, count=count, page=page + 1)
|
||||
)
|
||||
|
||||
return render_template(
|
||||
"feed.jinja2",
|
||||
items=paged_items,
|
||||
now=int(time.time()),
|
||||
mdeac=[
|
||||
{"source": item.source.source_name, "itemid": item["id"]}
|
||||
for item in paged_items
|
||||
if "id" in item
|
||||
],
|
||||
page_num=page,
|
||||
page_count=count,
|
||||
item_count=len(all_items),
|
||||
)
|
||||
|
||||
|
||||
@app.delete("/item/<string:source_name>/<string:item_id>")
|
||||
@auth_check
|
||||
def deactivate(source_name, item_id):
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
source = LocalSource(data_path, source_name)
|
||||
item = source.get_item(item_id)
|
||||
if item["active"]:
|
||||
print(f"Deactivating {source_name}/{item_id}", file=sys.stderr)
|
||||
item["active"] = False
|
||||
source.save_item(item)
|
||||
return jsonify({"active": item["active"]})
|
||||
|
||||
|
||||
@app.patch("/item/<string:source_name>/<string:item_id>")
|
||||
@auth_check
|
||||
def update(source_name, item_id):
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
source = LocalSource(data_path, source_name)
|
||||
item = source.get_item(item_id)
|
||||
params = request.get_json()
|
||||
if "tts" in params:
|
||||
tomorrow = datetime.now() + timedelta(days=1)
|
||||
morning = datetime(tomorrow.year, tomorrow.month, tomorrow.day, 6, 0, 0)
|
||||
til_then = int(morning.timestamp()) - item["created"]
|
||||
item["tts"] = til_then
|
||||
source.save_item(item)
|
||||
return jsonify(item._item)
|
||||
|
||||
|
||||
@app.post("/mass-deactivate/")
|
||||
@auth_check
|
||||
def mass_deactivate():
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
params = request.get_json()
|
||||
if "items" not in params:
|
||||
print(f"Bad request params: {params}", file=sys.stderr)
|
||||
for info in params.get("items"):
|
||||
source = info["source"]
|
||||
itemid = info["itemid"]
|
||||
source = LocalSource(data_path, source)
|
||||
item = source.get_item(itemid)
|
||||
if item["active"]:
|
||||
print(f"Deactivating {info['source']}/{info['itemid']}", file=sys.stderr)
|
||||
item["active"] = False
|
||||
source.save_item(item)
|
||||
return jsonify({})
|
||||
|
||||
|
||||
@app.post("/action/<string:source_name>/<string:item_id>/<string:action>")
|
||||
@auth_check
|
||||
def action(source_name, item_id, action):
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
source = LocalSource(data_path, source_name)
|
||||
item = execute_action(source, item_id, action)
|
||||
return jsonify(item._item)
|
||||
|
||||
|
||||
@app.route("/edit/source/<string:name>", methods=["GET", "POST"])
|
||||
@auth_check
|
||||
def source_edit(name):
|
||||
"""
|
||||
Config editor for a source
|
||||
"""
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
source = LocalSource(data_path, name)
|
||||
if not source.source_path.exists():
|
||||
abort(404)
|
||||
|
||||
# For POST, check if the config is valid
|
||||
error_message: str = None
|
||||
if request.method == "POST":
|
||||
config_str = request.form.get("config", "")
|
||||
error_message, config = _parse_source_config(config_str)
|
||||
if not error_message:
|
||||
source.save_config(config)
|
||||
update_crontab_entries(data_path)
|
||||
return redirect(url_for("root"))
|
||||
|
||||
# For GET, load the config
|
||||
if request.method == "GET":
|
||||
config = source.get_config()
|
||||
config_str = json.dumps(config, indent=2)
|
||||
|
||||
return render_template(
|
||||
"edit.jinja2",
|
||||
subtitle=source.source_name,
|
||||
config=config_str,
|
||||
error_message=error_message,
|
||||
helptext=CRON_HELPTEXT,
|
||||
)
|
||||
|
||||
|
||||
def _parse_source_config(config_str: str):
|
||||
if not config_str:
|
||||
return ("Config required", {})
|
||||
try:
|
||||
parsed = json.loads(config_str)
|
||||
except json.JSONDecodeError:
|
||||
return ("Invalid JSON", {})
|
||||
if not isinstance(parsed, dict):
|
||||
return ("Invalid config format", {})
|
||||
if "action" not in parsed:
|
||||
return ("No actions defined", {})
|
||||
action = parsed["action"]
|
||||
if "fetch" not in action:
|
||||
return ("No fetch action defined", {})
|
||||
fetch = action["fetch"]
|
||||
if "exe" not in fetch:
|
||||
return ("No fetch exe", {})
|
||||
config = {"action": parsed["action"]}
|
||||
if "env" in parsed:
|
||||
config["env"] = parsed["env"]
|
||||
if "cron" in parsed:
|
||||
config["cron"] = parsed["cron"]
|
||||
return (None, config)
|
||||
|
||||
|
||||
@app.route("/edit/channels", methods=["GET", "POST"])
|
||||
@auth_check
|
||||
def channels_edit():
|
||||
"""
|
||||
Config editor for channels
|
||||
"""
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
config_path = data_path / "channels.json"
|
||||
|
||||
# For POST, check if the config is valid
|
||||
error_message: str = None
|
||||
if request.method == "POST":
|
||||
config_str = request.form.get("config", "")
|
||||
error_message, config = _parse_channels_config(config_str)
|
||||
if not error_message:
|
||||
config_path.write_text(json.dumps(config, indent=2), encoding="utf8")
|
||||
return redirect(url_for("root"))
|
||||
|
||||
# For GET, load the config
|
||||
if request.method == "GET":
|
||||
if config_path.exists():
|
||||
config = json.loads(config_path.read_text(encoding="utf8"))
|
||||
else:
|
||||
config = {}
|
||||
config_str = json.dumps(config, indent=2)
|
||||
|
||||
return render_template(
|
||||
"edit.jinja2",
|
||||
subtitle="Channels",
|
||||
config=config_str,
|
||||
error_message=error_message,
|
||||
)
|
||||
|
||||
|
||||
def _parse_channels_config(config_str: str):
|
||||
if not config_str:
|
||||
return ("Config required", {})
|
||||
try:
|
||||
parsed = json.loads(config_str)
|
||||
except json.JSONDecodeError:
|
||||
return ("Invalid JSON", {})
|
||||
if not isinstance(parsed, dict):
|
||||
return ("Invalid config format", {})
|
||||
for key in parsed:
|
||||
if not isinstance(parsed[key], list):
|
||||
return (f"{key} must map to a list", {})
|
||||
for val in parsed[key]:
|
||||
if not isinstance(val, str):
|
||||
return f"{key} source {val} must be a string"
|
||||
return (None, parsed)
|
||||
|
||||
|
||||
@app.post("/add")
|
||||
@auth_check
|
||||
def add_item():
|
||||
# Ensure the default source exists
|
||||
data_path: Path = current_app.config["INTAKE_DATA"]
|
||||
source_path = data_path / "default"
|
||||
if not source_path.exists():
|
||||
source_path.mkdir()
|
||||
config_path = source_path / "intake.json"
|
||||
if not config_path.exists():
|
||||
config_path.write_text(
|
||||
json.dumps({"action": {"fetch": {"exe": "true"}}}, indent=2)
|
||||
)
|
||||
source = LocalSource(source_path.parent, source_path.name)
|
||||
|
||||
fields = {"id": "{:x}".format(getrandbits(16 * 4))}
|
||||
if form_title := request.form.get("title"):
|
||||
fields["title"] = form_title
|
||||
if form_link := request.form.get("link"):
|
||||
fields["link"] = form_link
|
||||
if form_body := request.form.get("body"):
|
||||
fields["body"] = form_body
|
||||
if form_tags := request.form.get("tags"):
|
||||
fields["tags"] = [tag.strip() for tag in form_tags.split() if tag.strip()]
|
||||
if form_tts := request.form.get("tts"):
|
||||
fields["tts"] = _get_ttx_for_date(datetime.fromisoformat(form_tts))
|
||||
if form_ttl := request.form.get("ttl"):
|
||||
fields["ttl"] = _get_ttx_for_date(datetime.fromisoformat(form_ttl))
|
||||
if form_ttd := request.form.get("ttd"):
|
||||
fields["ttd"] = _get_ttx_for_date(datetime.fromisoformat(form_ttd))
|
||||
|
||||
item = Item.create(source, **fields)
|
||||
source.save_item(item)
|
||||
|
||||
return redirect(url_for("source_feed", name="default"))
|
||||
|
||||
|
||||
def _get_ttx_for_date(dt: datetime) -> int:
|
||||
"""Get the relative time difference between now and a date."""
|
||||
ts = int(dt.timestamp())
|
||||
now = int(time.time())
|
||||
return ts - now
|
||||
|
||||
|
||||
def wsgi():
|
||||
app.config["INTAKE_DATA"] = intake_data_dir()
|
||||
return app
|
393
intake/cli.py
393
intake/cli.py
@ -1,393 +0,0 @@
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from shutil import get_terminal_size
|
||||
import argparse
|
||||
import getpass
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import pwd
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from intake.core import intake_data_dir
|
||||
from intake.crontab import update_crontab_entries
|
||||
from intake.source import fetch_items, LocalSource, update_items, execute_action
|
||||
from intake.types import InvalidConfigException, SourceUpdateException
|
||||
|
||||
|
||||
def cmd_edit(cmd_args):
|
||||
"""Open a source's config for editing."""
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake edit",
|
||||
description=cmd_edit.__doc__,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--data",
|
||||
"-d",
|
||||
help="Path to the intake data directory",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--source",
|
||||
"-s",
|
||||
required=True,
|
||||
help="Source name to edit",
|
||||
)
|
||||
args = parser.parse_args(cmd_args)
|
||||
data_path: Path = Path(args.data) if args.data else intake_data_dir()
|
||||
|
||||
editor_cmd = os.environ.get("EDITOR")
|
||||
if not editor_cmd:
|
||||
print("Cannot edit, no EDITOR set", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
source_path: Path = data_path / args.source
|
||||
if not source_path.exists():
|
||||
yn = input("Source does not exist, create? [yN] ")
|
||||
if yn.strip().lower() != "y":
|
||||
return 0
|
||||
source_path.mkdir()
|
||||
with (source_path / "intake.json").open("w") as f:
|
||||
json.dump(
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "",
|
||||
"args": [],
|
||||
},
|
||||
},
|
||||
"env": {},
|
||||
},
|
||||
f,
|
||||
indent=2,
|
||||
)
|
||||
|
||||
# Make a copy of the config
|
||||
source = LocalSource(data_path, args.source)
|
||||
tmp_path = source.source_path / "intake.json.tmp"
|
||||
tmp_path.write_text(json.dumps(source.get_config(), indent=2))
|
||||
|
||||
while True:
|
||||
# Edit the config
|
||||
subprocess.run([editor_cmd, tmp_path])
|
||||
|
||||
# Check if the new config is valid
|
||||
try:
|
||||
json.load(tmp_path.open())
|
||||
except json.JSONDecodeError:
|
||||
yn = input("Invalid JSON. Return to editor? [Yn] ")
|
||||
if yn.strip().lower() != "n":
|
||||
continue
|
||||
tmp_path.unlink()
|
||||
return 0
|
||||
|
||||
tmp_path.replace(source.source_path / "intake.json")
|
||||
|
||||
# Assume that --data is for local testing and don't update crontab
|
||||
if not args.data:
|
||||
update_crontab_entries(data_path)
|
||||
break
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def cmd_update(cmd_args):
|
||||
"""Fetch items for a source and update it."""
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake update",
|
||||
description=cmd_update.__doc__,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--data",
|
||||
"-d",
|
||||
help="Path to the intake data directory containing source directories",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--source",
|
||||
"-s",
|
||||
required=True,
|
||||
help="Source name to fetch",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Instead of updating the source, print the fetched items",
|
||||
)
|
||||
args = parser.parse_args(cmd_args)
|
||||
|
||||
data_path: Path = Path(args.data) if args.data else intake_data_dir()
|
||||
source = LocalSource(data_path, args.source)
|
||||
try:
|
||||
items = fetch_items(source)
|
||||
if not args.dry_run:
|
||||
update_items(source, items)
|
||||
else:
|
||||
print("Update returned", len(items), "items:")
|
||||
for item in items:
|
||||
print(" Item:", item._item, file=sys.stderr)
|
||||
except InvalidConfigException as ex:
|
||||
print("Could not fetch", args.source, file=sys.stderr)
|
||||
print(ex, file=sys.stderr)
|
||||
return 1
|
||||
except SourceUpdateException as ex:
|
||||
print("Error updating source", args.source, file=sys.stderr)
|
||||
print(ex, file=sys.stderr)
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def cmd_action(cmd_args):
|
||||
"""Execute an action for an item."""
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake action",
|
||||
description=cmd_action.__doc__,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--data",
|
||||
"-d",
|
||||
help="Path to the intake data directory containing source directories",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--source",
|
||||
"-s",
|
||||
required=True,
|
||||
help="Source name to fetch",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--item",
|
||||
"-i",
|
||||
required=True,
|
||||
help="Item id to perform the action with",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--action",
|
||||
"-a",
|
||||
required=True,
|
||||
help="Action to perform",
|
||||
)
|
||||
args = parser.parse_args(cmd_args)
|
||||
|
||||
data_path: Path = Path(args.data) if args.data else intake_data_dir()
|
||||
source = LocalSource(data_path, args.source)
|
||||
try:
|
||||
item = execute_action(source, args.item, args.action, 5)
|
||||
print("Item:", item._item, file=sys.stderr)
|
||||
except InvalidConfigException as ex:
|
||||
print("Could not fetch", args.source, file=sys.stderr)
|
||||
print(ex, file=sys.stderr)
|
||||
return 1
|
||||
except SourceUpdateException as ex:
|
||||
print(
|
||||
"Error executing source",
|
||||
args.source,
|
||||
"item",
|
||||
args.item,
|
||||
"action",
|
||||
args.action,
|
||||
file=sys.stderr,
|
||||
)
|
||||
print(ex, file=sys.stderr)
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def cmd_feed(cmd_args):
|
||||
"""Print the current feed."""
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake feed",
|
||||
description=cmd_feed.__doc__,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--data",
|
||||
"-d",
|
||||
help="Path to the intake data directory",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--sources",
|
||||
"-s",
|
||||
nargs="+",
|
||||
help="Limit feed to these sources",
|
||||
)
|
||||
args = parser.parse_args(cmd_args)
|
||||
|
||||
data_path: Path = Path(args.data) if args.data else intake_data_dir()
|
||||
if not data_path.is_dir():
|
||||
print("Not a directory:", data_path, file=sys.stderr)
|
||||
return 1
|
||||
|
||||
if not args.sources:
|
||||
args.sources = [child.name for child in data_path.iterdir()]
|
||||
|
||||
sources = [
|
||||
LocalSource(data_path, name)
|
||||
for name in args.sources
|
||||
if (data_path / name / "intake.json").exists()
|
||||
]
|
||||
items = sorted(
|
||||
[item for source in sources for item in source.get_all_items()],
|
||||
key=lambda item: item.sort_key,
|
||||
)
|
||||
|
||||
if not items:
|
||||
print("Feed is empty")
|
||||
return 0
|
||||
|
||||
size = get_terminal_size((80, 20))
|
||||
width = min(80, size.columns)
|
||||
|
||||
for item in items:
|
||||
title = item.display_title
|
||||
titles = [title]
|
||||
while len(titles[-1]) > width - 4:
|
||||
i = titles[-1][: width - 4].rfind(" ")
|
||||
titles = titles[:-1] + [titles[-1][:i].strip(), titles[-1][i:].strip()]
|
||||
print("+" + (width - 2) * "-" + "+")
|
||||
for title in titles:
|
||||
print("| {0:<{1}} |".format(title, width - 4))
|
||||
print("|{0:<{1}}|".format("", width - 2))
|
||||
info1 = ""
|
||||
if "author" in title and item["author"]:
|
||||
info1 += item["author"] + " "
|
||||
if "time" in item and item["time"]:
|
||||
time_dt = datetime.fromtimestamp(item["time"])
|
||||
info1 += time_dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
print("| {0:<{1}} |".format(info1, width - 4))
|
||||
created_dt = datetime.fromtimestamp(item["created"])
|
||||
created = created_dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
info2 = "{0} {1} {2}".format(
|
||||
item.source.source_name, item.get("id", ""), created
|
||||
)
|
||||
print("| {0:<{1}} |".format(info2, width - 4))
|
||||
print("+" + (width - 2) * "-" + "+")
|
||||
print()
|
||||
|
||||
|
||||
def cmd_passwd(cmd_args):
|
||||
"""Update password for the web interface."""
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake passwd",
|
||||
description=cmd_passwd.__doc__,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--data",
|
||||
"-d",
|
||||
help="Path to the intake data directory",
|
||||
)
|
||||
args = parser.parse_args(cmd_args)
|
||||
|
||||
command_exists = subprocess.run("command -v htpasswd", shell=True)
|
||||
if command_exists.returncode:
|
||||
print("Could not find htpasswd, cannot update password", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
data_path: Path = Path(args.data) if args.data else intake_data_dir()
|
||||
creds = Path(data_path) / "credentials.json"
|
||||
if not creds.parent.exists():
|
||||
creds.parent.mkdir(parents=True)
|
||||
|
||||
user = pwd.getpwuid(os.getuid()).pw_name
|
||||
password = getpass.getpass(f"intake password for {user}: ")
|
||||
update_pwd = subprocess.run(
|
||||
["htpasswd", "-b", "/etc/intake/htpasswd", user, password]
|
||||
)
|
||||
if update_pwd.returncode:
|
||||
print("Could not update password file", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
new_creds = {"username": user, "secret": password}
|
||||
creds.write_text(json.dumps(new_creds, indent=2))
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def cmd_run(cmd_args):
|
||||
"""Run the default Flask server."""
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake run",
|
||||
description=cmd_run.__doc__,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--data",
|
||||
"-d",
|
||||
help="Path to the intake data directory containing source directories",
|
||||
)
|
||||
parser.add_argument("--debug", action="store_true")
|
||||
parser.add_argument("--port", type=int, default=5000)
|
||||
args = parser.parse_args(cmd_args)
|
||||
|
||||
data_path: Path = Path(args.data) if args.data else intake_data_dir()
|
||||
try:
|
||||
from intake.app import app
|
||||
|
||||
app.config["INTAKE_DATA"] = data_path
|
||||
app.run(port=args.port, debug=args.debug)
|
||||
return 0
|
||||
except Exception as ex:
|
||||
print(ex, file=sys.stderr)
|
||||
return 1
|
||||
|
||||
|
||||
def cmd_help(_):
|
||||
"""Print the help text."""
|
||||
print_usage()
|
||||
return 0
|
||||
|
||||
|
||||
def execute_cli():
|
||||
"""
|
||||
Internal entry point for CLI execution.
|
||||
"""
|
||||
|
||||
# Collect the commands in this module.
|
||||
cli = sys.modules[__name__]
|
||||
commands = {
|
||||
name[4:]: func for name, func in vars(cli).items() if name.startswith("cmd_")
|
||||
}
|
||||
names_width = max(map(len, commands.keys()))
|
||||
desc_fmt = f" {{0:<{names_width}}} {{1}}"
|
||||
descriptions = "\n".join(
|
||||
[desc_fmt.format(name, func.__doc__) for name, func in commands.items()]
|
||||
)
|
||||
|
||||
# Set up the top-level parser
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="intake",
|
||||
description=f"Available commands:\n{descriptions}\n",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
# add_help=False,
|
||||
)
|
||||
parser.add_argument(
|
||||
"command",
|
||||
nargs="?",
|
||||
default="help",
|
||||
help="The command to execute",
|
||||
choices=commands,
|
||||
metavar="command",
|
||||
)
|
||||
parser.add_argument(
|
||||
"args", nargs=argparse.REMAINDER, help="Command arguments", metavar="args"
|
||||
)
|
||||
|
||||
# Extract the usage print for command_help
|
||||
global print_usage
|
||||
print_usage = parser.print_help
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Execute command
|
||||
sys.exit(commands[args.command](args.args))
|
||||
|
||||
|
||||
def main():
|
||||
"""
|
||||
Main entry point for CLI execution.
|
||||
"""
|
||||
try:
|
||||
execute_cli()
|
||||
except BrokenPipeError:
|
||||
# See https://docs.python.org/3.10/library/signal.html#note-on-sigpipe
|
||||
devnull = os.open(os.devnull, os.O_WRONLY)
|
||||
os.dup2(devnull, sys.stdout.fileno())
|
||||
sys.exit(1)
|
12
intake/core.py
@@ -1,12 +0,0 @@
|
||||
from pathlib import Path
|
||||
import os
|
||||
|
||||
|
||||
def intake_data_dir() -> Path:
|
||||
if intake_data := os.environ.get("INTAKE_DATA"):
|
||||
return Path(intake_data)
|
||||
if xdg_data_home := os.environ.get("XDG_DATA_HOME"):
|
||||
return Path(xdg_data_home) / "intake"
|
||||
if home := os.environ.get("HOME"):
|
||||
return Path(home) / ".local" / "share" / "intake"
|
||||
raise Exception("No intake data directory defined")
|
93
intake/crontab.py
@@ -1,93 +0,0 @@
|
||||
from pathlib import Path
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from intake.source import LocalSource
|
||||
|
||||
|
||||
INTAKE_CRON_BEGIN = "### begin intake-managed crontab entries"
|
||||
INTAKE_CRON_END = "### end intake-managed crontab entries"
|
||||
|
||||
|
||||
def get_desired_crons(data_path: Path):
|
||||
"""
|
||||
Get a list of sources and crontab specs from the data directory.
|
||||
"""
|
||||
for child in data_path.iterdir():
|
||||
if not (child / "intake.json").exists():
|
||||
continue
|
||||
source = LocalSource(data_path, child.name)
|
||||
config = source.get_config()
|
||||
if cron := config.get("cron"):
|
||||
yield f"{cron} . /etc/profile; intake update -s {source.source_name}"
|
||||
|
||||
|
||||
def update_crontab_entries(data_path: Path):
|
||||
"""
|
||||
Update the intake-managed section of the user's crontab.
|
||||
"""
|
||||
# If there is no crontab command available, quit early.
|
||||
cmd = ("command", "-v", "crontab")
|
||||
print("Executing", *cmd, file=sys.stderr)
|
||||
crontab_exists = subprocess.run(" ".join(cmd), shell=True)
|
||||
if crontab_exists.returncode:
|
||||
print("Could not update crontab", file=sys.stderr)
|
||||
return
|
||||
|
||||
# Get the current crontab
|
||||
cmd = ["crontab", "-e"]
|
||||
print("Executing", *cmd, file=sys.stderr)
|
||||
get_crontab = subprocess.run(
|
||||
cmd,
|
||||
env={**os.environ, "EDITOR": "cat"},
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
for line in get_crontab.stderr.decode("utf8").splitlines():
|
||||
print("[stderr]", line, file=sys.stderr)
|
||||
crontab_lines = get_crontab.stdout.decode("utf-8").splitlines()
|
||||
|
||||
# Splice the intake crons into the crontab
|
||||
new_crontab_lines = []
|
||||
section_found = False
|
||||
in_section = False
|
||||
for i in range(len(crontab_lines)):
|
||||
|
||||
if not section_found and crontab_lines[i] == INTAKE_CRON_BEGIN:
|
||||
section_found = True
|
||||
in_section = True
|
||||
# Open the section and add everything
|
||||
new_crontab_lines.append(INTAKE_CRON_BEGIN)
|
||||
new_crontab_lines.extend(get_desired_crons(data_path))
|
||||
|
||||
elif crontab_lines[i] == INTAKE_CRON_END:
|
||||
new_crontab_lines.append(INTAKE_CRON_END)
|
||||
in_section = False
|
||||
|
||||
elif not in_section:
|
||||
new_crontab_lines.append(crontab_lines[i])
|
||||
|
||||
# If the splice mark was never found, append the whole section to the end
|
||||
if not section_found:
|
||||
new_crontab_lines.append(INTAKE_CRON_BEGIN)
|
||||
new_crontab_lines.extend(get_desired_crons(data_path))
|
||||
new_crontab_lines.append(INTAKE_CRON_END)
|
||||
|
||||
print("Updating", len(new_crontab_lines) - 2, "crontab entries", file=sys.stderr)
|
||||
|
||||
# Save the updated crontab
|
||||
cmd = ["crontab", "-"]
|
||||
print("Executing", *cmd, file=sys.stderr)
|
||||
new_crontab: bytes = "\n".join(new_crontab_lines).encode("utf8")
|
||||
save_crontab = subprocess.Popen(
|
||||
cmd,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
(stdout, stderr) = save_crontab.communicate(new_crontab)
|
||||
for line in stdout.decode("utf8").splitlines():
|
||||
print("[stdout]", line, file=sys.stderr)
|
||||
for line in stderr.decode("utf8").splitlines():
|
||||
print("[stderr]", line, file=sys.stderr)
|
369
intake/source.py
@@ -1,369 +0,0 @@
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from subprocess import Popen, PIPE, TimeoutExpired
|
||||
from threading import Thread
|
||||
from time import time as current_time
|
||||
from typing import List
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from intake.types import InvalidConfigException, SourceUpdateException
|
||||
|
||||
|
||||
class Item:
|
||||
"""
|
||||
A wrapper for an item object.
|
||||
"""
|
||||
|
||||
def __init__(self, source: "LocalSource", item: dict):
|
||||
self.source = source
|
||||
self._item = item
|
||||
|
||||
# Methods to allow Item as a drop-in replacement for the item dict itself
|
||||
def __contains__(self, key):
|
||||
return self._item.__contains__(key)
|
||||
|
||||
def __iter__(self):
|
||||
return self._item.__iter__()
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._item.__getitem__(key)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
return self._item.__setitem__(key, value)
|
||||
|
||||
def get(self, key, default=None):
|
||||
return self._item.get(key, default)
|
||||
|
||||
@staticmethod
|
||||
def create(source: "LocalSource", **fields) -> "Item":
|
||||
if "id" not in fields:
|
||||
raise KeyError("id")
|
||||
item = {
|
||||
"id": fields["id"],
|
||||
"created": int(current_time()),
|
||||
"active": True,
|
||||
}
|
||||
for field_name in (
|
||||
"title",
|
||||
"author",
|
||||
"body",
|
||||
"link",
|
||||
"time",
|
||||
"tags",
|
||||
"tts",
|
||||
"ttl",
|
||||
"ttd",
|
||||
"action",
|
||||
):
|
||||
if val := fields.get(field_name):
|
||||
item[field_name] = val
|
||||
return Item(source, item)
|
||||
|
||||
@property
|
||||
def display_title(self):
|
||||
return self._item.get("title", self._item["id"])
|
||||
|
||||
@property
|
||||
def can_remove(self):
|
||||
# The time-to-live field protects an item from removal until expiry.
|
||||
# This is mainly used to avoid old items resurfacing when their source
|
||||
# cannot guarantee monotonicity.
|
||||
if "ttl" in self._item:
|
||||
ttl_date = self._item["created"] + self._item["ttl"]
|
||||
if ttl_date > current_time():
|
||||
return False
|
||||
|
||||
# The time-to-die field puts a maximum lifespan on an item, removing it
|
||||
# even if it is active.
|
||||
if "ttd" in self._item:
|
||||
ttd_date = self._item["created"] + self._item["ttd"]
|
||||
if ttd_date < current_time():
|
||||
return True
|
||||
|
||||
return not self._item["active"]
|
||||
|
||||
@property
|
||||
def before_tts(self):
|
||||
return (
|
||||
"tts" in self._item
|
||||
and current_time() < self._item["created"] + self._item["tts"]
|
||||
)
|
||||
|
||||
@property
|
||||
def is_hidden(self):
|
||||
return not self._item["active"] or self.before_tts
|
||||
|
||||
@property
|
||||
def sort_key(self):
|
||||
item_date = self._item.get(
|
||||
"time",
|
||||
self._item.get(
|
||||
"created",
|
||||
),
|
||||
)
|
||||
return (item_date, self._item["id"])
|
||||
|
||||
def serialize(self, indent=True):
|
||||
return json.dumps(self._item, indent=2 if indent else None)
|
||||
|
||||
def update_from(self, updated: "Item") -> None:
|
||||
for field in (
|
||||
"title",
|
||||
"author",
|
||||
"body",
|
||||
"link",
|
||||
"time",
|
||||
"tags",
|
||||
"tts",
|
||||
"ttl",
|
||||
"ttd",
|
||||
):
|
||||
if field in updated and self[field] != updated[field]:
|
||||
self[field] = updated[field]
|
||||
# Actions are not updated since the available actions and associated
|
||||
# content is left to the action executor to manage.
|
||||
|
||||
|
||||
class LocalSource:
|
||||
"""
|
||||
An intake source backed by a filesystem directory.
|
||||
"""
|
||||
|
||||
def __init__(self, data_path: Path, source_name: str):
|
||||
self.data_path: Path = data_path
|
||||
self.source_name = source_name
|
||||
self.source_path: Path = data_path / source_name
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.source_name
|
||||
|
||||
def get_config(self) -> dict:
|
||||
config_path = self.source_path / "intake.json"
|
||||
with open(config_path, "r", encoding="utf8") as config_file:
|
||||
return json.load(config_file)
|
||||
|
||||
def save_config(self, config: dict) -> None:
|
||||
config_path = self.source_path / "intake.json"
|
||||
tmp_path = config_path.with_name(f"{config_path.name}.tmp")
|
||||
with tmp_path.open("w") as f:
|
||||
f.write(json.dumps(config, indent=2))
|
||||
os.rename(tmp_path, config_path)
|
||||
|
||||
def get_state_path(self) -> Path:
|
||||
return (self.source_path / "state").absolute()
|
||||
|
||||
def get_item_path(self, item_id: str) -> Path:
|
||||
return self.source_path / f"{item_id}.item"
|
||||
|
||||
def get_item_ids(self) -> List[str]:
|
||||
return [
|
||||
filepath.name[:-5]
|
||||
for filepath in self.source_path.iterdir()
|
||||
if filepath.name.endswith(".item")
|
||||
]
|
||||
|
||||
def item_exists(self, item_id) -> bool:
|
||||
return self.get_item_path(item_id).exists()
|
||||
|
||||
def get_item(self, item_id: str) -> Item:
|
||||
with self.get_item_path(item_id).open() as f:
|
||||
return Item(self, json.load(f))
|
||||
|
||||
def save_item(self, item: Item) -> None:
|
||||
# Write to a tempfile first to avoid losing the item on write failure
|
||||
item_path = self.get_item_path(item["id"])
|
||||
tmp_path = item_path.with_name(f"{item_path.name}.tmp")
|
||||
with tmp_path.open("w") as f:
|
||||
f.write(item.serialize())
|
||||
os.rename(tmp_path, item_path)
|
||||
|
||||
def delete_item(self, item_id) -> None:
|
||||
os.remove(self.get_item_path(item_id))
|
||||
|
||||
def get_all_items(self) -> List[Item]:
|
||||
for filepath in self.source_path.iterdir():
|
||||
if filepath.name.endswith(".item"):
|
||||
yield Item(self, json.loads(filepath.read_text(encoding="utf8")))
|
||||
|
||||
|
||||
def _read_stdout(process: Popen, output: list) -> None:
|
||||
"""
|
||||
Read the subprocess's stdout into memory.
|
||||
This prevents the process from blocking when the pipe fills up.
|
||||
"""
|
||||
while True:
|
||||
data = process.stdout.readline()
|
||||
if data:
|
||||
print(f"[stdout] {data.rstrip()}", file=sys.stderr)
|
||||
output.append(data)
|
||||
if process.poll() is not None:
|
||||
break
|
||||
|
||||
|
||||
def _read_stderr(process: Popen) -> None:
|
||||
"""
|
||||
Read the subprocess's stderr stream and pass it to logging.
|
||||
This prevents the process from blocking when the pipe fills up.
|
||||
"""
|
||||
while True:
|
||||
data = process.stderr.readline()
|
||||
if data:
|
||||
print(f"[stderr] {data.rstrip()}", file=sys.stderr)
|
||||
if process.poll() is not None:
|
||||
break
|
||||
|
||||
|
||||
def _execute_source_action(
|
||||
source: LocalSource, action: str, input: str, timeout: timedelta
|
||||
) -> List[str]:
|
||||
"""
|
||||
Execute the action from a given source. If stdin is specified, pass it
|
||||
along to the process. Returns lines from stdout.
|
||||
"""
|
||||
# Gather the information necessary to launch the process
|
||||
config = source.get_config()
|
||||
action_cfg = config.get("action", {}).get(action)
|
||||
|
||||
if not action_cfg:
|
||||
raise InvalidConfigException(f"No such action {action}")
|
||||
if "exe" not in action_cfg:
|
||||
raise InvalidConfigException(f"No exe for action {action}")
|
||||
|
||||
command = [action_cfg["exe"], *action_cfg.get("args", [])]
|
||||
config_env = {key: str(value) for key, value in config.get("env", {}).items()}
|
||||
env = {
|
||||
**os.environ.copy(),
|
||||
**config_env,
|
||||
"STATE_PATH": str(source.get_state_path()),
|
||||
}
|
||||
|
||||
# Launch the process
|
||||
try:
|
||||
process = Popen(
|
||||
command,
|
||||
stdin=PIPE,
|
||||
stdout=PIPE,
|
||||
stderr=PIPE,
|
||||
cwd=source.source_path,
|
||||
env=env,
|
||||
encoding="utf8",
|
||||
)
|
||||
except PermissionError:
|
||||
raise SourceUpdateException(f"Command not executable: {''.join(command)}")
|
||||
|
||||
# Kick off monitoring threads
|
||||
output = []
|
||||
t_stdout: Thread = Thread(target=_read_stdout, args=(process, output), daemon=True)
|
||||
t_stdout.start()
|
||||
t_stderr: Thread = Thread(target=_read_stderr, args=(process,), daemon=True)
|
||||
t_stderr.start()
|
||||
|
||||
# Send input to the process, if provided
|
||||
if input:
|
||||
process.stdin.write(input)
|
||||
if not input.endswith("\n"):
|
||||
process.stdin.write("\n")
|
||||
process.stdin.flush()
|
||||
|
||||
try:
|
||||
process.wait(timeout=timeout.total_seconds())
|
||||
except TimeoutExpired:
|
||||
process.kill()
|
||||
t_stdout.join(timeout=1)
|
||||
t_stderr.join(timeout=1)
|
||||
|
||||
if process.poll():
|
||||
raise SourceUpdateException(
|
||||
f"{source.source_name} {action} failed with code {process.returncode}"
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def fetch_items(source: LocalSource, timeout: int = 60) -> List[Item]:
|
||||
"""
|
||||
Execute the feed source and return the current feed items.
|
||||
Returns a list of feed items on success.
|
||||
Throws SourceUpdateException if the feed source update failed.
|
||||
"""
|
||||
items: List[Item] = []
|
||||
|
||||
output = _execute_source_action(source, "fetch", None, timedelta(timeout))
|
||||
|
||||
for line in output:
|
||||
try:
|
||||
item = Item.create(source, **json.loads(line))
|
||||
items.append(item)
|
||||
except json.JSONDecodeError:
|
||||
raise SourceUpdateException("invalid json")
|
||||
|
||||
return items
|
||||
|
||||
|
||||
def execute_action(
|
||||
source: LocalSource, item_id: str, action: str, timeout: int = 60
|
||||
) -> Item:
|
||||
"""
|
||||
Execute the action for a feed source.
|
||||
"""
|
||||
item: Item = source.get_item(item_id)
|
||||
|
||||
output = _execute_source_action(
|
||||
source, action, item.serialize(indent=False), timedelta(seconds=timeout)
|
||||
)
|
||||
if not output:
|
||||
raise SourceUpdateException("no item")
|
||||
|
||||
try:
|
||||
item = Item(source, json.loads(output[0]))
|
||||
source.save_item(item)
|
||||
return item
|
||||
except json.JSONDecodeError:
|
||||
raise SourceUpdateException("invalid json")
|
||||
|
||||
|
||||
def update_items(source: LocalSource, fetched_items: List[Item]):
|
||||
"""
|
||||
Update the source with a batch of new items, doing creations, updates, and
|
||||
deletions as necessary.
|
||||
"""
|
||||
# Get a list of item ids that already existed for this source.
|
||||
prior_ids = source.get_item_ids()
|
||||
print(f"Found {len(prior_ids)} prior items", file=sys.stderr)
|
||||
|
||||
# Determine which items are new and which are updates.
|
||||
new_items: List[Item] = []
|
||||
upd_items: List[Item] = []
|
||||
for item in fetched_items:
|
||||
if source.item_exists(item["id"]):
|
||||
upd_items.append(item)
|
||||
else:
|
||||
new_items.append(item)
|
||||
|
||||
# Write all the new items to the source directory.
|
||||
for item in new_items:
|
||||
# TODO: support on-create trigger
|
||||
source.save_item(item)
|
||||
|
||||
# Update the other items using the fetched items' values.
|
||||
for upd_item in upd_items:
|
||||
old_item = source.get_item(upd_item["id"])
|
||||
old_item.update_from(upd_item)
|
||||
source.save_item(old_item)
|
||||
|
||||
# Items are removed when they are old (not in the latest fetch) and
|
||||
# inactive. Some item fields change this basic behavior.
|
||||
del_count = 0
|
||||
# now = int(current_time())
|
||||
upd_ids = [item["id"] for item in upd_items]
|
||||
old_item_ids = [item_id for item_id in prior_ids if item_id not in upd_ids]
|
||||
|
||||
for item_id in old_item_ids:
|
||||
if source.get_item(item_id).can_remove:
|
||||
source.delete_item(item_id)
|
||||
del_count += 1
|
||||
|
||||
print(len(new_items), "new,", del_count, "deleted", file=sys.stderr)
|
@@ -1,91 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Intake - {{ subtitle }}</title>
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAAOwgAADsIBFShKgAAAABh0RVh0U29mdHdhcmUAcGFpbnQubmV0IDQuMS41ZEdYUgAAAGFJREFUOE+lkFEKwDAIxXrzXXB3ckMm9EnAV/YRCxFCcUXEL3Jc77NDjpDA/VGL3RFWYEICfeGC8oQc9IPuCAnQDcoRVmBCAn3hgvKEHPSD7ggJ0A3KEVZgQgJ94YLSJ9YDUzNGDXGZ/JEAAAAASUVORK5CYII=">
|
||||
<style>
|
||||
main {
|
||||
max-width: 700px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
article {
|
||||
border: 1px solid black; border-radius: 6px;
|
||||
padding: 5px;
|
||||
margin-bottom: 20px;
|
||||
word-break: break-word;
|
||||
}
|
||||
.item-title {
|
||||
font-size: 1.4em;
|
||||
}
|
||||
.item-button {
|
||||
font-size: 1em;
|
||||
float:right;
|
||||
margin-left: 2px;
|
||||
}
|
||||
.item-link {
|
||||
text-decoration: none;
|
||||
float:right;
|
||||
font-size: 1em;
|
||||
padding: 2px 7px;
|
||||
border: 1px solid;
|
||||
border-radius: 2px;
|
||||
}
|
||||
.item-info {
|
||||
color: rgba(0, 0, 0, 0.7);
|
||||
}
|
||||
article img {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
article textarea {
|
||||
width: 100%;
|
||||
resize: vertical;
|
||||
}
|
||||
button, summary {
|
||||
cursor: pointer;
|
||||
}
|
||||
summary {
|
||||
display: block;
|
||||
}
|
||||
summary:focus {
|
||||
outline: 1px dotted gray;
|
||||
}
|
||||
.strikethru span, .strikethru p {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
.fade span, .fade p {
|
||||
color: rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
pre {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
table.feed-control td {
|
||||
font-family: monospace; padding: 5px 10px;
|
||||
}
|
||||
span.error-message {
|
||||
color: red;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
|
||||
<article>
|
||||
<form method="post">
|
||||
<label for="config" class="item-title">Config Editor</label>
|
||||
<textarea autofocus id="config" name="config" rows=20>{{config}}</textarea>
|
||||
<p><input type="submit" value="Submit">
|
||||
{% if error_message %}
|
||||
<span class="error-message">{{ error_message }}</span>
|
||||
{% endif %}
|
||||
</p>
|
||||
</form>
|
||||
{% if helptext -%}
|
||||
<pre>{{ helptext }}
|
||||
</pre>
|
||||
{%- endif %}
|
||||
</article>
|
||||
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
@@ -1,211 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Intake{% if items %} ({{ items|length }}){% endif %}</title>
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAAOwgAADsIBFShKgAAAABh0RVh0U29mdHdhcmUAcGFpbnQubmV0IDQuMS41ZEdYUgAAAGFJREFUOE+lkFEKwDAIxXrzXXB3ckMm9EnAV/YRCxFCcUXEL3Jc77NDjpDA/VGL3RFWYEICfeGC8oQc9IPuCAnQDcoRVmBCAn3hgvKEHPSD7ggJ0A3KEVZgQgJ94YLSJ9YDUzNGDXGZ/JEAAAAASUVORK5CYII=">
|
||||
<style>
|
||||
main {
|
||||
max-width: 700px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
article {
|
||||
border: 1px solid black; border-radius: 6px;
|
||||
padding: 5px;
|
||||
margin-bottom: 20px;
|
||||
word-break: break-word;
|
||||
}
|
||||
.item-title {
|
||||
font-size: 1.4em;
|
||||
}
|
||||
.item-button {
|
||||
font-size: 1em;
|
||||
float:right;
|
||||
margin-left: 2px;
|
||||
}
|
||||
.item-link {
|
||||
text-decoration: none;
|
||||
float:right;
|
||||
font-size: 1em;
|
||||
padding: 2px 7px;
|
||||
border: 1px solid;
|
||||
border-radius: 2px;
|
||||
}
|
||||
.item-info {
|
||||
color: rgba(0, 0, 0, 0.7);
|
||||
}
|
||||
article img {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
button, summary {
|
||||
cursor: pointer;
|
||||
}
|
||||
summary {
|
||||
display: block;
|
||||
}
|
||||
summary:focus {
|
||||
outline: 1px dotted gray;
|
||||
}
|
||||
.strikethru span, .strikethru p {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
.fade span, .fade p {
|
||||
color: rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
pre {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
table.feed-control td {
|
||||
font-family: monospace; padding: 5px 10px;
|
||||
}
|
||||
article.center {
|
||||
text-align: center;
|
||||
}
|
||||
</style>
|
||||
<script>
|
||||
var deactivate = function (source, itemid) {
|
||||
fetch(`/item/${source}/${itemid}`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(function (data) {
|
||||
if (!data.active) {
|
||||
document.getElementById(source + "-" + itemid)
|
||||
.classList.add("strikethru", "fade");
|
||||
}
|
||||
});
|
||||
};
|
||||
var punt = function (source, itemid) {
|
||||
fetch(`/item/${source}/${itemid}`, {
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
},
|
||||
body: JSON.stringify({tts: "+1"}),
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(function (data) {
|
||||
if (data.tts) {
|
||||
document.getElementById(source + "-" + itemid)
|
||||
.classList.add("fade");
|
||||
}
|
||||
});
|
||||
};
|
||||
var mdeactivate = function (items) {
|
||||
console.log(items);
|
||||
if (confirm(`Deactivate ${items.length} items?`)) {
|
||||
fetch('/mass-deactivate/', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
},
|
||||
body: JSON.stringify({items: items}),
|
||||
})
|
||||
.then(function () {
|
||||
location.reload();
|
||||
});
|
||||
}
|
||||
};
|
||||
var doAction = function (source, itemid, action) {
|
||||
document.getElementById(`${source}-${itemid}-action-${action}`).disabled = true;
|
||||
fetch(`/action/${source}/${itemid}/${action}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json; charset=UTF-8',
|
||||
},
|
||||
})
|
||||
.then(function (data) {
|
||||
location.reload()
|
||||
});
|
||||
};
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
<article class="center">
|
||||
<span class="item-title">
|
||||
<a href="{{url_for('root')}}">Home</a>
|
||||
[<a href="{{ set_query(hidden='false', page=None, count=None) }}">Active</a> | <a href="{{ set_query(hidden='true', page=None, count=None) }}">All</a>]
|
||||
{% if item_count > items|length -%}
|
||||
[<a {% if page_num is greaterthan(0) -%} href="{{ set_query(page=page_num - 1) }}" {%- endif %}>Prev</a>
|
||||
|
|
||||
<a {% if ((page_num + 1) * page_count) is lessthan(item_count) -%} href="{{ set_query(page=page_num + 1) }}" {%- endif %}>Next</a>]
|
||||
{%- endif %}
|
||||
</span>
|
||||
</article>
|
||||
{% if items %}
|
||||
{% for item in items %}
|
||||
<article class="
|
||||
{%- if not item.active %} strikethru{% endif %}
|
||||
{%- if item.is_hidden %} fade{% endif -%}
|
||||
" id="{{item.source}}-{{item.id}}">
|
||||
{% if item.id %}
|
||||
<button class="item-button" onclick="javascript:deactivate('{{item.source}}', '{{item.id}}')" title="Deactivate">✕</button>
|
||||
{% endif %}
|
||||
{% if item.id %}
|
||||
<button class="item-button" onclick="javascript:punt('{{item.source}}', '{{item.id}}')" title="Punt to tomorrow">↷</button>
|
||||
{% endif %}
|
||||
{% if item.link %}
|
||||
<a class="item-link" href="{{item.link}}" target="_blank">⇗</a>
|
||||
{% endif %}
|
||||
|
||||
{# The item title is a clickable <summary> if there is body content #}
|
||||
{% if item.body or item.action %}
|
||||
<details>
|
||||
<summary><span class="item-title">{{item.display_title}}</span></summary>
|
||||
{% if item.body %}
|
||||
<p>{{item.body|safe}}</p>
|
||||
{% endif %}
|
||||
{% for action in item.action %}
|
||||
<p><button id="{{item.source}}-{{item.id}}-action-{{action}}" onclick="javascript:doAction('{{item.source}}', '{{item.id}}', '{{action}}')">{{action}}</button></p>
|
||||
{% endfor %}
|
||||
</details>
|
||||
{% else %}
|
||||
<span class="item-title">{{item.display_title}}</span><br>
|
||||
{% endif %}
|
||||
|
||||
{# author/time footer line #}
|
||||
{% if item.author or item.time %}
|
||||
<span class="item-info">
|
||||
{% if item.author %}{{item.author}}{% endif %}
|
||||
{% if item.time %}{{item.time|datetimeformat}}{% endif %}
|
||||
</span><br>
|
||||
{% endif %}
|
||||
|
||||
{# source/id/created footer line #}
|
||||
{% if item.source or item.id or item.created %}
|
||||
<span class="item-info" title="{{ 'Tags: {}'.format(', '.join(item.tags)) }}">
|
||||
{% if item.source %}{{item.source}}{% endif %}
|
||||
{% if item.id %}{{item.id}}{% endif %}
|
||||
{% if item.created %}{{item.created|datetimeformat}}{% endif %}
|
||||
{% if item.ttl %}L{% endif %}{% if item.ttd %}D{% endif %}{% if item.tts %}S{% endif %}
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
</article>
|
||||
{% endfor %}
|
||||
|
||||
{% if item_count > items|length %}
|
||||
<article class="center">
|
||||
<span class="item-title">
|
||||
<a {% if page_num is greaterthan(0) -%} href="{{ set_query(page=page_num - 1) }}" {%- endif %}>Prev</a>
|
||||
|
|
||||
<a {% if ((page_num + 1) * page_count) is lessthan(item_count) -%} href="{{ set_query(page=page_num + 1) }}" {%- endif %}>Next</a>
|
||||
</span>
|
||||
</article>
|
||||
{% endif %}
|
||||
|
||||
<article class="center">
|
||||
<button onclick="javascript:mdeactivate({{ mdeac|safe }})">Deactivate All</button>
|
||||
</article>
|
||||
|
||||
{# if items #}
|
||||
{% else %}
|
||||
<article class="center">
|
||||
<span class="item-title">Feed is empty</span>
|
||||
</article>
|
||||
{% endif %}
|
||||
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
@@ -1,113 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Intake</title>
|
||||
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAAOwgAADsIBFShKgAAAABh0RVh0U29mdHdhcmUAcGFpbnQubmV0IDQuMS41ZEdYUgAAAGFJREFUOE+lkFEKwDAIxXrzXXB3ckMm9EnAV/YRCxFCcUXEL3Jc77NDjpDA/VGL3RFWYEICfeGC8oQc9IPuCAnQDcoRVmBCAn3hgvKEHPSD7ggJ0A3KEVZgQgJ94YLSJ9YDUzNGDXGZ/JEAAAAASUVORK5CYII=">
|
||||
<style>
|
||||
main {
|
||||
max-width: 700px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
article {
|
||||
border: 1px solid black; border-radius: 6px;
|
||||
padding: 5px;
|
||||
margin-bottom: 20px;
|
||||
word-break: break-word;
|
||||
}
|
||||
.item-title {
|
||||
font-size: 1.4em;
|
||||
}
|
||||
article img {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
button, summary {
|
||||
cursor: pointer;
|
||||
}
|
||||
summary {
|
||||
display: block;
|
||||
}
|
||||
summary:focus {
|
||||
outline: 1px dotted gray;
|
||||
}
|
||||
.wide {
|
||||
width: 100%;
|
||||
resize: vertical;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
|
||||
<article>
|
||||
<details open>
|
||||
<summary><span class="item-title">Channels</span></summary>
|
||||
{% if not channels %}
|
||||
<p>No channels found.</p>
|
||||
{% else %}
|
||||
{% for channel in channels %}
|
||||
<p><a href="{{ url_for('channel_feed', name=channel) }}">{{ channel }}</a></p>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
<p><a href="{{ url_for('channels_edit') }}">Edit channels</a></p>
|
||||
</details>
|
||||
</article>
|
||||
|
||||
<article>
|
||||
<details>
|
||||
<summary><span class="item-title">Sources</span></summary>
|
||||
{% if not sources %}
|
||||
<p>No sources found.</p>
|
||||
{% else %}
|
||||
{% for source in sources %}
|
||||
<p>
|
||||
{%- for channel, srcs in channels|items -%}
|
||||
{%- if source.source_name in srcs -%}
|
||||
^
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
<a href="{{ url_for('source_feed', name=source.source_name) }}">{{ source.source_name|safe }}</a>
|
||||
(<a href="{{ url_for('source_edit', name=source.source_name) }}">edit</a>)
|
||||
</p>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
</details>
|
||||
</article>
|
||||
|
||||
<article>
|
||||
<details open>
|
||||
<summary><span class="item-title">Add item</span></summary>
|
||||
<form action="add" method="post">
|
||||
<p>
|
||||
<input type="text" name="title" class="wide" placeholder="Title">
|
||||
</p>
|
||||
<p>
|
||||
<input type="url" name="link" class="wide" placeholder="Link">
|
||||
</p>
|
||||
<p>
|
||||
<textarea name="body" class="wide" placeholder="Body"></textarea>
|
||||
</p>
|
||||
<p>
|
||||
<input type="text" name="tags" class="wide" placeholder="Tags, comma-separated">
|
||||
</p>
|
||||
<p>
|
||||
<label for="tts">TTS:</label>
|
||||
<input type="datetime-local" name="tts">
|
||||
</p>
|
||||
<p>
|
||||
<label for="ttl">TTL:</label>
|
||||
<input type="datetime-local" name="ttl">
|
||||
</p>
|
||||
<p>
|
||||
<label for="ttd">TTD:</label>
|
||||
<input type="datetime-local" name="ttd">
|
||||
</p>
|
||||
<p>
|
||||
<input type="submit" value="Add">
|
||||
</form>
|
||||
</details>
|
||||
</article>
|
||||
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
21
intake/types.py
@@ -1,21 +0,0 @@
|
||||
"""
|
||||
Common exception types.
|
||||
"""
|
||||
|
||||
|
||||
class IntakeException(Exception):
|
||||
"""
|
||||
Base class for intake application exceptions.
|
||||
"""
|
||||
|
||||
|
||||
class InvalidConfigException(IntakeException):
|
||||
"""
|
||||
Could not interact with a source because the source's config was not valid.
|
||||
"""
|
||||
|
||||
|
||||
class SourceUpdateException(Exception):
|
||||
"""
|
||||
The source update process did not return valid data and signal success.
|
||||
"""
|
7
main.go
Normal file
@@ -0,0 +1,7 @@
|
||||
package main
|
||||
|
||||
import "github.com/Jaculabilis/intake/cmd"
|
||||
|
||||
func main() {
|
||||
cmd.Execute()
|
||||
}
|
153
module.nix
@@ -1,153 +0,0 @@
|
||||
flake: { config, lib, pkgs, ... }:
|
||||
|
||||
let
|
||||
inherit (lib) filterAttrs foldl imap1 mapAttrsToList mkEnableOption mkIf mkMerge mkOption mkPackageOption types;
|
||||
intakeCfg = config.services.intake;
|
||||
in {
|
||||
options = {
|
||||
services.intake = {
|
||||
listen.addr = mkOption {
|
||||
type = types.str;
|
||||
default = "0.0.0.0";
|
||||
description = "The listen address for the entry point to intake services. This endpoint will redirect to a "
|
||||
"local port based on the request's HTTP Basic Auth credentials.";
|
||||
};
|
||||
|
||||
listen.port = mkOption {
|
||||
type = types.port;
|
||||
default = 80;
|
||||
description = "The listen port for the entry point to intake services. This endpoint will redirect to a local "
|
||||
"port based on the request's HTTP Basic Auth credentials.";
|
||||
};
|
||||
|
||||
package = mkPackageOption pkgs "intake" {};
|
||||
|
||||
internalPortStart = mkOption {
|
||||
type = types.port;
|
||||
default = 24130;
|
||||
description = "The first port to use for internal service endpoints. A number of ports will be continguously "
|
||||
"allocated equal to the number of users with enabled intake services.";
|
||||
};
|
||||
|
||||
extraPackages = mkOption {
|
||||
type = types.listOf types.package;
|
||||
default = [];
|
||||
description = "Extra packages available to all enabled users and their intake services.";
|
||||
};
|
||||
|
||||
users = mkOption {
|
||||
description = "User intake service definitions.";
|
||||
default = {};
|
||||
type = types.attrsOf (types.submodule {
|
||||
options = {
|
||||
enable = mkEnableOption "intake, a personal feed aggregator.";
|
||||
|
||||
extraPackages = mkOption {
|
||||
type = types.listOf types.package;
|
||||
default = [];
|
||||
description = "Extra packages available to this user and their intake service.";
|
||||
};
|
||||
};
|
||||
});
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
config =
|
||||
let
|
||||
# Define the intake package and a python environment to run it from
|
||||
intake = intakeCfg.package;
|
||||
pythonEnv = pkgs.python3.withPackages (pypkgs: [ intake ]);
|
||||
|
||||
# Assign each user an internal port for their personal intake instance
|
||||
enabledUsers = filterAttrs (userName: userCfg: userCfg.enable) intakeCfg.users;
|
||||
enabledUserNames = mapAttrsToList (userName: userCfg: userName) enabledUsers;
|
||||
userPortList = imap1 (i: userName: { ${userName} = i + intakeCfg.internalPortStart; }) enabledUserNames;
|
||||
userPort = foldl (acc: val: acc // val) {} userPortList;
|
||||
|
||||
# To avoid polluting PATH with httpd programs, define an htpasswd wrapper
|
||||
htpasswdWrapper = pkgs.writeShellScriptBin "htpasswd" ''
|
||||
${pkgs.apacheHttpd}/bin/htpasswd $@
|
||||
'';
|
||||
|
||||
# File locations
|
||||
intakeDir = "/etc/intake";
|
||||
intakePwd = "${intakeDir}/htpasswd";
|
||||
in {
|
||||
# Apply the overlay so intake is included in pkgs.
|
||||
nixpkgs.overlays = [ flake.overlays.default ];
|
||||
|
||||
# Define a user group for access to the htpasswd file. nginx needs to be able to read it.
|
||||
users.groups.intake.members = mkIf (enabledUsers != {}) (enabledUserNames ++ [ "nginx" ]);
|
||||
|
||||
# Define an activation script that ensures that the htpasswd file exists.
|
||||
system.activationScripts.etc-intake = ''
|
||||
if [ ! -e ${intakeDir} ]; then
|
||||
${pkgs.coreutils}/bin/mkdir -p ${intakeDir};
|
||||
fi
|
||||
${pkgs.coreutils}/bin/chown root:root ${intakeDir}
|
||||
${pkgs.coreutils}/bin/chmod 755 ${intakeDir}
|
||||
if [ ! -e ${intakePwd} ]; then
|
||||
${pkgs.coreutils}/bin/touch ${intakePwd}
|
||||
fi
|
||||
${pkgs.coreutils}/bin/chown root:intake ${intakePwd}
|
||||
${pkgs.coreutils}/bin/chmod 660 ${intakePwd}
|
||||
'';
|
||||
|
||||
# Give every intake user the htpasswd wrapper, the shared packages, and the user-specific packages.
|
||||
users.users =
|
||||
let
|
||||
addPackagesToUser = userName: {
|
||||
${userName}.packages =
|
||||
[ htpasswdWrapper intake ]
|
||||
++ intakeCfg.extraPackages
|
||||
++ intakeCfg.users.${userName}.extraPackages;
|
||||
};
|
||||
in mkMerge (map addPackagesToUser enabledUserNames);
|
||||
|
||||
# Enable cron
|
||||
services.cron.enable = true;
|
||||
|
||||
# Define a user service for each configured user
|
||||
systemd.services =
|
||||
let
|
||||
runScript = userName: pkgs.writeShellScript "intake-run.sh" ''
|
||||
# Add the setuid wrapper directory so `crontab` is accessible
|
||||
export PATH="${config.security.wrapperDir}:$PATH"
|
||||
${pythonEnv}/bin/intake run -d /home/${userName}/.local/share/intake --port ${toString userPort.${userName}}
|
||||
'';
|
||||
# systemd service definition for a single user, given `services.intake.users.userName` = `userCfg`
|
||||
userServiceConfig = userName: userCfg: {
|
||||
"intake@${userName}" = {
|
||||
description = "Intake service for user ${userName}";
|
||||
script = "${runScript userName}";
|
||||
path = intakeCfg.extraPackages ++ userCfg.extraPackages;
|
||||
serviceConfig = {
|
||||
User = userName;
|
||||
Type = "simple";
|
||||
};
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
after = [ "network.target" ];
|
||||
enable = userCfg.enable;
|
||||
};
|
||||
};
|
||||
in mkMerge (mapAttrsToList userServiceConfig intakeCfg.users);
|
||||
|
||||
# Define an nginx reverse proxy to request auth
|
||||
services.nginx = mkIf (enabledUsers != {}) {
|
||||
enable = true;
|
||||
virtualHosts."intake" = mkIf (enabledUsers != {}) {
|
||||
listen = [ intakeCfg.listen ];
|
||||
locations."/" = {
|
||||
proxyPass = "http://127.0.0.1:$target_port";
|
||||
basicAuthFile = intakePwd;
|
||||
};
|
||||
extraConfig = foldl (acc: val: acc + val) "" (mapAttrsToList (userName: port: ''
|
||||
if ($remote_user = "${userName}") {
|
||||
set $target_port ${toString port};
|
||||
}
|
||||
'') userPort);
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
12
pyproject.toml
@@ -1,12 +0,0 @@
|
||||
[project]
|
||||
name = "intake"
|
||||
version = "1.1.0"
|
||||
|
||||
[project.scripts]
|
||||
intake = "intake.cli:main"
|
||||
|
||||
[tool.setuptools]
|
||||
packages = ["intake", "intake.templates"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
"intake.templates" = ["*.jinja2"]
|
10
shell.nix
@@ -1,10 +0,0 @@
|
||||
(import
|
||||
(
|
||||
let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
|
||||
fetchTarball {
|
||||
url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
|
||||
sha256 = lock.nodes.flake-compat.locked.narHash;
|
||||
}
|
||||
)
|
||||
{ src = ./.; }
|
||||
).shellNix
|
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "",
|
||||
"args": []
|
||||
}
|
||||
},
|
||||
"env": {
|
||||
}
|
||||
}
|
25
test/test_items.sh
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
|
||||
set -eu
|
||||
|
||||
go build -o tmp/intake
|
||||
rm tmp/intake.db* || true
|
||||
export INTAKE_DATA_DIR="tmp"
|
||||
tmp/intake migrate
|
||||
|
||||
tmp/intake source add -s feedtest
|
||||
tmp/intake item add -s feedtest --id "this-item-has-no-title"
|
||||
tmp/intake item add -s feedtest --title "This item has only a title"
|
||||
tmp/intake item add -s feedtest --title "Title and body" --body "This is the item body"
|
||||
tmp/intake item add -s feedtest --title "Title and link" --link "#"
|
||||
tmp/intake item add -s feedtest --title "Title, link, body" --link "#" --body "This is the body"
|
||||
tmp/intake item add -s feedtest --title "<b>HTML title</b>" --link "#" --body "<i>HTML body</i>"
|
||||
tmp/intake item add -s feedtest --title "Title and author" --author "Authorname"
|
||||
tmp/intake item add -s feedtest --title "Title, author, time" --author "Authorname" --time 1700000000
|
||||
tmp/intake item add -s feedtest --title "Title, time" --time 1737780324
|
||||
tmp/intake item add -s feedtest --title "Title, author, body" --author "Authorname" --body "Hello body!"
|
||||
tmp/intake item add -s feedtest --title "Title, author, time, body" --author "Authorname" --time 1700000000 --body "Hello body!"
|
||||
tmp/intake item add -s feedtest --title "Title, time, body" --time 1737780324 --body "Hello, body!"
|
||||
|
||||
tmp/intake source add -s spook
|
||||
tmp/intake action add -s spook -a spookier -- jq -c '.title = .title + "o"'
|
||||
tmp/intake item add -s spook --id boo --title "Boo" --action '{"spookier": true}'
|
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"demo": [
|
||||
"demo_basic_callback",
|
||||
"demo_logging",
|
||||
"demo_raw_sh"
|
||||
]
|
||||
}
|
@@ -1,29 +0,0 @@
|
||||
from pathlib import Path
|
||||
from typing import List, Callable
|
||||
|
||||
import pytest
|
||||
|
||||
from intake.source import LocalSource
|
||||
|
||||
|
||||
def clean_source(source_path: Path):
|
||||
for item in source_path.iterdir():
|
||||
if item.name.endswith(".item"):
|
||||
item.unlink()
|
||||
(source_path / "state").unlink(missing_ok=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def using_source() -> Callable:
|
||||
test_data = Path(__file__).parent
|
||||
sources: List[Path] = []
|
||||
|
||||
def _using_source(name: str):
|
||||
source_path = test_data / name
|
||||
clean_source(source_path)
|
||||
sources.append(source_path)
|
||||
return LocalSource(test_data, name)
|
||||
yield _using_source
|
||||
|
||||
for source_path in sources:
|
||||
clean_source(source_path)
|
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "true"
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,39 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse, json, sys
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("action")
|
||||
args = parser.parse_args()
|
||||
|
||||
print("args:", args, file=sys.stderr, flush=True)
|
||||
|
||||
if args.action == "fetch":
|
||||
print(
|
||||
json.dumps(
|
||||
{
|
||||
"id": "updateme",
|
||||
"title": "The count is at 1",
|
||||
"action": {
|
||||
"increment": 1,
|
||||
"decrement": "",
|
||||
},
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
if args.action == "increment":
|
||||
item = sys.stdin.readline()
|
||||
item = json.loads(item)
|
||||
item["action"]["increment"] += 1
|
||||
item["body"] = f"<p>{item['action']['increment']}</p>"
|
||||
item["title"] = f"The count is at {item['action']['increment']}"
|
||||
print(json.dumps(item))
|
||||
|
||||
if args.action == "decrement":
|
||||
item = sys.stdin.readline()
|
||||
item = json.loads(item)
|
||||
item["action"]["increment"] -= 1
|
||||
item["body"] = f"<p>{item['action']['increment']}</p>"
|
||||
item["title"] = f"The count is at {item['action']['increment']}"
|
||||
print(json.dumps(item))
|
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "./increment.py",
|
||||
"args": [
|
||||
"fetch"
|
||||
]
|
||||
},
|
||||
"increment": {
|
||||
"exe": "./increment.py",
|
||||
"args": [
|
||||
"increment"
|
||||
]
|
||||
},
|
||||
"decrement": {
|
||||
"exe": "./increment.py",
|
||||
"args": [
|
||||
"decrement"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,13 +0,0 @@
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "python3",
|
||||
"args": [
|
||||
"update.py"
|
||||
]
|
||||
}
|
||||
},
|
||||
"env": {
|
||||
"HELLO": "WORLD"
|
||||
}
|
||||
}
|
@@ -1,18 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
greeting = os.environ.get("HELLO", "MISSING")
|
||||
item = json.dumps({"id": "helloworld", "title": "Hello = " + greeting})
|
||||
sys.stdout.write(item[:10])
|
||||
sys.stdout.flush()
|
||||
|
||||
for i in range(5):
|
||||
sys.stderr.write(f"{i+1}...\n")
|
||||
sys.stderr.flush()
|
||||
time.sleep(1)
|
||||
|
||||
sys.stdout.write(item[10:])
|
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "sh",
|
||||
"args": [
|
||||
"-c",
|
||||
"echo {\\\"id\\\": \\\"$(date +%Y-%m-%d-%H-%M)\\\"}"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"action": {
|
||||
"fetch": {
|
||||
"exe": "./update.py",
|
||||
"args": ["fetch"]
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,21 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
A test source that "generates" items by returning them from the state file.
|
||||
This source exists for unit testing so that unit tests can easily manipulate
|
||||
what items are returned by writing them to the state file directly.
|
||||
"""
|
||||
|
||||
import argparse, json, os, sys
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("action")
|
||||
args = parser.parse_args()
|
||||
print("args:", args, file=sys.stderr, flush=True)
|
||||
|
||||
if args.action == "fetch":
|
||||
state_path = os.environ.get("STATE_PATH")
|
||||
with open(state_path) as f:
|
||||
state = json.load(f)
|
||||
for item in state["inbox"]:
|
||||
print(json.dumps(item))
|
@@ -1,63 +0,0 @@
|
||||
import json
|
||||
|
||||
from intake.source import fetch_items, update_items, LocalSource
|
||||
|
||||
|
||||
def test_default_source(using_source):
|
||||
source: LocalSource = using_source("default")
|
||||
fetch = fetch_items(source)
|
||||
assert len(fetch) == 0
|
||||
|
||||
def test_basic_lifecycle(using_source):
|
||||
source: LocalSource = using_source("test_inbox")
|
||||
state = {"inbox": [{"id": "first"}]}
|
||||
source.get_state_path().write_text(json.dumps(state))
|
||||
|
||||
# The inboxed item is returned from fetch
|
||||
fetch = fetch_items(source)
|
||||
assert len(fetch) == 1
|
||||
assert fetch[0]["id"] == "first"
|
||||
|
||||
# Update creates the item in the source
|
||||
update_items(source, fetch)
|
||||
assert source.get_item_path("first").exists()
|
||||
assert source.get_item("first").get("active") == True
|
||||
items = list(source.get_all_items())
|
||||
assert len(items) == 1
|
||||
assert items[0]["id"] == "first"
|
||||
|
||||
# A second fetch does not change anything
|
||||
fetch = fetch_items(source)
|
||||
update_items(source, fetch)
|
||||
assert source.get_item_path("first").exists()
|
||||
assert source.get_item("first").get("active") == True
|
||||
items = list(source.get_all_items())
|
||||
assert len(items) == 1
|
||||
assert items[0]["id"] == "first"
|
||||
|
||||
# The item remains after it is no longer in the feed
|
||||
state = {"inbox": [{"id": "second"}]}
|
||||
source.get_state_path().write_text(json.dumps(state))
|
||||
|
||||
fetch = fetch_items(source)
|
||||
update_items(source, fetch)
|
||||
assert source.get_item_path("first").exists()
|
||||
assert source.get_item("first").get("active") == True
|
||||
assert source.get_item_path("second").exists()
|
||||
assert source.get_item("second").get("active") == True
|
||||
items = list(source.get_all_items())
|
||||
assert len(items) == 2
|
||||
assert sorted(map(lambda i: i["id"], items)) == ["first", "second"]
|
||||
|
||||
# The item is removed on the next update when it is inactive
|
||||
first = source.get_item("first")
|
||||
first["active"] = False
|
||||
source.save_item(first)
|
||||
|
||||
fetch = fetch_items(source)
|
||||
update_items(source, fetch)
|
||||
assert not source.get_item_path("first").exists()
|
||||
assert source.get_item_path("second").exists()
|
||||
items = list(source.get_all_items())
|
||||
assert len(items) == 1
|
||||
assert items[0]["id"] == "second"
|
32
web/html/feed.html
Normal file
@@ -0,0 +1,32 @@
|
||||
{{ define "title" }}{{ if .Items }}({{ len .Items }}) {{ end }}Intake{{ end }}
|
||||
|
||||
{{ define "content" -}}
|
||||
<article class="center">
|
||||
<span class="item-title">
|
||||
<a href="/">Home</a>
|
||||
[<a href="#">Active</a> | <a href="#">All</a>]
|
||||
</span>
|
||||
</article>
|
||||
|
||||
{{ if .Items }}
|
||||
{{ range .Items }}
|
||||
{{ template "item" . }}
|
||||
{{ end }}
|
||||
|
||||
<article class="center">
|
||||
<button
|
||||
hx-post="/mass-deactivate"
|
||||
hx-vals='{{ massDeacVars .Items }}'
|
||||
hx-confirm="Deactivate {{ len .Items }} items?"
|
||||
>Deactivate All</button>
|
||||
</article>
|
||||
|
||||
{{ else }}
|
||||
<article class="center">
|
||||
<span class="item-title">Feed is empty</span>
|
||||
</article>
|
||||
{{ end }}
|
||||
{{/* end if .Items */}}
|
||||
|
||||
{{ end }}
|
||||
{{/* end define "content" */}}
|
20
web/html/home.html
Normal file
@@ -0,0 +1,20 @@
|
||||
{{ define "title" }}Intake{{ end }}
|
||||
|
||||
{{ define "content" -}}
|
||||
<article>
|
||||
<details>
|
||||
<summary><span class="item-title">Sources</span></summary>
|
||||
{{ if .Sources }}
|
||||
<table class="intake-sources">
|
||||
{{ range .Sources }}
|
||||
<tr>
|
||||
<td><a href="/source/{{ .Name }}">{{ .Name }}</a></td>
|
||||
</tr>
|
||||
{{ end }}
|
||||
</table>
|
||||
{{ else }}
|
||||
<p>No sources found.</p>
|
||||
{{ end }}
|
||||
</details>
|
||||
</article>
|
||||
{{- end }}
|
91
web/html/html.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package html
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"encoding/json"
|
||||
"html/template"
|
||||
"io"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/Jaculabilis/intake/core"
|
||||
)
|
||||
|
||||
func rawHtml(str string) template.HTML {
|
||||
return template.HTML(str)
|
||||
}
|
||||
|
||||
func tsToDate(t int) string {
|
||||
tm := time.Unix(int64(t), 0).UTC()
|
||||
return tm.Format(time.DateTime)
|
||||
}
|
||||
|
||||
func massDeactivateVals(items []core.Item) string {
|
||||
var shorts []string
|
||||
for _, item := range items {
|
||||
shorts = append(shorts, core.FormatAsShort(item))
|
||||
}
|
||||
massDeac := struct {
|
||||
Items []string `json:"items"`
|
||||
}{shorts}
|
||||
vals, err := json.Marshal(massDeac)
|
||||
if err != nil {
|
||||
log.Printf("error serializing mass deactivate list: %v", err)
|
||||
}
|
||||
return string(vals)
|
||||
}
|
||||
|
||||
var funcs = template.FuncMap{
|
||||
"raw": rawHtml,
|
||||
"tsToDate": tsToDate,
|
||||
"massDeacVars": massDeactivateVals,
|
||||
}
|
||||
|
||||
//go:embed intake.css
|
||||
var Stylesheet []byte
|
||||
|
||||
//go:embed htmx.org@2.0.4.js
|
||||
var Htmx []byte
|
||||
|
||||
//go:embed *.html
|
||||
var templates embed.FS
|
||||
|
||||
func load(files ...string) *template.Template {
|
||||
files = append([]string{"layout.html"}, files...)
|
||||
return template.Must(template.New("layout.html").Funcs(funcs).ParseFS(templates, files...))
|
||||
}
|
||||
|
||||
var home = load("home.html")
|
||||
|
||||
type SourceData struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
type HomeData struct {
|
||||
Sources []SourceData
|
||||
}
|
||||
|
||||
func Home(writer io.Writer, data HomeData) error {
|
||||
return home.Execute(writer, data)
|
||||
}
|
||||
|
||||
var feed = load("feed.html", "item.html")
|
||||
|
||||
type FeedData struct {
|
||||
Items []core.Item
|
||||
}
|
||||
|
||||
func Feed(writer io.Writer, data FeedData) error {
|
||||
return feed.Execute(writer, data)
|
||||
}
|
||||
|
||||
var item = load("itemPage.html", "item.html")
|
||||
|
||||
type ItemData struct {
|
||||
Item core.Item
|
||||
Open bool
|
||||
}
|
||||
|
||||
func Item(writer io.Writer, data ItemData) error {
|
||||
return item.Execute(writer, data)
|
||||
}
|
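A minimal standalone sketch of the payload shape massDeactivateVals emits for feed.html's hx-vals attribute (illustrative only, not part of the repository; the "example/first" short codes are hypothetical and assume core.FormatAsShort renders items as "source/id"):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Same anonymous-struct shape used by massDeactivateVals in html.go.
	payload := struct {
		Items []string `json:"items"`
	}{[]string{"example/first", "example/second"}} // hypothetical "source/id" short codes
	vals, err := json.Marshal(payload)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(vals)) // {"items":["example/first","example/second"]}
}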
1
web/html/htmx.org@2.0.4.js
Normal file
File diff suppressed because one or more lines are too long
85
web/html/intake.css
Normal file
@@ -0,0 +1,85 @@

main {
    max-width: 700px;
    margin: 0 auto;
}
article {
    border: 1px solid black; border-radius: 6px;
    padding: 5px;
    margin-bottom: 20px;
    word-break: break-word;
    display: flow-root;
}
.item-title {
    font-size: 1.4em;
}
.item-button {
    font-size: 1em;
    float:right;
    margin-left: 2px;
}
.item-link {
    text-decoration: none;
    float:right;
    font-size: 1em;
    padding: 2px 7px;
    border: 1px solid;
    border-radius: 2px;
}
.item-info {
    opacity: 0.7;
}
details[open] > summary > .item-button, details[open] > summary > .item-link {
    display: none;
}
details ~ .item-button, details ~ .item-link {
    display: none;
}
details[open] ~ .item-button, details[open] ~ .item-link {
    display: inline;
}
article img {
    max-width: 100%;
    height: auto;
}
button, summary {
    cursor: pointer;
}
summary {
    display: block;
}
summary:focus {
    outline: 1px dotted gray;
}
.strikethru span, .strikethru p {
    text-decoration: line-through;
}
.wide {
    width: 100%;
    resize: vertical;
}
.fade > * {
    opacity: 0.2;
}
pre {
    white-space: pre-wrap;
}
table.feed-control td {
    font-family: monospace; padding: 5px 10px;
}
.intake-sources td {
    padding-block: 0.4em;
}
.intake-sources form {
    margin: 0
}
article.center {
    text-align: center;
}
article textarea {
    width: 100%;
    resize: vertical;
}
span.error-message {
    color: red;
}
70
web/html/item.html
Normal file
@@ -0,0 +1,70 @@
{{ define "item-buttons" -}}
<button
    class="item-button"
    title="Deactivate {{ .Source }}/{{ .Id }}"
    hx-target="closest article"
    hx-select="article"
    hx-delete="/item/{{ .Source }}/{{ .Id }}"
>✕</button>
<button
    class="item-button"
    title="Punt {{ .Source }}/{{ .Id }}"
>↷</button>
{{- if .Link }}<a class="item-link" href="{{ .Link }}" target="_blank">⇗</a>
{{ end -}}
{{ range $key, $_ := .Action }}
<button
    class="item-button"
    title="{{ $key }}"
    hx-target="closest article"
    hx-select="article"
    hx-disabled-elt="this"
    hx-post="/item/{{ $.Source }}/{{ $.Id }}/action/{{ $key }}"
>{{ $key }}</button>
{{ end -}}
{{ end }}

{{ define "item-title" -}}
<span class="item-title">{{ or .Title .Id | raw }}</span>
{{- end }}

{{ define "item-class" -}}{{ if not .Active }}strikethru {{ end }}{{ if not .Active }}fade{{ end }}{{- end}}

{{ define "item" -}}
<article
    id="{{ .Source }}-{{ .Id }}"
    class="{{ template "item-class" . }}"
>

{{- /* The item title is a clickable <summary> if there is body content */ -}}
{{ if .Body }}
<details>
    <summary>
        {{ template "item-buttons" . }}
        {{ template "item-title" . }}
    </summary>
    <p>{{ raw .Body }}</p>
</details>
{{ template "item-buttons" . }}
{{- else -}}
{{ template "item-buttons" . }}
{{ template "item-title" . }}<br>
{{ end }}
{{- /* end if .Body */ -}}

{{- /* author/time footer line */ -}}
{{ if or .Author .Time }}
<span class="item-info">
    {{ .Author }}
    {{ .Time | tsToDate }}
</span><br>
{{ end -}}

{{- /* source/id/created footer line */ -}}
<span class="item-info">
    <a href="/item/{{ .Source }}/{{ .Id }}">{{ .Source }}/{{ .Id }}</a>
    {{ .Created | tsToDate }}
</span>
</article>
{{ end -}}
{{- /* end define "item" */ -}}
5
web/html/itemPage.html
Normal file
@@ -0,0 +1,5 @@
{{ define "title" }}{{ if .Item.Title }}{{ .Item.Title }}{{ else }}{{ .Item.Source }}/{{ .Item.Id }}{{ end }} - Intake [{{ .Item.Source }}]{{ end }}

{{ define "content" -}}
{{ template "item" .Item }}
{{- end }}
16
web/html/layout.html
Normal file
@@ -0,0 +1,16 @@
<!DOCTYPE html>
<html>
<head>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>{{ block "title" . }}Intake{{ end }}</title>
    <link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAAOwgAADsIBFShKgAAAABh0RVh0U29mdHdhcmUAcGFpbnQubmV0IDQuMS41ZEdYUgAAAGFJREFUOE+lkFEKwDAIxXrzXXB3ckMm9EnAV/YRCxFCcUXEL3Jc77NDjpDA/VGL3RFWYEICfeGC8oQc9IPuCAnQDcoRVmBCAn3hgvKEHPSD7ggJ0A3KEVZgQgJ94YLSJ9YDUzNGDXGZ/JEAAAAASUVORK5CYII=">
    <link rel="stylesheet" href="/style.css">
    <script src="/htmx.org@2.0.4.js"></script>
    <meta name="htmx-config" content='{"ignoreTitle":true,"defaultSwapStyle":"outerHTML"}'>
</head>
<body>
    <main>
        {{ template "content" . }}
    </main>
</body>
</html>
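layout.html uses {{ block "title" . }} so that page templates can override the tab title while falling back to "Intake". A minimal standalone sketch of that mechanism (illustrative only; the page-level snippet is hypothetical):

package main

import (
	"html/template"
	"os"
)

func main() {
	layout := `<title>{{ block "title" . }}Intake{{ end }}</title>`
	page := `{{ define "title" }}Feed - Intake{{ end }}` // hypothetical page-level override
	t := template.Must(template.New("layout").Parse(layout))
	template.Must(t.Parse(page)) // a later non-empty definition replaces the block default
	if err := t.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
	// Prints: <title>Feed - Intake</title>
}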
119
web/item.go
Normal file
@@ -0,0 +1,119 @@
package web

import (
	"encoding/json"
	"log"
	"net/http"
	"strings"
	"time"

	"github.com/Jaculabilis/intake/core"
	"github.com/Jaculabilis/intake/web/html"
)

func (env *Env) getItem(writer http.ResponseWriter, req *http.Request) {
	source := req.PathValue("source")
	id := req.PathValue("id")

	item, err := core.GetItem(env.db, source, id)
	if err != nil {
		writer.Write([]byte(err.Error()))
		return
	}
	html.Item(writer, html.ItemData{Item: item})
}

func (env *Env) deleteItem(writer http.ResponseWriter, req *http.Request) {
	source := req.PathValue("source")
	id := req.PathValue("id")

	_, err := core.DeactivateItem(env.db, source, id)
	if err != nil {
		writer.Write([]byte(err.Error()))
		return
	}
	item, err := core.GetItem(env.db, source, id)
	if err != nil {
		writer.Write([]byte(err.Error()))
		return
	}
	html.Item(writer, html.ItemData{Item: item})
}

func (env *Env) doAction(writer http.ResponseWriter, req *http.Request) {
	source := req.PathValue("source")
	id := req.PathValue("id")
	action := req.PathValue("action")

	item, err := core.GetItem(env.db, source, id)
	if err != nil {
		http.Error(writer, err.Error(), 500)
		return
	}

	if item.Action[action] == nil {
		http.Error(writer, "no such action", 500)
		return
	}

	argv, err := core.GetArgvForAction(env.db, source, action)
	if err != nil {
		http.Error(writer, err.Error(), 500)
		return
	}

	itemJson, err := json.Marshal(item)
	if err != nil {
		http.Error(writer, err.Error(), 500)
		return
	}

	res, err := core.Execute(source, argv, nil, string(itemJson), time.Minute)
	if err != nil {
		http.Error(writer, err.Error(), 500)
		return
	}
	if len(res) != 1 {
		http.Error(writer, "not exactly one item", 500)
		return
	}
	newItem := res[0]
	core.BackfillItem(&newItem, &item)

	if err = core.UpdateItems(env.db, []core.Item{newItem}); err != nil {
		http.Error(writer, err.Error(), 500)
		return
	}

	html.Item(writer, html.ItemData{Item: newItem})
}

func (env *Env) massDeactivate(writer http.ResponseWriter, req *http.Request) {
	if err := req.ParseForm(); err != nil {
		log.Printf("error parsing form data: %v", err)
		http.Error(writer, "", http.StatusBadRequest)
		return
	}
	for _, item := range req.PostForm["items"] {
		i := strings.Index(item, "/")
		if i == -1 {
			log.Printf("error: invalid source/item: %s", item)
			http.Error(writer, "", http.StatusBadRequest)
			return
		}
	}
	for _, item := range req.PostForm["items"] {
		i := strings.Index(item, "/")
		source := item[:i]
		id := item[i+1:]
		active, err := core.DeactivateItem(env.db, source, id)
		if err != nil {
			log.Printf("error: failed to deactivate %s/%s: %v", source, id, err)
		}
		if active {
			log.Printf("deactivated %s/%s", source, id)
		}
	}
	writer.Header()["HX-Refresh"] = []string{"true"}
	http.Error(writer, "ok", http.StatusNoContent)
}
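massDeactivate reads repeated "items" form fields, each a "source/id" pair, which is presumably how htmx expands the {"items": [...]} array supplied via hx-vals. A request-side sketch of that body (illustrative only; the values are hypothetical):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Build the form body that req.PostForm["items"] reads back in massDeactivate.
	form := url.Values{}
	form.Add("items", "example/first")
	form.Add("items", "example/second")
	fmt.Println(form.Encode()) // items=example%2Ffirst&items=example%2Fsecond
	// POST this body to /mass-deactivate with
	// Content-Type: application/x-www-form-urlencoded to exercise the handler.
}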
40
web/main.go
Normal file
@@ -0,0 +1,40 @@
package web

import (
	"log"
	"net"
	"net/http"

	"github.com/Jaculabilis/intake/core"
)

type Env struct {
	db *core.DB
}

func logged(handler http.HandlerFunc) http.HandlerFunc {
	return func(writer http.ResponseWriter, req *http.Request) {
		log.Printf("%s %s", req.Method, req.URL.Path)
		handler(writer, req)
	}
}

func handleFunc(pattern string, handler http.HandlerFunc) {
	http.HandleFunc(pattern, logged(handler))
}

func RunServer(db *core.DB, addr string, port string) {
	env := &Env{db}
	bind := net.JoinHostPort(addr, port)

	handleFunc("GET /", env.getRoot)
	handleFunc("GET /style.css", env.getStyle)
	handleFunc("GET /htmx.org@2.0.4.js", env.getScript)
	handleFunc("GET /source/{source}", env.getSource)
	handleFunc("GET /item/{source}/{id}", env.getItem)
	handleFunc("DELETE /item/{source}/{id}", env.deleteItem)
	handleFunc("POST /item/{source}/{id}/action/{action}", env.doAction)
	handleFunc("POST /mass-deactivate", env.massDeactivate)

	log.Fatal(http.ListenAndServe(bind, nil))
}
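The route patterns registered in RunServer rely on the Go 1.22+ ServeMux syntax: an HTTP method prefix plus {wildcard} path segments, read back with req.PathValue. A self-contained sketch of just that routing feature (illustrative only; the listen address is arbitrary):

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// Same pattern style as RunServer; the handler body is illustrative only.
	mux.HandleFunc("GET /item/{source}/{id}", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "source=%s id=%s\n", r.PathValue("source"), r.PathValue("id"))
	})
	log.Fatal(http.ListenAndServe("localhost:8080", mux))
}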
41
web/root.go
Normal file
@@ -0,0 +1,41 @@
package web

import (
	"net/http"

	"github.com/Jaculabilis/intake/core"
	"github.com/Jaculabilis/intake/web/html"
)

func (env *Env) getRoot(writer http.ResponseWriter, req *http.Request) {
	if req.URL.Path != "/" {
		http.NotFound(writer, req)
		return
	}

	names, err := core.GetSources(env.db)
	if err != nil {
		writer.Write([]byte(err.Error()))
	}

	var sources []html.SourceData
	for _, name := range names {
		sources = append(sources, html.SourceData{Name: name})
	}
	data := html.HomeData{
		Sources: sources,
	}
	html.Home(writer, data)
}

func (env *Env) getStyle(writer http.ResponseWriter, req *http.Request) {
	writer.Header()["Cache-Control"] = []string{"public, max-age=86400"}
	writer.Header()["Content-Type"] = []string{"text/css; charset=utf-8"}
	writer.Write(html.Stylesheet)
}

func (env *Env) getScript(writer http.ResponseWriter, req *http.Request) {
	writer.Header()["Cache-Control"] = []string{"public, max-age=86400"}
	writer.Header()["Content-Type"] = []string{"application/javascript; charset=utf-8"}
	writer.Write(html.Htmx)
}
27
web/source.go
Normal file
@@ -0,0 +1,27 @@
package web

import (
	"net/http"

	"github.com/Jaculabilis/intake/core"
	"github.com/Jaculabilis/intake/web/html"
)

func (env *Env) getSource(writer http.ResponseWriter, req *http.Request) {
	source := req.PathValue("source")
	if source == "" {
		http.NotFound(writer, req)
		return
	}

	// TODO this needs to properly error if the source doesn't exist instead of just returning []
	items, err := core.GetAllItemsForSource(env.db, source)
	if err != nil {
		http.NotFound(writer, req)
		return
	}
	data := html.FeedData{
		Items: items,
	}
	html.Feed(writer, data)
}