author     Fulgen301 <tokmajigeorge@gmail.com>    2018-08-26 20:08:54 +0200
committer  Fulgen301 <tokmajigeorge@gmail.com>    2018-08-26 20:08:54 +0200
commit     305383c4b85dd6c826cb41faa42fd97015f33067 (patch)
tree       ead97e4c63de382a6ca20a026f2d9b12b21fc8a6
parent     50622f038d63490277d610a83fe095ee000f2b98 (diff)
download   parry-305383c4b85dd6c826cb41faa42fd97015f33067.tar.gz
           parry-305383c4b85dd6c826cb41faa42fd97015f33067.zip
Rewrite database system with SQLAlchemy, add /api/auth, add /api/uploads/<id>/comments
-rw-r--r--   database.py                                   101
-rw-r--r--   handlers.py                                   118
-rw-r--r--   helpers.py                                     82
-rw-r--r--   routes/__init__.py                              4
-rw-r--r--   routes/__pycache__/__init__.cpython-37.pyc    bin 0 -> 185 bytes
-rw-r--r--   routes/__pycache__/auth.cpython-37.pyc        bin 0 -> 812 bytes
-rw-r--r--   routes/__pycache__/media.cpython-37.pyc       bin 0 -> 1110 bytes
-rw-r--r--   routes/__pycache__/uploads.cpython-37.pyc     bin 0 -> 4475 bytes
-rw-r--r--   routes/auth.py                                 18
-rw-r--r--   routes/media.py                                39
-rw-r--r--   routes/uploads.py                             153
11 files changed, 392 insertions, 123 deletions
diff --git a/database.py b/database.py
new file mode 100644
index 0000000..f5eea45
--- /dev/null
+++ b/database.py
@@ -0,0 +1,101 @@
+import os
+import sys
+from bson.objectid import ObjectId
+from datetime import datetime
+import sqlalchemy as db
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship, sessionmaker
+
+class Types(object):
+ class ObjectId(db.TypeDecorator):
+ impl = db.UnicodeText
+
+ def process_bind_param(self, value : ObjectId, dialect) -> str:
+ return str(value)
+
+ def process_result_value(self, value : str, dialect) -> ObjectId:
+ if value in [None, "None"]:
+ return ObjectId("0" * 24)
+ return ObjectId(value)
+
+ class List(db.TypeDecorator):
+ impl = db.UnicodeText
+
+ def process_bind_param(self, value : list, dialect) -> str:
+ return ";".join(i.replace(";", ",") for i in value)
+
+ def process_result_value(self, value : str, dialect) -> list:
+ return value.split(";")
+
+Base = declarative_base()
+
+class User(Base):
+ __tablename__ = "users"
+ id = db.Column(Types.ObjectId, primary_key=True, default=ObjectId)
+ name = db.Column(db.UnicodeText, nullable=False)
+ email = db.Column(db.UnicodeText)
+ hash = db.Column(db.String(512), nullable=False)
+
+class Upload(Base):
+ __tablename__ = "uploads"
+ id = db.Column(Types.ObjectId, primary_key=True, default=ObjectId)
+ title = db.Column(db.UnicodeText, nullable=False)
+ author_id = db.Column(Types.ObjectId, db.ForeignKey("users.id"), default=lambda: ObjectId("0"*24))
+ description = db.Column(db.UnicodeText, default="")
+ slug = db.Column(db.Text, nullable=False)
+ tags = db.Column(Types.List, default=list)
+ created_at = db.Column(db.DateTime, nullable=False, default=datetime.now)
+ updated_at = db.Column(db.DateTime, nullable=False, default=datetime.now, onupdate=datetime.now)
+ _v = db.Column(db.Integer, default=0)
+
+ author = relationship("User")
+ #dependency = relationship("Dependency")
+
+class File(Base):
+ __tablename__ = "files"
+ id = db.Column(Types.ObjectId, primary_key=True, default=ObjectId)
+ name = db.Column(db.Unicode(255), nullable=False)
+ hash = db.Column(db.String(40), nullable=False)
+ content_type = db.Column(db.String(255), default="application/octet-stream")
+ length = db.Column(db.BigInteger, default=0)
+ date = db.Column(db.DateTime, nullable=False, default=datetime.now)
+ upload_id = db.Column(Types.ObjectId, db.ForeignKey("uploads.id"))
+ download_url = db.Column(db.Text)
+
+ upload = relationship("Upload")
+
+class Comment(Base):
+ __tablename__ = "comments"
+ id = db.Column(Types.ObjectId, primary_key=True, default=ObjectId)
+ author_id = db.Column(Types.ObjectId, db.ForeignKey("users.id"))
+ upload_id = db.Column(Types.ObjectId, db.ForeignKey("uploads.id"))
+ created_at = db.Column(db.DateTime, nullable=False, default=datetime.now)
+ updated_at = db.Column(db.DateTime, nullable=False, default=datetime.now, onupdate=datetime.now)
+ body = db.Column(db.UnicodeText, nullable=False)
+
+ author = relationship("User")
+ upload = relationship("Upload")
+
+class Voting(Base):
+ __tablename__ = "votings"
+ id = db.Column(Types.ObjectId, primary_key=True, default=ObjectId)
+ author_id = db.Column(Types.ObjectId, db.ForeignKey("users.id"))
+ target_id = db.Column(Types.ObjectId, db.ForeignKey("uploads.id"))
+
+ author = relationship("User")
+ target = relationship("Upload")
+ pass
+
+#class Dependency(Base):
+ #__tablename__ = "dependencies"
+ #id = db.Column(db.Integer, primary_key=True, default=ObjectId)
+ #target_id = db.Column(Types.ObjectId, db.ForeignKey("uploads.id"))
+ ##dependency_id = db.Column(Types.ObjectId, db.ForeignKey("uploads.id"))
+
+ #target = relationship("Upload", foreign_keys=[target_id])
+ ##dependency = relationship("Upload", foreign_keys=[dependency_id])
+
+engine = db.create_engine("sqlite:///parry.db")
+Base.metadata.bind = engine
+Base.metadata.create_all(engine)
+DBSession = sessionmaker(bind=engine)
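
The new module can be exercised on its own. A minimal sketch, assuming it is importable as database, that sqlalchemy and bson (pymongo) are installed, and that the working directory is writable for the sqlite parry.db file; all names and values below are placeholders:

    from bson.objectid import ObjectId
    from database import DBSession, Upload, User

    session = DBSession()
    author = User(name="example", hash="0" * 128)                  # hash holds a sha512 hex digest
    entry = Upload(title="Example entry", slug="exampleentry",
                   author=author, tags=["example"])                # Types.List stores this as "example"
    session.add(entry)                                             # the author row is cascaded in via the relationship
    session.commit()

    session.expire_all()                                           # force a reload so the key round-trips through the column type
    # Types.ObjectId persists the key as text and rebuilds a bson ObjectId when reading it back
    print(isinstance(session.query(Upload).first().id, ObjectId))  # True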
diff --git a/handlers.py b/handlers.py
index 8139c44..bfca09b 100644
--- a/handlers.py
+++ b/handlers.py
@@ -41,74 +41,64 @@ class Website(Site):
def process(self):
r = requests.get(self.address)
if r:
- i = 0
+ session = DBSession()
for m in self.regexes["list"].finditer(r.text):
- #if i > 10:
- #break
- print(m["title"])
- id = str(ObjectId.from_datetime(datetime.strptime(m["updatedAt"], self.date_format)))
- if id not in database["entries"]:
- entry = {
- "title" : html.unescape(m["title"]),
- "voting" : {
- "sum" : round(float(m["niveau"]), 0) if "niveau" in m.groups() else 0,
- "count" : 0,
- "votes" : None
- },
- "tags" : [self.tag],
- "files" : [],
- "dependencies" : [],
- "deleted" : False,
- "description" : "",
- "pic" : None,
- "author" : {
- "username" : html.unescape(m["author"])
- },
- "slug" : "",
- "updatedAt" : datetime.strptime(m["updatedAt"], self.date_format).isoformat(),
- "__v" : 1,
- "comments" : None,
- "id" : id,
- "__intern" : {
- "entryURL" : self.prefix + m["entryURL"]
- }
- }
+ id = ObjectId.from_datetime(datetime.strptime(m["updatedAt"], self.date_format))
+ try:
+ entry = session.query(Upload).filter_by(id=id).one()
+ try:
+ entry.author = session.query(User).filter_by(name=m["author"]).one()
+ except db.orm.exc.NoResultFound:
+ pass
- downloadURL = self.prefix + m["downloadURL"]
+ continue
+ except db.orm.exc.NoResultFound:
+ pass
+
+ entry = Upload(
+ id=id,
+ title=html.unescape(m["title"]),
+ tags=[self.tag],
+ slug="".join(i for i in html.unescape(m["title"]).lower() if i in string.ascii_letters),
+ updated_at=datetime.strptime(m["updatedAt"], self.date_format),
+ _v=1
+ )
+
+ try:
+ entry.author = session.query(User).filter_by(name=m["author"]).one()
+ except db.orm.exc.NoResultFound:
+ pass
+
+ downloadURL = self.prefix + m["downloadURL"]
+ try:
r = requests.get(downloadURL, stream=True, allow_redirects=True)
- if not r:
- continue
-
- locale.setlocale(locale.LC_ALL, "C")
- entry["files"] = [{
- "metadata" : {
- "hashes" : {
- "sha1" : calculateHashForResource(r).hexdigest()
- },
- "downloadURL" : downloadURL
- },
- "aliases" : None,
- "deleted" : False,
- "_id" : entry["id"],
- "filename" : m["downloadURL"].split("/")[-1],
- "content-type" : r.headers.get("Content-Type", "application/octet-stream"),
- "length" : int(r.headers["Content-Length"]),
- "chunkSize" : 4096, # what is this for
- "uploadDate" : datetime.strptime(r.headers["Date"], "%a, %d %b %Y %H:%M:%S GMT").isoformat(),
- }
- ]
-
- locale.setlocale(locale.LC_ALL, "")
+ except requests.exceptions.ConnectionError:
+ continue
- entry["createdAt"] = entry["updatedAt"]
- entry["slug"] = "".join(filter(lambda x: x in string.ascii_letters, entry["title"].lower()))
-
- r = requests.get(entry["__intern"]["entryURL"])
- d = self.regexes["desc"].match(r.text)
- if d and "description" in d.groups():
- entry["description"] = html.unescape(d["description"])
- yield entry
- i += 1
+ if not r:
+ continue
+
+ locale.setlocale(locale.LC_ALL, "C")
+ session.add(File(
+ hash=calculateHashForResource(r).hexdigest(),
+ id=entry.id,
+ name=downloadURL.split("/")[-1],
+ content_type=r.headers.get("Content-Type", "application/octet-stream"),
+ length=int(r.headers["Content-Length"]),
+ date=datetime.strptime(r.headers["Date"], "%a, %d %b %Y %H:%M:%S GMT"),
+ download_url=downloadURL,
+ upload=entry
+ ))
+
+ locale.setlocale(locale.LC_ALL, "")
+
+ r = requests.get(self.prefix + m["entryURL"])
+ d = self.regexes["desc"].match(r.text)
+ if d and "description" in d.groups():
+ entry.description = html.unescape(d["description"])
+
+ session.add(entry)
+ session.commit()
class CCAN(Website):
regexes = {
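
The rewritten scraper above derives its primary key from the entry's updatedAt timestamp via ObjectId.from_datetime, so a re-run resolves to the same row instead of inserting a duplicate. A short illustration of that property, independent of the scraper:

    from datetime import datetime, timezone
    from bson.objectid import ObjectId

    ts = datetime(2018, 8, 26, 20, 8, 54, tzinfo=timezone.utc)
    a = ObjectId.from_datetime(ts)
    b = ObjectId.from_datetime(ts)
    print(a == b)               # True: only the timestamp bytes are set, the rest are zero
    print(a.generation_time)    # the timestamp can be read back from the id (in UTC)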
diff --git a/helpers.py b/helpers.py
index 0e9da70..66c733b 100644
--- a/helpers.py
+++ b/helpers.py
@@ -15,43 +15,79 @@
import sys
import os, re, json, math
import requests, hashlib
-from bottle import route, run, Bottle, request, static_file, response, hook, HTTPResponse
-from bson.objectid import ObjectId
+from bottle import route, run, Bottle, request, static_file, response, hook, HTTPResponse, JSONPlugin, install
import threading
os.chdir(os.path.dirname(__file__))
+from .database import *
-io_lock = threading.Lock()
+BLOCKSIZE = 1024 ** 2
-def loadJSON(name : str) -> dict:
- try:
- with open(name, "r") as fobj:
- return json.load(fobj)
-
- except (FileNotFoundError, json.decoder.JSONDecodeError):
- return {}
+locks = {
+ "io" : threading.Lock()
+ }
-def saveJSON(obj : dict, name : str) -> None:
- with io_lock:
- with open(name, "w") as fobj:
- json.dump(obj, fobj)
+def with_lock(k):
+ def wrapper(f):
+ def func(*args, **kwargs):
+ with locks[k]:
+ return f(*args, **kwargs)
+
+ return func
+ return wrapper
-database = loadJSON("database.json")
-if "entries" not in database:
- database["entries"] = {}
+def calculateHashForResource(resource : requests.Response) -> object:
+ hashobj = hashlib.sha1()
+
+ calculateHashForFile(resource.raw, hashobj)
+ assert(resource.raw.tell())
+ if "content-length" not in resource.headers:
+ resource.headers["Content-Length"] = resource.raw.tell()
+ return hashobj
-def calculateHashForResource(resource : requests.Response, hashobj : object = None) -> object:
+def calculateHashForFile(file, hashobj : object = None) -> object:
if hashobj is None:
hashobj = hashlib.sha1()
- l = 0
- for block in resource.iter_content(4096):
- l += len(block)
+ while True:
+ block = file.read(BLOCKSIZE)
+ if not block:
+ break
+
hashobj.update(block)
- if "content-length" not in resource.headers:
- resource.headers["Content-Length"] = l
return hashobj
def notAllowed():
raise HTTPResponse(f"Cannot {request.method} {request.path}")
+
+@hook('after_request')
+def enable_cors():
+ response.headers["Access-Control-Allow-Origin"] = "*"
+
+# Auth
+
+def calculateUserHash(username : str, password : str) -> object:
+ return hashlib.sha512(hashlib.sha512(username.encode("utf-8")).digest() + hashlib.sha512(password.encode("utf-8")).digest())
+
+def auth_basic(f):
+ def checkAuth(*args, **kwargs):
+ session = DBSession()
+ try:
+ User.query.filter_by(name=requests.forms["username"], hash=calculateUserHash(request.forms["username"], request.forms["password"]).hexdigest()).first()
+ except db.orm.exc.NoResultFound:
+ return HTTPResponse(status=401)
+
+ del request.forms["password"]
+ return f(*args, **kwargs)
+ return checkAuth
+
+class ParryEncoder(json.JSONEncoder):
+ _default = json.JSONEncoder.default
+ def default(self, obj):
+ if isinstance(obj, ObjectId):
+ return str(obj)
+
+ return self._default(obj)
+
+install(JSONPlugin(json_dumps=lambda s: json.dumps(s, cls=ParryEncoder)))
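
calculateUserHash above is what ends up in the users.hash column: sha512 over the concatenation of the individual sha512 digests of username and password. Restated on its own (to avoid importing the Bottle app) with placeholder credentials:

    import hashlib

    def calculateUserHash(username: str, password: str):
        return hashlib.sha512(hashlib.sha512(username.encode("utf-8")).digest()
                              + hashlib.sha512(password.encode("utf-8")).digest())

    digest = calculateUserHash("alice", "secret").hexdigest()
    print(len(digest))          # 128 hex characters, well within the String(512) column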
diff --git a/routes/__init__.py b/routes/__init__.py
index a92e40a..034e4d0 100644
--- a/routes/__init__.py
+++ b/routes/__init__.py
@@ -12,6 +12,6 @@
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-from .uploads import *
-from .media import *
from .auth import *
+from .media import *
+from .uploads import *
diff --git a/routes/__pycache__/__init__.cpython-37.pyc b/routes/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..b48be50
--- /dev/null
+++ b/routes/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/routes/__pycache__/auth.cpython-37.pyc b/routes/__pycache__/auth.cpython-37.pyc
new file mode 100644
index 0000000..22ab737
--- /dev/null
+++ b/routes/__pycache__/auth.cpython-37.pyc
Binary files differ
diff --git a/routes/__pycache__/media.cpython-37.pyc b/routes/__pycache__/media.cpython-37.pyc
new file mode 100644
index 0000000..1c53de3
--- /dev/null
+++ b/routes/__pycache__/media.cpython-37.pyc
Binary files differ
diff --git a/routes/__pycache__/uploads.cpython-37.pyc b/routes/__pycache__/uploads.cpython-37.pyc
new file mode 100644
index 0000000..4ebc636
--- /dev/null
+++ b/routes/__pycache__/uploads.cpython-37.pyc
Binary files differ
diff --git a/routes/auth.py b/routes/auth.py
index ebf52c0..a34aaa5 100644
--- a/routes/auth.py
+++ b/routes/auth.py
@@ -15,5 +15,19 @@
from ..helpers import *
@route("/api/auth", method="POST")
-def post_auth():
- raise HTTPResponse(status=501)
+def post_auth_new():
+ session = DBSession()
+ try:
+ username = request.forms.username
+ password = request.forms.password
+ except KeyError as e:
+ raise HTTPResponse("Username or password missing", 400)
+
+ hash = calculateUserHash(username, password).hexdigest()
+ try:
+ session.query(User).filter(User.name == username or User.hash == hash).one()
+ raise HTTPResponse("User already exists", status=410)
+ except db.orm.exc.NoResultFound:
+ session.add(User(name=username, hash=hash))
+ session.commit()
+ return HTTPResponse(status=201)
diff --git a/routes/media.py b/routes/media.py
index 6c3c831..b9828a8 100644
--- a/routes/media.py
+++ b/routes/media.py
@@ -19,31 +19,24 @@ from datetime import datetime
def get_media():
notAllowed()
-@route("/api/media", method="POST")
-def post_media():
- try:
- entry = database["entries"][request.forms["id"]]
- except KeyError as e:
- raise HTTPResponse("Invalid id", 400) from e
-
- for f in request.files:
- entry["files"].append({})
-
- id = ObjectId()
@route("/api/media/<id>")
def get_media_id(id):
- for entry in database["entries"].values():
- for file in entry["files"]:
- if file["_id"] == id:
- response.set_header("Content-Type", file["content-type"])
- response.set_header("Content-Length", file["length"])
- response.set_header("Date", datetime.fromisoformat(file["uploadDate"]).strftime("%a, %d %b %Y %H:%M:%S GMT"))
- response.set_header("Content-Disposition", f"attachment; filename=\"{file['filename']}\"")
- if request.method == "GET":
- if "downloadURL" in file["metadata"]:
- return HTTPResponse(status=302, headers={"Location" : file["metadata"]["downloadURL"]})
- else:
- return static_file(file["filename"], root=f"media/{id}")
+ session = DBSession()
+ try:
+ file = session.query(File).filter_by(id=id).one()
+ except db.orm.exc.NoResultFound:
+ raise HTTPResponse(status=404)
+
+ response.set_header("Content-Type", file.content_type)
+ response.set_header("Content-Length", file.length)
+ response.set_header("Date", file.date.strftime("%a, %d %b %Y %H:%M:%S GMT"))
+ response.set_header("Content-Disposition", f"attachment; filename=\"{file.name}\"")
+ if request.method == "GET":
+ if file.download_url:
+ #return requests.request(request.method, file.download_url, allow_redirects=True)
+ return HTTPResponse(status=302, headers={"Location" : file.download_url})
+ else:
+ return static_file(file.id, os.path.join(os.getcwd(), "media"), file.content_type, file.name)
raise HTTPResponse(status=404)
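
The rewritten media route looks the file up by its ObjectId and either redirects to the stored download_url or serves the local copy from the media directory. A sketch of fetching one, with a hypothetical file id and the same assumed host as above:

    import requests

    file_id = "5b82f1d60000000000000000"      # hypothetical ObjectId string
    r = requests.get(f"http://localhost:8080/api/media/{file_id}", allow_redirects=False)
    print(r.status_code)                      # 302 redirect to the original download URL for mirrored files, 404 for an unknown id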
diff --git a/routes/uploads.py b/routes/uploads.py
index 703d3d1..391827d 100644
--- a/routes/uploads.py
+++ b/routes/uploads.py
@@ -13,6 +13,66 @@
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from ..helpers import *
+import string, magic
+
+def _add_upload(entry : Upload, session : DBSession):
+ return {
+ "voting" : {
+ "sum" : 0,
+ "count" : 0,
+ "votes" : None
+ },
+ "id" : entry.id,
+ "title" : entry.title,
+ "author" : {
+ "id" : entry.author.id if entry.author is not None else "0" * 24,
+ "username" : entry.author.username if entry.author is not None else "N/A"
+ },
+ "tags" : entry.tags,
+ "files" : [{
+ "metadata" : {
+ "hashes" : {
+ "sha1" : file.hash
+ }
+ },
+ "aliases" : None,
+ "deleted" : False,
+ "id" : file.id,
+ "filename" : file.name,
+ "content-type" : file.content_type,
+ "length" : file.length,
+ "chunkSize" : BLOCKSIZE,
+ "uploadData" : file.date.isoformat()
+ } for file in session.query(File).filter_by(upload=entry)
+ ],
+ "dependencies" : [], #TODO
+ "deleted" : False,
+ "description" : entry.description,
+ "pic" : None, #TODO
+ "slug" : entry.slug,
+ "createdAt" : entry.created_at.isoformat(),
+ "updatedAt" : entry.updated_at.isoformat(),
+ "__v" : entry._v,
+ "comments" : [{
+ "voting" : {
+ "sum" : 0,
+ "count" : 0,
+ "votes" : None
+ },
+ "deleted" : False,
+ "id" : comment.id,
+ "body" : comment.body,
+ "author" : {
+ "id" : comment.author.id,
+ "username" : comment.author.username
+ },
+ "upload" : comment.upload.id,
+ "createdAt" : comment.created_at.isoformat(),
+ "updatedAt" : comment.updated_at.isoformat()
+ } for comment in session.query(Comment).filter_by(upload=entry)
+ ]
+ }
+
@route("/api/uploads")
def get_uploads():
@@ -26,12 +86,10 @@ def get_uploads():
"uploads" : []
}
- for entry in database["entries"].values():
- if "__intern" in entry:
- entry = entry.copy()
- del entry["__intern"]
-
- ret["uploads"].append(entry)
+ session = DBSession()
+ for entry in session.query(Upload).order_by(Upload.updated_at.desc()):
+ ret["uploads"].append(_add_upload(entry, session))
+
ret["pagination"]["total"] = ret["pagination"]["limit"] = len(ret["uploads"])
@@ -39,11 +97,88 @@ def get_uploads():
@route("/api/uploads/<id>")
def get_upload(id):
- if id in database["entries"]:
- return database["entries"][id]
+ session = DBSession()
+ entry = session.query(Upload).get(id)
+ if entry is not None:
+ return _add_upload(entry, session)
else:
raise HTTPResponse(status=404)
@route("/api/uploads", method="POST")
+@auth_basic
def post_upload():
- raise HTTPResponse(status=501)
+ try:
+ session = DBSession()
+ if len(session.query(Upload).filter_by(title=requests.forms.title).all()):
+ raise HTTPResponse("An entry with the specified title already exists", 410)
+
+ entry = Upload(
+ title=request.forms.title,
+ author=session.query(User).filter_by(username=request.forms.username),
+ description=request.forms.description,
+ slug="".join(i for i in requests.forms.title.lower() if i in string.ascii_letters),
+ tags=request.forms.tags.split(";") if "tags" in request.forms else []
+ )
+
+ session.add(entry)
+
+ try:
+ os.mkdir(os.path.join(os.getcwd(), "media"))
+ except FileExistsError:
+ pass
+
+ for file in request.files.values():
+ f = File(
+ name=file.filename,
+ upload=entry
+ )
+
+ path = os.path.join(os.getcwd(), "media", f["id"])
+ file.save(path)
+
+ with open(path, "rb") as fobj:
+ f.hash = calculateHashForFile(fobj).hexdigest()
+ f.length = fobj.tell()
+
+ f.content_type = magic.from_file(path, mime=True)
+ session.add(f)
+
+ except KeyError as e:
+ session.rollback()
+ raise HTTPResponse(f"Missing form value: {e.args[0]}", 400)
+
+ session.commit()
+ return HTTPResponse(status=201)
+
+@route("/api/uploads/<id>/comments", method="POST")
+@auth_basic
+def post_comments(id):
+ session = DBSession()
+ try:
+ session.query(Upload).filter_by(id=id).one()
+ except db.orm.exc.NoResultFound:
+ raise HTTPResponse("Invalid upload id", 404)
+
+ try:
+ session.add(Comment(
+ body=request.forms.body,
+ author=session.query(User).filter_by(username=request.forms.username).one()
+ ))
+ except KeyError as e:
+ raise HTTPResponse(f"Missing form value: {e.args[0]}", 400)
+
+ session.commit()
+ return HTTPResponse(status=201)
+
+@route("/api/uploads/<id>/comments/<comment_id>", method="DELETE")
+@auth_basic
+def delete_comments(id, comment_id):
+ session = DBSession()
+ try:
+ comment = session.query(Comment).filter_by(id=comment_id, author=session.query(User).filter_by(username).one(), upload=session.query(Upload).filter_by(id=id).one()).one()
+ except db.orm.exc.NoResultFound:
+ raise HTTPResponse("Requested comment not found", 404)
+
+ session.delete(comment)
+ session.commit()
+ return HTTPResponse(status=204)
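
Finally, a sketch of posting a comment through the new endpoint. auth_basic in helpers.py reads the credentials from the form body, so they travel alongside the comment (note it references requests.forms where request.forms is presumably meant, so this reflects the intended flow rather than the code's current behaviour); the upload id and credentials are placeholders, the host the same assumption as above:

    import requests

    upload_id = "5b82f1d60000000000000000"    # hypothetical ObjectId string
    r = requests.post(f"http://localhost:8080/api/uploads/{upload_id}/comments",
                      data={"username": "alice", "password": "secret",
                            "body": "Nice upload!"})
    print(r.status_code)                      # 201 when the comment is stored, 404 for an unknown upload id, per the handler's responses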