diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..672f9cc
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,6 @@
+{
+ "python.linting.pylintEnabled": false,
+"python.formatting.provider": "autopep8",
+ "python.pythonPath": "C:\\Program Files\\Python36\\python.exe",
+ "editor.tabSize": 4
+}
\ No newline at end of file
diff --git a/plugins/radicale_stamm_auth/__init__.py b/plugins/radicale_stamm_auth/__init__.py
index cda5300..c8873d7 100644
--- a/plugins/radicale_stamm_auth/__init__.py
+++ b/plugins/radicale_stamm_auth/__init__.py
@@ -24,15 +24,15 @@ class Auth(BaseAuth):
return True
def map_login_to_user(self, login):
- # Get uuid from username
+ # Get uid from username
if login is None or login is "":
return None
main_uri = self.generate_base_uri(
- "/client/uuid") + "&username=" + login
+ "/client/uid") + "&username=" + login
req = urllib.request.urlopen(main_uri, data=None)
jsons = req.read()
data = json.loads(jsons)
print(data)
if "error" in data:
return None
- return data["uuid"]
+ return data["uid"]
diff --git a/plugins/radicale_stamm_rights/__init__.py b/plugins/radicale_stamm_rights/__init__.py
new file mode 100644
index 0000000..58f8bdd
--- /dev/null
+++ b/plugins/radicale_stamm_rights/__init__.py
@@ -0,0 +1,26 @@
+from radicale.rights import BaseRights
+from radicale import storage
+import posixpath
+import urllib.request
+import json
+
+
+class Rights(BaseRights):
+ def __init__(self, configuration, logger):
+ self.configuration = configuration
+ self.logger = logger
+
+ def authorized(self, user, path, permission):
+ """Check if the user is allowed to read or write the collection.
+ If ``user`` is empty, check for anonymous rights.
+ ``path`` is sanitized.
+ ``permission`` is "r" or "w".
+ """
+ raise NotImplementedError
+
+ def authorized_item(self, user, path, permission):
+ """Check if the user is allowed to read or write the item."""
+ path = storage.sanitize_path(path)
+ parent_path = storage.sanitize_path(
+ "/%s/" % posixpath.dirname(path.strip("/")))
+ return self.authorized(user, parent_path, permission)
diff --git a/plugins/radicale_stamm_storage/__init__.py b/plugins/radicale_stamm_storage/__init__.py
new file mode 100644
index 0000000..c70e33a
--- /dev/null
+++ b/plugins/radicale_stamm_storage/__init__.py
@@ -0,0 +1,317 @@
+from contextlib import contextmanager
+from hashlib import md5
+from radicale.storage import BaseCollection
+import json
+import vobject
+
+
+class Collection(BaseCollection):
+
+ # Overriden on copy by the "load" function
+ configuration = None
+ logger = None
+
+ # Properties of instance
+ """The sanitized path of the collection without leading or trailing ``/``.
+ """
+ path = ""
+
+ @property
+ def owner(self):
+ """The owner of the collection."""
+ return self.path.split("/", maxsplit=1)[0]
+
+ @property
+ def is_principal(self):
+ """Collection is a principal."""
+ return bool(self.path) and "/" not in self.path
+
+ @owner.setter
+ def owner(self, value):
+ # DEPRECATED: Included for compatibility reasons
+ pass
+
+ @is_principal.setter
+ def is_principal(self, value):
+ # DEPRECATED: Included for compatibility reasons
+ pass
+
+ @classmethod
+ def discover(cls, path, depth="0"):
+ """Discover a list of collections under the given ``path``.
+
+ ``path`` is sanitized.
+
+ If ``depth`` is "0", only the actual object under ``path`` is
+ returned.
+
+ If ``depth`` is anything but "0", it is considered as "1" and direct
+ children are included in the result.
+
+ The root collection "/" must always exist.
+
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def move(cls, item, to_collection, to_href):
+ """Move an object.
+
+ ``item`` is the item to move.
+
+ ``to_collection`` is the target collection.
+
+ ``to_href`` is the target name in ``to_collection``. An item with the
+ same name might already exist.
+
+ """
+ if item.collection.path == to_collection.path and item.href == to_href:
+ return
+ to_collection.upload(to_href, item.item)
+ item.collection.delete(item.href)
+
+ @property
+ def etag(self):
+ """Encoded as quoted-string (see RFC 2616)."""
+ etag = md5()
+ for item in self.get_all():
+ etag.update((item.href + "/" + item.etag).encode("utf-8"))
+ etag.update(json.dumps(self.get_meta(), sort_keys=True).encode())
+ return '"%s"' % etag.hexdigest()
+
+ @classmethod
+ def create_collection(cls, href, collection=None, props=None):
+ """Create a collection.
+
+ ``href`` is the sanitized path.
+
+ If the collection already exists and neither ``collection`` nor
+ ``props`` are set, this method shouldn't do anything. Otherwise the
+ existing collection must be replaced.
+
+ ``collection`` is a list of vobject components.
+
+ ``props`` are metadata values for the collection.
+
+ ``props["tag"]`` is the type of collection (VCALENDAR or
+ VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the
+ collection.
+
+ """
+ raise NotImplementedError
+
+ def sync(self, old_token=None):
+ """Get the current sync token and changed items for synchronization.
+
+ ``old_token`` an old sync token which is used as the base of the
+ delta update. If sync token is missing, all items are returned.
+ ValueError is raised for invalid or old tokens.
+
+ WARNING: This simple default implementation treats all sync-token as
+ invalid. It adheres to the specification but some clients
+ (e.g. InfCloud) don't like it. Subclasses should provide a
+ more sophisticated implementation.
+
+ """
+ token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
+ if old_token:
+ raise ValueError("Sync token are not supported")
+ return token, self.list()
+
+ def list(self):
+ """List collection items."""
+ raise NotImplementedError
+
+ def get(self, href):
+ """Fetch a single item."""
+ raise NotImplementedError
+
+ def get_multi(self, hrefs):
+ """Fetch multiple items. Duplicate hrefs must be ignored.
+
+ DEPRECATED: use ``get_multi2`` instead
+
+ """
+ return (self.get(href) for href in set(hrefs))
+
+ def get_multi2(self, hrefs):
+ """Fetch multiple items.
+
+ Functionally similar to ``get``, but might bring performance benefits
+ on some storages when used cleverly. It's not required to return the
+ requested items in the correct order. Duplicated hrefs can be ignored.
+
+ Returns tuples with the href and the item or None if the item doesn't
+ exist.
+
+ """
+ return ((href, self.get(href)) for href in hrefs)
+
+ def get_all(self):
+ """Fetch all items.
+
+ Functionally similar to ``get``, but might bring performance benefits
+ on some storages when used cleverly.
+
+ """
+ return map(self.get, self.list())
+
+ def get_all_filtered(self, filters):
+ """Fetch all items with optional filtering.
+
+ This can largely improve performance of reports depending on
+ the filters and this implementation.
+
+ Returns tuples in the form ``(item, filters_matched)``.
+ ``filters_matched`` is a bool that indicates if ``filters`` are fully
+ matched.
+
+ This returns all events by default
+ """
+ return ((item, False) for item in self.get_all())
+
+ def pre_filtered_list(self, filters):
+ """List collection items with optional pre filtering.
+
+ DEPRECATED: use ``get_all_filtered`` instead
+
+ """
+ return self.get_all()
+
+ def has(self, href):
+ """Check if an item exists by its href.
+
+ Functionally similar to ``get``, but might bring performance benefits
+ on some storages when used cleverly.
+
+ """
+ return self.get(href) is not None
+
+ def upload(self, href, vobject_item):
+ """Upload a new or replace an existing item."""
+ raise NotImplementedError
+
+ def delete(self, href=None):
+ """Delete an item.
+
+ When ``href`` is ``None``, delete the collection.
+
+ """
+ raise NotImplementedError
+
+ def get_meta(self, key=None):
+ """Get metadata value for collection.
+
+ Return the value of the property ``key``. If ``key`` is ``None`` return
+ a dict with all properties
+
+ """
+ raise NotImplementedError
+
+ def set_meta(self, props):
+ """Set metadata values for collection.
+
+ ``props`` a dict with updates for properties. If a value is empty, the
+ property must be deleted.
+
+ DEPRECATED: use ``set_meta_all`` instead
+
+ """
+ raise NotImplementedError
+
+ def set_meta_all(self, props):
+ """Set metadata values for collection.
+
+ ``props`` a dict with values for properties.
+
+ """
+ delta_props = self.get_meta()
+ for key in delta_props.keys():
+ if key not in props:
+ delta_props[key] = None
+ delta_props.update(props)
+        self.set_meta(delta_props)
+
+ @property
+ def last_modified(self):
+ """Get the HTTP-datetime of when the collection was modified."""
+ raise NotImplementedError
+
+ def serialize(self):
+ """Get the unicode string representing the whole collection."""
+ if self.get_meta("tag") == "VCALENDAR":
+ in_vcalendar = False
+ vtimezones = ""
+ included_tzids = set()
+ vtimezone = []
+ tzid = None
+ components = ""
+ # Concatenate all child elements of VCALENDAR from all items
+ # together, while preventing duplicated VTIMEZONE entries.
+ # VTIMEZONEs are only distinguished by their TZID, if different
+ # timezones share the same TZID this produces errornous ouput.
+ # VObject fails at this too.
+ for item in self.get_all():
+ depth = 0
+ for line in item.serialize().split("\r\n"):
+ if line.startswith("BEGIN:"):
+ depth += 1
+ if depth == 1 and line == "BEGIN:VCALENDAR":
+ in_vcalendar = True
+ elif in_vcalendar:
+ if depth == 1 and line.startswith("END:"):
+ in_vcalendar = False
+ if depth == 2 and line == "BEGIN:VTIMEZONE":
+ vtimezone.append(line + "\r\n")
+ elif vtimezone:
+ vtimezone.append(line + "\r\n")
+ if depth == 2 and line.startswith("TZID:"):
+ tzid = line[len("TZID:"):]
+ elif depth == 2 and line.startswith("END:"):
+ if tzid is None or tzid not in included_tzids:
+ vtimezones += "".join(vtimezone)
+ included_tzids.add(tzid)
+ vtimezone.clear()
+ tzid = None
+ elif depth >= 2:
+ components += line + "\r\n"
+ if line.startswith("END:"):
+ depth -= 1
+ template = vobject.iCalendar()
+ displayname = self.get_meta("D:displayname")
+ if displayname:
+ template.add("X-WR-CALNAME")
+ template.x_wr_calname.value_param = "TEXT"
+ template.x_wr_calname.value = displayname
+ description = self.get_meta("C:calendar-description")
+ if description:
+ template.add("X-WR-CALDESC")
+ template.x_wr_caldesc.value_param = "TEXT"
+ template.x_wr_caldesc.value = description
+ template = template.serialize()
+ template_insert_pos = template.find("\r\nEND:VCALENDAR\r\n") + 2
+ assert template_insert_pos != -1
+ return (template[:template_insert_pos] +
+ vtimezones + components +
+ template[template_insert_pos:])
+ elif self.get_meta("tag") == "VADDRESSBOOK":
+ return "".join((item.serialize() for item in self.get_all()))
+ return ""
+
+ @classmethod
+ @contextmanager
+ def acquire_lock(cls, mode, user=None):
+ """Set a context manager to lock the whole storage.
+
+ ``mode`` must either be "r" for shared access or "w" for exclusive
+ access.
+
+ ``user`` is the name of the logged in user or empty.
+
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def verify(cls):
+ """Check the storage for errors."""
+ return True
diff --git a/plugins/setup.py b/plugins/setup.py
index 3cbe888..5cd8cde 100644
--- a/plugins/setup.py
+++ b/plugins/setup.py
@@ -3,3 +3,5 @@
from distutils.core import setup
setup(name="radicale_stamm_auth", packages=["radicale_stamm_auth"])
+
+setup(name="radicale_stamm_rights", packages=["radicale_stamm_rights"])
diff --git a/radicale/__init__.py b/radicale/__init__.py
new file mode 100644
index 0000000..88b1bb6
--- /dev/null
+++ b/radicale/__init__.py
@@ -0,0 +1,949 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2008 Nicolas Kandel
+# Copyright © 2008 Pascal Halter
+# Copyright © 2008-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Radicale Server module.
+
+This module offers a WSGI application class.
+
+To use this module, you should take a look at the file ``radicale.py`` that
+should have been included in this package.
+
+"""
+
+import base64
+import contextlib
+import datetime
+import io
+import itertools
+import logging
+import os
+import posixpath
+import pprint
+import random
+import socket
+import socketserver
+import ssl
+import sys
+import threading
+import time
+import wsgiref.simple_server
+import zlib
+from http import client
+from urllib.parse import unquote, urlparse
+from xml.etree import ElementTree as ET
+
+import vobject
+
+from . import auth, rights, storage, web, xmlutils
+
+VERSION = "2.1.8"
+
+NOT_ALLOWED = (
+ client.FORBIDDEN, (("Content-Type", "text/plain"),),
+ "Access to the requested resource forbidden.")
+BAD_REQUEST = (
+ client.BAD_REQUEST, (("Content-Type", "text/plain"),), "Bad Request")
+NOT_FOUND = (
+ client.NOT_FOUND, (("Content-Type", "text/plain"),),
+ "The requested resource could not be found.")
+WEBDAV_PRECONDITION_FAILED = (
+ client.CONFLICT, (("Content-Type", "text/plain"),),
+ "WebDAV precondition failed.")
+PRECONDITION_FAILED = (
+ client.PRECONDITION_FAILED,
+ (("Content-Type", "text/plain"),), "Precondition failed.")
+REQUEST_TIMEOUT = (
+ client.REQUEST_TIMEOUT, (("Content-Type", "text/plain"),),
+ "Connection timed out.")
+REQUEST_ENTITY_TOO_LARGE = (
+ client.REQUEST_ENTITY_TOO_LARGE, (("Content-Type", "text/plain"),),
+ "Request body too large.")
+REMOTE_DESTINATION = (
+ client.BAD_GATEWAY, (("Content-Type", "text/plain"),),
+ "Remote destination not supported.")
+DIRECTORY_LISTING = (
+ client.FORBIDDEN, (("Content-Type", "text/plain"),),
+ "Directory listings are not supported.")
+INTERNAL_SERVER_ERROR = (
+ client.INTERNAL_SERVER_ERROR, (("Content-Type", "text/plain"),),
+ "A server error occurred. Please contact the administrator.")
+
+DAV_HEADERS = "1, 2, 3, calendar-access, addressbook, extended-mkcol"
+
+
+class HTTPServer(wsgiref.simple_server.WSGIServer):
+ """HTTP server."""
+
+ # These class attributes must be set before creating instance
+ client_timeout = None
+ max_connections = None
+ logger = None
+
+ def __init__(self, address, handler, bind_and_activate=True):
+ """Create server."""
+ ipv6 = ":" in address[0]
+
+ if ipv6:
+ self.address_family = socket.AF_INET6
+
+ # Do not bind and activate, as we might change socket options
+ super().__init__(address, handler, False)
+
+ if ipv6:
+ # Only allow IPv6 connections to the IPv6 socket
+ self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
+
+ if self.max_connections:
+ self.connections_guard = threading.BoundedSemaphore(
+ self.max_connections)
+ else:
+ # use dummy context manager
+ self.connections_guard = contextlib.ExitStack()
+
+ if bind_and_activate:
+ try:
+ self.server_bind()
+ self.server_activate()
+ except BaseException:
+ self.server_close()
+ raise
+
+ if self.client_timeout and sys.version_info < (3, 5, 2):
+ self.logger.warning("Using server.timeout with Python < 3.5.2 "
+ "can cause network connection failures")
+
+ def get_request(self):
+ # Set timeout for client
+ _socket, address = super().get_request()
+ if self.client_timeout:
+ _socket.settimeout(self.client_timeout)
+ return _socket, address
+
+ def handle_error(self, request, client_address):
+ if issubclass(sys.exc_info()[0], socket.timeout):
+ self.logger.info("client timed out", exc_info=True)
+ else:
+ self.logger.error("An exception occurred during request: %s",
+ sys.exc_info()[1], exc_info=True)
+
+
+class HTTPSServer(HTTPServer):
+ """HTTPS server."""
+
+ # These class attributes must be set before creating instance
+ certificate = None
+ key = None
+ protocol = None
+ ciphers = None
+ certificate_authority = None
+
+ def __init__(self, address, handler):
+ """Create server by wrapping HTTP socket in an SSL socket."""
+ super().__init__(address, handler, bind_and_activate=False)
+
+ self.socket = ssl.wrap_socket(
+ self.socket, self.key, self.certificate, server_side=True,
+ cert_reqs=ssl.CERT_REQUIRED if self.certificate_authority else
+ ssl.CERT_NONE,
+ ca_certs=self.certificate_authority or None,
+ ssl_version=self.protocol, ciphers=self.ciphers,
+ do_handshake_on_connect=False)
+
+ self.server_bind()
+ self.server_activate()
+
+
+class ThreadedHTTPServer(socketserver.ThreadingMixIn, HTTPServer):
+ def process_request_thread(self, request, client_address):
+ with self.connections_guard:
+ return super().process_request_thread(request, client_address)
+
+
+class ThreadedHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer):
+ def process_request_thread(self, request, client_address):
+ try:
+ try:
+ request.do_handshake()
+ except socket.timeout:
+ raise
+ except Exception as e:
+ raise RuntimeError("SSL handshake failed: %s" % e) from e
+ except Exception:
+ try:
+ self.handle_error(request, client_address)
+ finally:
+ self.shutdown_request(request)
+ return
+ with self.connections_guard:
+ return super().process_request_thread(request, client_address)
+
+
+class RequestHandler(wsgiref.simple_server.WSGIRequestHandler):
+ """HTTP requests handler."""
+
+ # These class attributes must be set before creating instance
+ logger = None
+
+ def __init__(self, *args, **kwargs):
+ # Store exception for logging
+ self.error_stream = io.StringIO()
+ super().__init__(*args, **kwargs)
+
+ def get_stderr(self):
+ return self.error_stream
+
+ def log_message(self, *args, **kwargs):
+ """Disable inner logging management."""
+
+ def get_environ(self):
+ env = super().get_environ()
+ if hasattr(self.connection, "getpeercert"):
+ # The certificate can be evaluated by the auth module
+ env["REMOTE_CERTIFICATE"] = self.connection.getpeercert()
+ # Parent class only tries latin1 encoding
+ env["PATH_INFO"] = unquote(self.path.split("?", 1)[0])
+ return env
+
+ def handle(self):
+ super().handle()
+ # Log exception
+ error = self.error_stream.getvalue().strip("\n")
+ if error:
+ self.logger.error(
+ "An unhandled exception occurred during request:\n%s" % error)
+
+
+class Application:
+ """WSGI application managing collections."""
+
+ def __init__(self, configuration, logger):
+ """Initialize application."""
+ super().__init__()
+ self.configuration = configuration
+ self.logger = logger
+ self.Auth = auth.load(configuration, logger)
+ self.Collection = storage.load(configuration, logger)
+ self.Rights = rights.load(configuration, logger)
+ self.Web = web.load(configuration, logger)
+ self.encoding = configuration.get("encoding", "request")
+
+ def headers_log(self, environ):
+ """Sanitize headers for logging."""
+ request_environ = dict(environ)
+
+ # Remove environment variables
+ if not self.configuration.getboolean("logging", "full_environment"):
+ for shell_variable in os.environ:
+ request_environ.pop(shell_variable, None)
+
+ # Mask passwords
+ mask_passwords = self.configuration.getboolean(
+ "logging", "mask_passwords")
+ authorization = request_environ.get("HTTP_AUTHORIZATION", "")
+ if mask_passwords and authorization.startswith("Basic"):
+ request_environ["HTTP_AUTHORIZATION"] = "Basic **masked**"
+ if request_environ.get("HTTP_COOKIE"):
+ request_environ["HTTP_COOKIE"] = "**masked**"
+
+ return request_environ
+
+ def decode(self, text, environ):
+ """Try to magically decode ``text`` according to given ``environ``."""
+ # List of charsets to try
+ charsets = []
+
+ # First append content charset given in the request
+ content_type = environ.get("CONTENT_TYPE")
+ if content_type and "charset=" in content_type:
+ charsets.append(
+ content_type.split("charset=")[1].split(";")[0].strip())
+ # Then append default Radicale charset
+ charsets.append(self.encoding)
+ # Then append various fallbacks
+ charsets.append("utf-8")
+ charsets.append("iso8859-1")
+
+ # Try to decode
+ for charset in charsets:
+ try:
+ return text.decode(charset)
+ except UnicodeDecodeError:
+ pass
+ raise UnicodeDecodeError
+
+ def collect_allowed_items(self, items, user):
+ """Get items from request that user is allowed to access."""
+ read_allowed_items = []
+ write_allowed_items = []
+ for item in items:
+ if isinstance(item, storage.BaseCollection):
+ path = storage.sanitize_path("/%s/" % item.path)
+ can_read = self.Rights.authorized(user, path, "r")
+ can_write = self.Rights.authorized(user, path, "w")
+ target = "collection %r" % item.path
+ else:
+ path = storage.sanitize_path("/%s/%s" % (item.collection.path,
+ item.href))
+ can_read = self.Rights.authorized_item(user, path, "r")
+ can_write = self.Rights.authorized_item(user, path, "w")
+ target = "item %r from %r" % (item.href, item.collection.path)
+ text_status = []
+ if can_read:
+ text_status.append("read")
+ read_allowed_items.append(item)
+ if can_write:
+ text_status.append("write")
+ write_allowed_items.append(item)
+ self.logger.debug(
+ "%s has %s access to %s",
+ repr(user) if user else "anonymous user",
+ " and ".join(text_status) if text_status else "NO", target)
+ return read_allowed_items, write_allowed_items
+
+ def __call__(self, environ, start_response):
+ try:
+ status, headers, answers = self._handle_request(environ)
+ except Exception as e:
+ try:
+ method = str(environ["REQUEST_METHOD"])
+ except Exception:
+ method = "unknown"
+ try:
+ path = str(environ.get("PATH_INFO", ""))
+ except Exception:
+ path = ""
+ self.logger.error("An exception occurred during %s request on %r: "
+ "%s", method, path, e, exc_info=True)
+ status, headers, answer = INTERNAL_SERVER_ERROR
+ answer = answer.encode("ascii")
+ status = "%d %s" % (
+ status, client.responses.get(status, "Unknown"))
+ headers = [("Content-Length", str(len(answer)))] + list(headers)
+ answers = [answer]
+ start_response(status, headers)
+ return answers
+
+ def _handle_request(self, environ):
+ """Manage a request."""
+ def response(status, headers=(), answer=None):
+ headers = dict(headers)
+ # Set content length
+ if answer:
+ if hasattr(answer, "encode"):
+ self.logger.debug("Response content:\n%s", answer)
+ headers["Content-Type"] += "; charset=%s" % self.encoding
+ answer = answer.encode(self.encoding)
+ accept_encoding = [
+ encoding.strip() for encoding in
+ environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
+ if encoding.strip()]
+
+ if "gzip" in accept_encoding:
+ zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
+ answer = zcomp.compress(answer) + zcomp.flush()
+ headers["Content-Encoding"] = "gzip"
+
+ headers["Content-Length"] = str(len(answer))
+
+ # Add extra headers set in configuration
+ if self.configuration.has_section("headers"):
+ for key in self.configuration.options("headers"):
+ headers[key] = self.configuration.get("headers", key)
+
+ # Start response
+ time_end = datetime.datetime.now()
+ status = "%d %s" % (
+ status, client.responses.get(status, "Unknown"))
+ self.logger.info(
+ "%s response status for %r%s in %.3f seconds: %s",
+ environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
+ depthinfo, (time_end - time_begin).total_seconds(), status)
+ # Return response content
+ return status, list(headers.items()), [answer] if answer else []
+
+ remote_host = "unknown"
+ if environ.get("REMOTE_HOST"):
+ remote_host = repr(environ["REMOTE_HOST"])
+ elif environ.get("REMOTE_ADDR"):
+ remote_host = environ["REMOTE_ADDR"]
+ if environ.get("HTTP_X_FORWARDED_FOR"):
+ remote_host = "%r (forwarded by %s)" % (
+ environ["HTTP_X_FORWARDED_FOR"], remote_host)
+ remote_useragent = ""
+ if environ.get("HTTP_USER_AGENT"):
+ remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
+ depthinfo = ""
+ if environ.get("HTTP_DEPTH"):
+ depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
+ time_begin = datetime.datetime.now()
+ self.logger.info(
+ "%s request for %r%s received from %s%s",
+ environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo,
+ remote_host, remote_useragent)
+ headers = pprint.pformat(self.headers_log(environ))
+ self.logger.debug("Request headers:\n%s", headers)
+
+ # Let reverse proxies overwrite SCRIPT_NAME
+ if "HTTP_X_SCRIPT_NAME" in environ:
+ # script_name must be removed from PATH_INFO by the client.
+ unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
+ self.logger.debug("Script name overwritten by client: %r",
+ unsafe_base_prefix)
+ else:
+ # SCRIPT_NAME is already removed from PATH_INFO, according to the
+ # WSGI specification.
+ unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
+ # Sanitize base prefix
+ base_prefix = storage.sanitize_path(unsafe_base_prefix).rstrip("/")
+ self.logger.debug("Sanitized script name: %r", base_prefix)
+ # Sanitize request URI (a WSGI server indicates with an empty path,
+ # that the URL targets the application root without a trailing slash)
+ path = storage.sanitize_path(environ.get("PATH_INFO", ""))
+ self.logger.debug("Sanitized path: %r", path)
+
+ # Get function corresponding to method
+ function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper())
+
+ # If "/.well-known" is not available, clients query "/"
+ if path == "/.well-known" or path.startswith("/.well-known/"):
+ return response(*NOT_FOUND)
+
+ # Ask authentication backend to check rights
+ external_login = self.Auth.get_external_login(environ)
+ authorization = environ.get("HTTP_AUTHORIZATION", "")
+ if external_login:
+ login, password = external_login
+ elif authorization.startswith("Basic"):
+ authorization = authorization[len("Basic"):].strip()
+ login, password = self.decode(base64.b64decode(
+ authorization.encode("ascii")), environ).split(":", 1)
+ else:
+ # DEPRECATED: use remote_user backend instead
+ login = environ.get("REMOTE_USER", "")
+ password = ""
+ user = self.Auth.map_login_to_user(login)
+
+ if not user:
+ is_authenticated = True
+ elif not storage.is_safe_path_component(user):
+ # Prevent usernames like "user/calendar.ics"
+ self.logger.info("Refused unsafe username: %r", user)
+ is_authenticated = False
+ else:
+ is_authenticated = self.Auth.is_authenticated2(login, user,
+ password)
+ if not is_authenticated:
+ self.logger.info("Failed login attempt: %r", user)
+ # Random delay to avoid timing oracles and bruteforce attacks
+ delay = self.configuration.getfloat("auth", "delay")
+ if delay > 0:
+ random_delay = delay * (0.5 + random.random())
+ self.logger.debug("Sleeping %.3f seconds", random_delay)
+ time.sleep(random_delay)
+ else:
+ self.logger.info("Successful login: %r", user)
+
+ # Create principal collection
+ if user and is_authenticated:
+ principal_path = "/%s/" % user
+ if self.Rights.authorized(user, principal_path, "w"):
+ with self.Collection.acquire_lock("r", user):
+ principal = next(
+ self.Collection.discover(principal_path, depth="1"),
+ None)
+ if not principal:
+ with self.Collection.acquire_lock("w", user):
+ try:
+ self.Collection.create_collection(principal_path)
+ except ValueError as e:
+ self.logger.warning("Failed to create principal "
+ "collection %r: %s", user, e)
+ is_authenticated = False
+ else:
+ self.logger.warning("Access to principal path %r denied by "
+ "rights backend", principal_path)
+
+ # Verify content length
+ content_length = int(environ.get("CONTENT_LENGTH") or 0)
+ if content_length:
+ max_content_length = self.configuration.getint(
+ "server", "max_content_length")
+ if max_content_length and content_length > max_content_length:
+ self.logger.info(
+ "Request body too large: %d", content_length)
+ return response(*REQUEST_ENTITY_TOO_LARGE)
+
+ if is_authenticated:
+ status, headers, answer = function(
+ environ, base_prefix, path, user)
+ if (status, headers, answer) == NOT_ALLOWED:
+ self.logger.info("Access to %r denied for %s", path,
+ repr(user) if user else "anonymous user")
+ else:
+ status, headers, answer = NOT_ALLOWED
+
+ if (status, headers, answer) == NOT_ALLOWED and not (
+ user and is_authenticated) and not external_login:
+ # Unknown or unauthorized user
+ self.logger.debug("Asking client for authentication")
+ status = client.UNAUTHORIZED
+ realm = self.configuration.get("server", "realm")
+ headers = dict(headers)
+ headers.update({
+ "WWW-Authenticate":
+ "Basic realm=\"%s\"" % realm})
+
+ return response(status, headers, answer)
+
+ def _access(self, user, path, permission, item=None):
+ """Check if ``user`` can access ``path`` or the parent collection.
+
+ ``permission`` must either be "r" or "w".
+
+ If ``item`` is given, only access to that class of item is checked.
+
+ """
+ allowed = False
+ if not item or isinstance(item, storage.BaseCollection):
+ allowed |= self.Rights.authorized(user, path, permission)
+ if not item or not isinstance(item, storage.BaseCollection):
+ allowed |= self.Rights.authorized_item(user, path, permission)
+ return allowed
+
+ def _read_raw_content(self, environ):
+ content_length = int(environ.get("CONTENT_LENGTH") or 0)
+ if not content_length:
+ return b""
+ content = environ["wsgi.input"].read(content_length)
+ if len(content) < content_length:
+ raise RuntimeError("Request body too short: %d" % len(content))
+ return content
+
+ def _read_content(self, environ):
+ content = self.decode(self._read_raw_content(environ), environ)
+ self.logger.debug("Request content:\n%s", content)
+ return content
+
+ def _read_xml_content(self, environ):
+ content = self.decode(self._read_raw_content(environ), environ)
+ if not content:
+ return None
+ try:
+ xml_content = ET.fromstring(content)
+ except ET.ParseError as e:
+ self.logger.debug("Request content (Invalid XML):\n%s", content)
+ raise RuntimeError("Failed to parse XML: %s" % e) from e
+ if self.logger.isEnabledFor(logging.DEBUG):
+ self.logger.debug("Request content:\n%s",
+ xmlutils.pretty_xml(xml_content))
+ return xml_content
+
+ def _write_xml_content(self, xml_content):
+ if self.logger.isEnabledFor(logging.DEBUG):
+ self.logger.debug("Response content:\n%s",
+ xmlutils.pretty_xml(xml_content))
+ f = io.BytesIO()
+ ET.ElementTree(xml_content).write(f, encoding=self.encoding,
+ xml_declaration=True)
+ return f.getvalue()
+
+ def do_DELETE(self, environ, base_prefix, path, user):
+ """Manage DELETE request."""
+ if not self._access(user, path, "w"):
+ return NOT_ALLOWED
+ with self.Collection.acquire_lock("w", user):
+ item = next(self.Collection.discover(path), None)
+ if not self._access(user, path, "w", item):
+ return NOT_ALLOWED
+ if not item:
+ return NOT_FOUND
+ if_match = environ.get("HTTP_IF_MATCH", "*")
+ if if_match not in ("*", item.etag):
+ # ETag precondition not verified, do not delete item
+ return PRECONDITION_FAILED
+ if isinstance(item, storage.BaseCollection):
+ xml_answer = xmlutils.delete(base_prefix, path, item)
+ else:
+ xml_answer = xmlutils.delete(
+ base_prefix, path, item.collection, item.href)
+ headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
+ return client.OK, headers, self._write_xml_content(xml_answer)
+
+ def do_GET(self, environ, base_prefix, path, user):
+ """Manage GET request.
+
+ Serves the built-in web UI for the root URL and ``/.web`` paths,
+ otherwise serializes the requested collection or item.
+ """
+ # Redirect to .web if the root URL is requested
+ if not path.strip("/"):
+ web_path = ".web"
+ if not environ.get("PATH_INFO"):
+ # No PATH_INFO: build a path relative to the script name
+ # so the redirect works behind a reverse proxy.
+ web_path = posixpath.join(posixpath.basename(base_prefix),
+ web_path)
+ return (client.FOUND,
+ {"Location": web_path, "Content-Type": "text/plain"},
+ "Redirected to %s" % web_path)
+ # Dispatch .web URL to web module
+ if path == "/.web" or path.startswith("/.web/"):
+ return self.Web.get(environ, base_prefix, path, user)
+ if not self._access(user, path, "r"):
+ return NOT_ALLOWED
+ with self.Collection.acquire_lock("r", user):
+ item = next(self.Collection.discover(path), None)
+ if not self._access(user, path, "r", item):
+ return NOT_ALLOWED
+ if not item:
+ return NOT_FOUND
+ if isinstance(item, storage.BaseCollection):
+ # Collections without a "tag" (VCALENDAR/VADDRESSBOOK) are
+ # plain folders and cannot be serialized as one document.
+ tag = item.get_meta("tag")
+ if not tag:
+ return DIRECTORY_LISTING
+ content_type = xmlutils.MIMETYPES[tag]
+ else:
+ content_type = xmlutils.OBJECT_MIMETYPES[item.name]
+ headers = {
+ "Content-Type": content_type,
+ "Last-Modified": item.last_modified,
+ "ETag": item.etag}
+ answer = item.serialize()
+ return client.OK, headers, answer
+
+ def do_HEAD(self, environ, base_prefix, path, user):
+ """Manage HEAD request.
+
+ Delegates to ``do_GET`` and drops the body: same status and
+ headers, ``None`` as answer. NOTE(review): the serialized
+ ``answer`` is computed by do_GET and discarded here.
+ """
+ status, headers, answer = self.do_GET(
+ environ, base_prefix, path, user)
+ return status, headers, None
+
+ def do_MKCALENDAR(self, environ, base_prefix, path, user):
+ """Manage MKCALENDAR request.
+
+ Creates a new calendar collection at ``path`` with the properties
+ supplied in the request XML body.
+ """
+ if not self.Rights.authorized(user, path, "w"):
+ return NOT_ALLOWED
+ try:
+ xml_content = self._read_xml_content(environ)
+ except RuntimeError as e:
+ self.logger.warning(
+ "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ except socket.timeout as e:
+ self.logger.debug("client timed out", exc_info=True)
+ return REQUEST_TIMEOUT
+ with self.Collection.acquire_lock("w", user):
+ item = next(self.Collection.discover(path), None)
+ # MKCALENDAR must not overwrite an existing resource.
+ if item:
+ return WEBDAV_PRECONDITION_FAILED
+ props = xmlutils.props_from_request(xml_content)
+ # Mark the new collection as a calendar.
+ props["tag"] = "VCALENDAR"
+ # TODO: use this?
+ # timezone = props.get("C:calendar-timezone")
+ try:
+ storage.check_and_sanitize_props(props)
+ self.Collection.create_collection(path, props=props)
+ except ValueError as e:
+ self.logger.warning(
+ "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ return client.CREATED, {}, None
+
+ def do_MKCOL(self, environ, base_prefix, path, user):
+ """Manage MKCOL request.
+
+ Creates a plain (untagged) WebDAV collection at ``path``; unlike
+ MKCALENDAR no "tag" property is forced on it.
+ """
+ if not self.Rights.authorized(user, path, "w"):
+ return NOT_ALLOWED
+ try:
+ xml_content = self._read_xml_content(environ)
+ except RuntimeError as e:
+ self.logger.warning(
+ "Bad MKCOL request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ except socket.timeout as e:
+ self.logger.debug("client timed out", exc_info=True)
+ return REQUEST_TIMEOUT
+ with self.Collection.acquire_lock("w", user):
+ item = next(self.Collection.discover(path), None)
+ # MKCOL must not overwrite an existing resource.
+ if item:
+ return WEBDAV_PRECONDITION_FAILED
+ props = xmlutils.props_from_request(xml_content)
+ try:
+ storage.check_and_sanitize_props(props)
+ self.Collection.create_collection(path, props=props)
+ except ValueError as e:
+ self.logger.warning(
+ "Bad MKCOL request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ return client.CREATED, {}, None
+
+ def do_MOVE(self, environ, base_prefix, path, user):
+ """Manage MOVE request.
+
+ Moves a single item (not a collection) to the URL given in the
+ "Destination" header; only same-host destinations are supported.
+ """
+ raw_dest = environ.get("HTTP_DESTINATION", "")
+ to_url = urlparse(raw_dest)
+ if to_url.netloc != environ["HTTP_HOST"]:
+ self.logger.info("Unsupported destination address: %r", raw_dest)
+ # Remote destination server, not supported
+ return REMOTE_DESTINATION
+ if not self._access(user, path, "w"):
+ return NOT_ALLOWED
+ to_path = storage.sanitize_path(to_url.path)
+ # The destination must live under the same base prefix, otherwise
+ # it points outside of the space this application serves.
+ if not (to_path + "/").startswith(base_prefix + "/"):
+ self.logger.warning("Destination %r from MOVE request on %r does"
+ "n't start with base prefix", to_path, path)
+ return NOT_ALLOWED
+ to_path = to_path[len(base_prefix):]
+ if not self._access(user, to_path, "w"):
+ return NOT_ALLOWED
+
+ with self.Collection.acquire_lock("w", user):
+ item = next(self.Collection.discover(path), None)
+ if not self._access(user, path, "w", item):
+ return NOT_ALLOWED
+ if not self._access(user, to_path, "w", item):
+ return NOT_ALLOWED
+ if not item:
+ return NOT_FOUND
+ # Moving whole collections is not supported.
+ if isinstance(item, storage.BaseCollection):
+ return WEBDAV_PRECONDITION_FAILED
+
+ to_item = next(self.Collection.discover(to_path), None)
+ # Refuse to clobber a collection, or any existing item unless
+ # the client explicitly sent "Overwrite: T".
+ if (isinstance(to_item, storage.BaseCollection) or
+ to_item and environ.get("HTTP_OVERWRITE", "F") != "T"):
+ return WEBDAV_PRECONDITION_FAILED
+ to_parent_path = storage.sanitize_path(
+ "/%s/" % posixpath.dirname(to_path.strip("/")))
+ to_collection = next(
+ self.Collection.discover(to_parent_path), None)
+ if not to_collection:
+ return WEBDAV_PRECONDITION_FAILED
+ to_href = posixpath.basename(to_path.strip("/"))
+ try:
+ self.Collection.move(item, to_collection, to_href)
+ except ValueError as e:
+ self.logger.warning(
+ "Bad MOVE request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ return client.CREATED, {}, None
+
+ def do_OPTIONS(self, environ, base_prefix, path, user):
+ """Manage OPTIONS request.
+
+ Advertises every supported method by introspecting this class for
+ ``do_*`` handlers, plus the DAV capability header.
+ """
+ headers = {
+ "Allow": ", ".join(
+ name[3:] for name in dir(self) if name.startswith("do_")),
+ "DAV": DAV_HEADERS}
+ return client.OK, headers, None
+
+ def do_PROPFIND(self, environ, base_prefix, path, user):
+ """Manage PROPFIND request.
+
+ Returns WebDAV properties for ``path`` and, depending on the
+ "Depth" header, its children, filtered by the user's rights.
+ """
+ if not self._access(user, path, "r"):
+ return NOT_ALLOWED
+ try:
+ xml_content = self._read_xml_content(environ)
+ except RuntimeError as e:
+ self.logger.warning(
+ "Bad PROPFIND request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ except socket.timeout as e:
+ self.logger.debug("client timed out", exc_info=True)
+ return REQUEST_TIMEOUT
+ with self.Collection.acquire_lock("r", user):
+ items = self.Collection.discover(
+ path, environ.get("HTTP_DEPTH", "0"))
+ # take root item for rights checking
+ item = next(items, None)
+ if not self._access(user, path, "r", item):
+ return NOT_ALLOWED
+ if not item:
+ return NOT_FOUND
+ # put item back
+ # (discover returned a generator; re-chain the consumed head)
+ items = itertools.chain([item], items)
+ read_items, write_items = self.collect_allowed_items(items, user)
+ headers = {"DAV": DAV_HEADERS,
+ "Content-Type": "text/xml; charset=%s" % self.encoding}
+ status, xml_answer = xmlutils.propfind(
+ base_prefix, path, xml_content, read_items, write_items, user)
+ if status == client.FORBIDDEN:
+ return NOT_ALLOWED
+ else:
+ return status, headers, self._write_xml_content(xml_answer)
+
+ def do_PROPPATCH(self, environ, base_prefix, path, user):
+ """Manage PROPPATCH request.
+
+ Updates WebDAV properties of a collection; items themselves cannot
+ be patched.
+ """
+ if not self.Rights.authorized(user, path, "w"):
+ return NOT_ALLOWED
+ try:
+ xml_content = self._read_xml_content(environ)
+ except RuntimeError as e:
+ self.logger.warning(
+ "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ except socket.timeout as e:
+ self.logger.debug("client timed out", exc_info=True)
+ return REQUEST_TIMEOUT
+ with self.Collection.acquire_lock("w", user):
+ item = next(self.Collection.discover(path), None)
+ # Only collections carry editable properties.
+ if not isinstance(item, storage.BaseCollection):
+ return WEBDAV_PRECONDITION_FAILED
+ headers = {"DAV": DAV_HEADERS,
+ "Content-Type": "text/xml; charset=%s" % self.encoding}
+ try:
+ xml_answer = xmlutils.proppatch(base_prefix, path, xml_content,
+ item)
+ except ValueError as e:
+ self.logger.warning(
+ "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ return (client.MULTI_STATUS, headers,
+ self._write_xml_content(xml_answer))
+
+ def do_PUT(self, environ, base_prefix, path, user):
+ """Manage PUT request.
+
+ Uploads either a whole collection (when the target is a collection,
+ has no parent, or the parent is an empty untagged folder) or a
+ single item into an existing collection. Honors the "If-Match" and
+ "If-None-Match" precondition headers.
+ """
+ if not self._access(user, path, "w"):
+ return NOT_ALLOWED
+ try:
+ content = self._read_content(environ)
+ except RuntimeError as e:
+ self.logger.warning(
+ "Bad PUT request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ except socket.timeout as e:
+ self.logger.debug("client timed out", exc_info=True)
+ return REQUEST_TIMEOUT
+ with self.Collection.acquire_lock("w", user):
+ parent_path = storage.sanitize_path(
+ "/%s/" % posixpath.dirname(path.strip("/")))
+ item = next(self.Collection.discover(path), None)
+ parent_item = next(self.Collection.discover(parent_path), None)
+
+ # Decide whether this PUT replaces a whole collection or adds a
+ # single item to an existing tagged collection.
+ write_whole_collection = (
+ isinstance(item, storage.BaseCollection) or
+ not parent_item or (
+ not next(parent_item.list(), None) and
+ parent_item.get_meta("tag") not in (
+ "VADDRESSBOOK", "VCALENDAR")))
+ if write_whole_collection:
+ if not self.Rights.authorized(user, path, "w"):
+ return NOT_ALLOWED
+ elif not self.Rights.authorized_item(user, path, "w"):
+ return NOT_ALLOWED
+
+ etag = environ.get("HTTP_IF_MATCH", "")
+ if not item and etag:
+ # Etag asked but no item found: item has been removed
+ return PRECONDITION_FAILED
+ if item and etag and item.etag != etag:
+ # Etag asked but item not matching: item has changed
+ return PRECONDITION_FAILED
+
+ match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
+ if item and match:
+ # Creation asked but item found: item can't be replaced
+ return PRECONDITION_FAILED
+
+ try:
+ items = tuple(vobject.readComponents(content or ""))
+ if not write_whole_collection and len(items) != 1:
+ raise RuntimeError(
+ "Item contains %d components" % len(items))
+ if write_whole_collection or not parent_item.get_meta("tag"):
+ # Derive the collection tag from the Content-Type header,
+ # then let the actual payload components override it.
+ content_type = environ.get("CONTENT_TYPE",
+ "").split(";")[0]
+ tags = {value: key
+ for key, value in xmlutils.MIMETYPES.items()}
+ tag = tags.get(content_type)
+ if items and items[0].name == "VCALENDAR":
+ tag = "VCALENDAR"
+ elif items and items[0].name in ("VCARD", "VLIST"):
+ tag = "VADDRESSBOOK"
+ else:
+ tag = parent_item.get_meta("tag")
+ if tag == "VCALENDAR" and len(items) > 1:
+ raise RuntimeError("VCALENDAR collection contains %d "
+ "components" % len(items))
+ for i in items:
+ # Keep the stored UID when replacing an existing item.
+ storage.check_and_sanitize_item(
+ i, is_collection=write_whole_collection, uid=item.uid
+ if not write_whole_collection and item else None,
+ tag=tag)
+ except Exception as e:
+ self.logger.warning(
+ "Bad PUT request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+
+ if write_whole_collection:
+ props = {}
+ if tag:
+ props["tag"] = tag
+ # Map iCalendar X-WR-* metadata to WebDAV properties.
+ if tag == "VCALENDAR" and items:
+ if hasattr(items[0], "x_wr_calname"):
+ calname = items[0].x_wr_calname.value
+ if calname:
+ props["D:displayname"] = calname
+ if hasattr(items[0], "x_wr_caldesc"):
+ caldesc = items[0].x_wr_caldesc.value
+ if caldesc:
+ props["C:calendar-description"] = caldesc
+ try:
+ storage.check_and_sanitize_props(props)
+ new_item = self.Collection.create_collection(
+ path, items, props)
+ except ValueError as e:
+ self.logger.warning(
+ "Bad PUT request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ else:
+ href = posixpath.basename(path.strip("/"))
+ try:
+ # Uploading into an untagged folder turns it into a
+ # calendar/addressbook by setting the derived tag.
+ if tag and not parent_item.get_meta("tag"):
+ new_props = parent_item.get_meta()
+ new_props["tag"] = tag
+ storage.check_and_sanitize_props(new_props)
+ parent_item.set_meta_all(new_props)
+ new_item = parent_item.upload(href, items[0])
+ except ValueError as e:
+ self.logger.warning(
+ "Bad PUT request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ headers = {"ETag": new_item.etag}
+ return client.CREATED, headers, None
+
+ def do_REPORT(self, environ, base_prefix, path, user):
+ """Manage REPORT request.
+
+ Runs a CalDAV/CardDAV report (e.g. calendar-query, multiget)
+ against the collection that contains ``path``.
+ """
+ if not self._access(user, path, "r"):
+ return NOT_ALLOWED
+ try:
+ xml_content = self._read_xml_content(environ)
+ except RuntimeError as e:
+ self.logger.warning(
+ "Bad REPORT request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ except socket.timeout as e:
+ self.logger.debug("client timed out", exc_info=True)
+ return REQUEST_TIMEOUT
+ with self.Collection.acquire_lock("r", user):
+ item = next(self.Collection.discover(path), None)
+ if not self._access(user, path, "r", item):
+ return NOT_ALLOWED
+ if not item:
+ return NOT_FOUND
+ # Reports always run against a collection; for a single item
+ # use the collection it belongs to.
+ if isinstance(item, storage.BaseCollection):
+ collection = item
+ else:
+ collection = item.collection
+ headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
+ try:
+ status, xml_answer = xmlutils.report(
+ base_prefix, path, xml_content, collection)
+ except ValueError as e:
+ self.logger.warning(
+ "Bad REPORT request on %r: %s", path, e, exc_info=True)
+ return BAD_REQUEST
+ return (status, headers, self._write_xml_content(xml_answer))
diff --git a/radicale/__main__.py b/radicale/__main__.py
new file mode 100644
index 0000000..3e133c7
--- /dev/null
+++ b/radicale/__main__.py
@@ -0,0 +1,291 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2011-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Radicale executable module.
+
+This module can be executed from a command line with ``$python -m radicale`` or
+from a Python program with ``radicale.__main__.run()``.
+
+"""
+
+import argparse
+import atexit
+import os
+import select
+import signal
+import socket
+import ssl
+import sys
+from wsgiref.simple_server import make_server
+
+from . import (VERSION, Application, RequestHandler, ThreadedHTTPServer,
+ ThreadedHTTPSServer, config, log, storage)
+
+
+def run():
+ """Run Radicale as a standalone server."""
+ # Get command-line arguments
+ parser = argparse.ArgumentParser(usage="radicale [OPTIONS]")
+
+ parser.add_argument("--version", action="version", version=VERSION)
+ parser.add_argument("--verify-storage", action="store_true",
+ help="check the storage for errors and exit")
+ parser.add_argument(
+ "-C", "--config", help="use a specific configuration file")
+
+ groups = {}
+ for section, values in config.INITIAL_CONFIG.items():
+ group = parser.add_argument_group(section)
+ groups[group] = []
+ for option, data in values.items():
+ kwargs = data.copy()
+ long_name = "--{0}-{1}".format(
+ section, option.replace("_", "-"))
+ args = kwargs.pop("aliases", [])
+ args.append(long_name)
+ kwargs["dest"] = "{0}_{1}".format(section, option)
+ groups[group].append(kwargs["dest"])
+ del kwargs["value"]
+ if "internal" in kwargs:
+ del kwargs["internal"]
+
+ if kwargs["type"] == bool:
+ del kwargs["type"]
+ kwargs["action"] = "store_const"
+ kwargs["const"] = "True"
+ opposite_args = kwargs.pop("opposite", [])
+ opposite_args.append("--no{0}".format(long_name[1:]))
+ group.add_argument(*args, **kwargs)
+
+ kwargs["const"] = "False"
+ kwargs["help"] = "do not {0} (opposite of {1})".format(
+ kwargs["help"], long_name)
+ group.add_argument(*opposite_args, **kwargs)
+ else:
+ group.add_argument(*args, **kwargs)
+
+ args = parser.parse_args()
+ if args.config is not None:
+ config_paths = [args.config] if args.config else []
+ ignore_missing_paths = False
+ else:
+ config_paths = ["/etc/radicale/config",
+ os.path.expanduser("~/.config/radicale/config")]
+ if "RADICALE_CONFIG" in os.environ:
+ config_paths.append(os.environ["RADICALE_CONFIG"])
+ ignore_missing_paths = True
+ try:
+ configuration = config.load(config_paths,
+ ignore_missing_paths=ignore_missing_paths)
+ except Exception as e:
+ print("ERROR: Invalid configuration: %s" % e, file=sys.stderr)
+ if args.logging_debug:
+ raise
+ exit(1)
+
+ # Update Radicale configuration according to arguments
+ for group, actions in groups.items():
+ section = group.title
+ for action in actions:
+ value = getattr(args, action)
+ if value is not None:
+ configuration.set(section, action.split('_', 1)[1], value)
+
+ if args.verify_storage:
+ # Write to stderr when storage verification is requested
+ configuration["logging"]["config"] = ""
+
+ # Start logging
+ filename = os.path.expanduser(configuration.get("logging", "config"))
+ debug = configuration.getboolean("logging", "debug")
+ try:
+ logger = log.start("radicale", filename, debug)
+ except Exception as e:
+ print("ERROR: Failed to start logger: %s" % e, file=sys.stderr)
+ if debug:
+ raise
+ exit(1)
+
+ if args.verify_storage:
+ logger.info("Verifying storage")
+ try:
+ Collection = storage.load(configuration, logger)
+ with Collection.acquire_lock("r"):
+ if not Collection.verify():
+ logger.error("Storage verifcation failed")
+ exit(1)
+ except Exception as e:
+ logger.error("An exception occurred during storage verification: "
+ "%s", e, exc_info=True)
+ exit(1)
+ return
+
+ try:
+ serve(configuration, logger)
+ except Exception as e:
+ logger.error("An exception occurred during server startup: %s", e,
+ exc_info=True)
+ exit(1)
+
+
+def daemonize(configuration, logger):
+ """Fork and decouple if Radicale is configured as daemon.
+
+ Creates the PID file with O_CREAT|O_EXCL so a stale or concurrent
+ daemon is detected before forking; the parent writes the child's PID
+ and exits, the child detaches from the terminal and redirects the
+ standard streams to /dev/null.
+ """
+ # Check and create PID file in a race-free manner
+ if configuration.get("server", "pid"):
+ try:
+ pid_path = os.path.abspath(os.path.expanduser(
+ configuration.get("server", "pid")))
+ pid_fd = os.open(
+ pid_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+ except OSError as e:
+ raise OSError("PID file exists: %r" %
+ configuration.get("server", "pid")) from e
+ pid = os.fork()
+ if pid:
+ # Write PID
+ if configuration.get("server", "pid"):
+ with os.fdopen(pid_fd, "w") as pid_file:
+ pid_file.write(str(pid))
+ sys.exit()
+ if configuration.get("server", "pid"):
+ os.close(pid_fd)
+
+ # Register exit function
+ # NOTE(review): cleanup reads pid_path, which is only bound when a PID
+ # file is configured — presumably this registration belongs inside the
+ # "if configuration.get(...)" branch; confirm against upstream.
+ def cleanup():
+ """Remove the PID files."""
+ logger.debug("Cleaning up")
+ # Remove PID file
+ os.unlink(pid_path)
+ atexit.register(cleanup)
+ # Decouple environment
+ os.chdir("/")
+ os.setsid()
+ with open(os.devnull, "r") as null_in:
+ os.dup2(null_in.fileno(), sys.stdin.fileno())
+ with open(os.devnull, "w") as null_out:
+ os.dup2(null_out.fileno(), sys.stdout.fileno())
+ os.dup2(null_out.fileno(), sys.stderr.fileno())
+
+
+def serve(configuration, logger):
+ """Serve radicale from configuration.
+
+ Binds one (optionally TLS) server per configured host, installs
+ SIGTERM/SIGINT handlers, optionally daemonizes, then multiplexes
+ incoming connections with select() until shutdown is requested.
+ """
+ logger.info("Starting Radicale")
+
+ # Create collection servers
+ servers = {}
+ if configuration.getboolean("server", "ssl"):
+ server_class = ThreadedHTTPSServer
+ server_class.certificate = configuration.get("server", "certificate")
+ server_class.key = configuration.get("server", "key")
+ server_class.certificate_authority = configuration.get(
+ "server", "certificate_authority")
+ server_class.ciphers = configuration.get("server", "ciphers")
+ server_class.protocol = getattr(
+ ssl, configuration.get("server", "protocol"), ssl.PROTOCOL_SSLv23)
+ # Test if the SSL files can be read
+ # (fail fast with a clear message instead of during the handshake)
+ for name in ["certificate", "key"] + (
+ ["certificate_authority"]
+ if server_class.certificate_authority else []):
+ filename = getattr(server_class, name)
+ try:
+ open(filename, "r").close()
+ except OSError as e:
+ raise RuntimeError("Failed to read SSL %s %r: %s" %
+ (name, filename, e)) from e
+ else:
+ server_class = ThreadedHTTPServer
+ server_class.client_timeout = configuration.getint("server", "timeout")
+ server_class.max_connections = configuration.getint(
+ "server", "max_connections")
+ server_class.logger = logger
+
+ RequestHandler.logger = logger
+ if not configuration.getboolean("server", "dns_lookup"):
+ # Skip reverse DNS lookups in access logs: just use the IP.
+ RequestHandler.address_string = lambda self: self.client_address[0]
+
+ shutdown_program = False
+
+ for host in configuration.get("server", "hosts").split(","):
+ try:
+ # "[]" stripping allows IPv6 literals like [::1]:5232.
+ address, port = host.strip().rsplit(":", 1)
+ address, port = address.strip("[] "), int(port)
+ except ValueError as e:
+ raise RuntimeError(
+ "Failed to parse address %r: %s" % (host, e)) from e
+ application = Application(configuration, logger)
+ try:
+ server = make_server(
+ address, port, application, server_class, RequestHandler)
+ except OSError as e:
+ raise RuntimeError(
+ "Failed to start server %r: %s" % (host, e)) from e
+ servers[server.socket] = server
+ logger.info("Listening to %r on port %d%s",
+ server.server_name, server.server_port, " using SSL"
+ if configuration.getboolean("server", "ssl") else "")
+
+ # Create a socket pair to notify the select syscall of program shutdown
+ # This is not available in python < 3.5 on Windows
+ if hasattr(socket, "socketpair"):
+ shutdown_program_socket_in, shutdown_program_socket_out = (
+ socket.socketpair())
+ else:
+ shutdown_program_socket_in, shutdown_program_socket_out = None, None
+
+ # SIGTERM and SIGINT (aka KeyboardInterrupt) should just mark this for
+ # shutdown
+ def shutdown(*args):
+ nonlocal shutdown_program
+ if shutdown_program:
+ # Ignore following signals
+ return
+ logger.info("Stopping Radicale")
+ shutdown_program = True
+ if shutdown_program_socket_in:
+ # Wake up the select() below so the loop notices the flag.
+ shutdown_program_socket_in.sendall(b"goodbye")
+ signal.signal(signal.SIGTERM, shutdown)
+ signal.signal(signal.SIGINT, shutdown)
+
+ # Main loop: wait for requests on any of the servers or program shutdown
+ sockets = list(servers.keys())
+ if shutdown_program_socket_out:
+ # Use socket pair to get notified of program shutdown
+ sockets.append(shutdown_program_socket_out)
+ select_timeout = None
+ if not shutdown_program_socket_out or os.name == "nt":
+ # Fallback to busy waiting. (select.select blocks SIGINT on Windows.)
+ select_timeout = 1.0
+ if configuration.getboolean("server", "daemon"):
+ daemonize(configuration, logger)
+ logger.info("Radicale server ready")
+ while not shutdown_program:
+ try:
+ rlist, _, xlist = select.select(
+ sockets, [], sockets, select_timeout)
+ except (KeyboardInterrupt, select.error):
+ # SIGINT is handled by signal handler above
+ rlist, xlist = [], []
+ if xlist:
+ raise RuntimeError("unhandled socket error")
+ if rlist:
+ # Handle one ready server per iteration; the shutdown socket
+ # is not in `servers`, so it simply re-checks the loop flag.
+ server = servers.get(rlist[0])
+ if server:
+ server.handle_request()
+
+
+if __name__ == "__main__":
+ run()
diff --git a/radicale/auth.py b/radicale/auth.py
new file mode 100644
index 0000000..fc5d425
--- /dev/null
+++ b/radicale/auth.py
@@ -0,0 +1,274 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2008 Nicolas Kandel
+# Copyright © 2008 Pascal Halter
+# Copyright © 2008-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Authentication management.
+
+Default is htpasswd authentication.
+
+Apache's htpasswd command (httpd.apache.org/docs/programs/htpasswd.html)
+manages a file for storing user credentials. It can encrypt passwords using
+different methods, e.g. BCRYPT, MD5-APR1 (a version of MD5 modified for
+Apache), SHA1, or by using the system's CRYPT routine. The CRYPT and SHA1
+encryption methods implemented by htpasswd are considered as insecure. MD5-APR1
+provides medium security as of 2015. Only BCRYPT can be considered secure by
+current standards.
+
+MD5-APR1-encrypted credentials can be written by all versions of htpasswd (it
+is the default, in fact), whereas BCRYPT requires htpasswd 2.4.x or newer.
+
+The `is_authenticated(user, password)` function provided by this module
+verifies the user-given credentials by parsing the htpasswd credential file
+pointed to by the ``htpasswd_filename`` configuration value while assuming
+the password encryption method specified via the ``htpasswd_encryption``
+configuration value.
+
+The following htpasswd password encryption methods are supported by Radicale
+out-of-the-box:
+
+ - plain-text (created by htpasswd -p...) -- INSECURE
+ - CRYPT (created by htpasswd -d...) -- INSECURE
+ - SHA1 (created by htpasswd -s...) -- INSECURE
+
+When passlib (https://pypi.python.org/pypi/passlib) is importable, the
+following significantly more secure schemes are parsable by Radicale:
+
+ - MD5-APR1 (htpasswd -m...) -- htpasswd's default method
+ - BCRYPT (htpasswd -B...) -- Requires htpasswd 2.4.x
+
+"""
+
+import base64
+import functools
+import hashlib
+import hmac
+import os
+from importlib import import_module
+
+INTERNAL_TYPES = ("None", "none", "remote_user", "http_x_remote_user",
+ "htpasswd")
+
+
+def load(configuration, logger):
+ """Load the authentication manager chosen in configuration.
+
+ The "auth.type" value selects one of the built-in backends, or is
+ treated as a module path whose ``Auth`` class is imported dynamically.
+ Returns an instance of the selected class.
+ """
+ auth_type = configuration.get("auth", "type")
+ if auth_type in ("None", "none"): # DEPRECATED: use "none"
+ class_ = NoneAuth
+ elif auth_type == "remote_user":
+ class_ = RemoteUserAuth
+ elif auth_type == "http_x_remote_user":
+ class_ = HttpXRemoteUserAuth
+ elif auth_type == "htpasswd":
+ class_ = Auth
+ else:
+ # Plugin backend: import the named module and use its Auth class.
+ try:
+ class_ = import_module(auth_type).Auth
+ except Exception as e:
+ raise RuntimeError("Failed to load authentication module %r: %s" %
+ (auth_type, e)) from e
+ logger.info("Authentication type is %r", auth_type)
+ return class_(configuration, logger)
+
+
+class BaseAuth:
+ """Base class for authentication backends.
+
+ Subclasses implement ``is_authenticated`` (or the newer
+ ``is_authenticated2``) and may override ``get_external_login`` and
+ ``map_login_to_user``.
+ """
+
+ def __init__(self, configuration, logger):
+ # Keep references for subclasses to read settings and log.
+ self.configuration = configuration
+ self.logger = logger
+
+ def get_external_login(self, environ):
+ """Optionally provide the login and password externally.
+
+ ``environ`` a dict with the WSGI environment
+
+ If ``()`` is returned, Radicale handles HTTP authentication.
+ Otherwise, returns a tuple ``(login, password)``. For anonymous users
+ ``login`` must be ``""``.
+
+ """
+ return ()
+
+ def is_authenticated2(self, login, user, password):
+ """Validate credentials.
+
+ ``login`` the login name
+
+ ``user`` the user from ``map_login_to_user(login)``.
+
+ ``password`` the login password
+
+ """
+ # Default implementation delegates to the legacy single-name API.
+ return self.is_authenticated(user, password)
+
+ def is_authenticated(self, user, password):
+ """Validate credentials.
+
+ DEPRECATED: use ``is_authenticated2`` instead
+
+ """
+ raise NotImplementedError
+
+ def map_login_to_user(self, login):
+ """Map login name to internal user.
+
+ ``login`` the login name, ``""`` for anonymous users
+
+ Returns a string with the user name.
+ If a login can't be mapped to an user, return ``login`` and
+ return ``False`` in ``is_authenticated2(...)``.
+
+ """
+ return login
+
+
+class NoneAuth(BaseAuth):
+ """Authentication backend that accepts any credentials."""
+
+ def is_authenticated(self, user, password):
+ return True
+
+
+class Auth(BaseAuth):
+ """htpasswd-file authentication backend.
+
+ Reads credentials from the file named by "auth.htpasswd_filename" and
+ verifies passwords with the method selected by
+ "auth.htpasswd_encryption" (plain, crypt, sha1, ssha, md5, bcrypt).
+ """
+
+ def __init__(self, configuration, logger):
+ super().__init__(configuration, logger)
+ self.filename = os.path.expanduser(
+ configuration.get("auth", "htpasswd_filename"))
+ self.encryption = configuration.get("auth", "htpasswd_encryption")
+
+ # Bind self.verify to the hash checker matching the configured
+ # encryption; optional dependencies are imported lazily so a
+ # missing passlib/crypt only fails for the methods that need it.
+ if self.encryption == "ssha":
+ self.verify = self._ssha
+ elif self.encryption == "sha1":
+ self.verify = self._sha1
+ elif self.encryption == "plain":
+ self.verify = self._plain
+ elif self.encryption == "md5":
+ try:
+ from passlib.hash import apr_md5_crypt
+ except ImportError as e:
+ raise RuntimeError(
+ "The htpasswd encryption method 'md5' requires "
+ "the passlib module.") from e
+ self.verify = functools.partial(self._md5apr1, apr_md5_crypt)
+ elif self.encryption == "bcrypt":
+ try:
+ from passlib.hash import bcrypt
+ except ImportError as e:
+ raise RuntimeError(
+ "The htpasswd encryption method 'bcrypt' requires "
+ "the passlib module with bcrypt support.") from e
+ # A call to `encrypt` raises passlib.exc.MissingBackendError with a
+ # good error message if bcrypt backend is not available. Trigger
+ # this here.
+ bcrypt.encrypt("test-bcrypt-backend")
+ self.verify = functools.partial(self._bcrypt, bcrypt)
+ elif self.encryption == "crypt":
+ try:
+ import crypt
+ except ImportError as e:
+ raise RuntimeError(
+ "The htpasswd encryption method 'crypt' requires "
+ "the crypt() system support.") from e
+ self.verify = functools.partial(self._crypt, crypt)
+ else:
+ raise RuntimeError(
+ "The htpasswd encryption method %r is not "
+ "supported." % self.encryption)
+
+ def _plain(self, hash_value, password):
+ """Check if ``hash_value`` and ``password`` match, plain method."""
+ # compare_digest gives a constant-time comparison.
+ return hmac.compare_digest(hash_value, password)
+
+ def _crypt(self, crypt, hash_value, password):
+ """Check if ``hash_value`` and ``password`` match, crypt method."""
+ hash_value = hash_value.strip()
+ return hmac.compare_digest(crypt.crypt(password, hash_value),
+ hash_value)
+
+ def _sha1(self, hash_value, password):
+ """Check if ``hash_value`` and ``password`` match, sha1 method."""
+ # Stored form is "{SHA}" followed by the base64 SHA-1 digest.
+ hash_value = base64.b64decode(hash_value.strip().replace(
+ "{SHA}", "").encode("ascii"))
+ password = password.encode(self.configuration.get("encoding", "stock"))
+ sha1 = hashlib.sha1()
+ sha1.update(password)
+ return hmac.compare_digest(sha1.digest(), hash_value)
+
+ def _ssha(self, hash_value, password):
+ """Check if ``hash_value`` and ``password`` match, salted sha1 method.
+
+ This method is not directly supported by htpasswd, but it can be
+ written with e.g. openssl, and nginx can parse it.
+
+ """
+ hash_value = base64.b64decode(hash_value.strip().replace(
+ "{SSHA}", "").encode("ascii"))
+ password = password.encode(self.configuration.get("encoding", "stock"))
+ # First 20 bytes are the SHA-1 digest, the rest is the salt.
+ salt_value = hash_value[20:]
+ hash_value = hash_value[:20]
+ sha1 = hashlib.sha1()
+ sha1.update(password)
+ sha1.update(salt_value)
+ return hmac.compare_digest(sha1.digest(), hash_value)
+
+ def _bcrypt(self, bcrypt, hash_value, password):
+ hash_value = hash_value.strip()
+ return bcrypt.verify(password, hash_value)
+
+ def _md5apr1(self, md5_apr1, hash_value, password):
+ hash_value = hash_value.strip()
+ return md5_apr1.verify(password, hash_value)
+
+ def is_authenticated(self, user, password):
+ """Validate credentials.
+
+ Iterate through htpasswd credential file until user matches, extract
+ hash (encrypted password) and check hash against user-given password,
+ using the method specified in the Radicale config.
+
+ The content of the file is not cached because reading is generally a
+ very cheap operation, and it's useful to get live updates of the
+ htpasswd file.
+
+ """
+ try:
+ with open(self.filename) as f:
+ for line in f:
+ line = line.rstrip("\n")
+ # Skip blank lines and comment lines.
+ if line.lstrip() and not line.lstrip().startswith("#"):
+ try:
+ login, hash_value = line.split(":", maxsplit=1)
+ # Always compare both login and password to avoid
+ # timing attacks, see #591.
+ login_ok = hmac.compare_digest(login, user)
+ password_ok = self.verify(hash_value, password)
+ if login_ok and password_ok:
+ return True
+ except ValueError as e:
+ raise RuntimeError("Invalid htpasswd file %r: %s" %
+ (self.filename, e)) from e
+ except OSError as e:
+ raise RuntimeError("Failed to load htpasswd file %r: %s" %
+ (self.filename, e)) from e
+ return False
+
+
+class RemoteUserAuth(NoneAuth):
+ """Trust the ``REMOTE_USER`` WSGI variable set by the web server."""
+
+ def get_external_login(self, environ):
+ return environ.get("REMOTE_USER", ""), ""
+
+
+class HttpXRemoteUserAuth(NoneAuth):
+ """Trust the ``X-Remote-User`` HTTP header set by a reverse proxy."""
+
+ def get_external_login(self, environ):
+ return environ.get("HTTP_X_REMOTE_USER", ""), ""
diff --git a/radicale/config.py b/radicale/config.py
new file mode 100644
index 0000000..7a6fa6b
--- /dev/null
+++ b/radicale/config.py
@@ -0,0 +1,259 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2008-2017 Guillaume Ayoub
+# Copyright © 2008 Nicolas Kandel
+# Copyright © 2008 Pascal Halter
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Radicale configuration module.
+
+Give a configparser-like interface to read and write configuration.
+
+"""
+
+import math
+import os
+from collections import OrderedDict
+from configparser import RawConfigParser as ConfigParser
+
+from . import auth, rights, storage, web
+
+
def positive_int(value):
    """Convert ``value`` to ``int`` and reject negative numbers."""
    number = int(value)
    if number < 0:
        raise ValueError("value is negative: %d" % number)
    return number
+
+
def positive_float(value):
    """Convert ``value`` to a finite, non-negative ``float``.

    Raises ``ValueError`` for NaN, infinite or negative input.
    """
    value = float(value)
    # Check NaN before finiteness: NaN is also not finite, so the original
    # check order made the NaN branch unreachable and misreported NaN as
    # "infinite".
    if math.isnan(value):
        raise ValueError("value is not a number")
    if math.isinf(value):
        raise ValueError("value is infinite")
    if value < 0:
        raise ValueError("value is negative: %f" % value)
    return value
+
+
# Default configuration.
# NOTE: every "value" must be a string — load() feeds them to
# ConfigParser.set(), which raises TypeError for non-string values.
INITIAL_CONFIG = OrderedDict([
    ("server", OrderedDict([
        ("hosts", {
            "value": "127.0.0.1:5232",
            "help": "set server hostnames including ports",
            "aliases": ["-H", "--hosts"],
            "type": str}),
        ("daemon", {
            "value": "False",
            "help": "launch as daemon",
            "aliases": ["-d", "--daemon"],
            "opposite": ["-f", "--foreground"],
            "type": bool}),
        ("pid", {
            "value": "",
            "help": "set PID filename for daemon mode",
            "aliases": ["-p", "--pid"],
            "type": str}),
        ("max_connections", {
            "value": "20",
            "help": "maximum number of parallel connections",
            "type": positive_int}),
        ("max_content_length", {
            "value": "10000000",
            "help": "maximum size of request body in bytes",
            "type": positive_int}),
        ("timeout", {
            "value": "10",
            "help": "socket timeout",
            "type": positive_int}),
        ("ssl", {
            "value": "False",
            "help": "use SSL connection",
            "aliases": ["-s", "--ssl"],
            "opposite": ["-S", "--no-ssl"],
            "type": bool}),
        ("certificate", {
            "value": "/etc/ssl/radicale.cert.pem",
            "help": "set certificate file",
            "aliases": ["-c", "--certificate"],
            "type": str}),
        ("key", {
            "value": "/etc/ssl/radicale.key.pem",
            "help": "set private key file",
            "aliases": ["-k", "--key"],
            "type": str}),
        ("certificate_authority", {
            "value": "",
            "help": "set CA certificate for validating clients",
            "aliases": ["--certificate-authority"],
            "type": str}),
        ("protocol", {
            "value": "PROTOCOL_TLSv1_2",
            "help": "SSL protocol used",
            "type": str}),
        ("ciphers", {
            "value": "",
            "help": "available ciphers",
            "type": str}),
        ("dns_lookup", {
            "value": "True",
            "help": "use reverse DNS to resolve client address in logs",
            "type": bool}),
        ("realm", {
            "value": "Radicale - Password Required",
            "help": "message displayed when a password is needed",
            "type": str})])),
    ("encoding", OrderedDict([
        ("request", {
            "value": "utf-8",
            "help": "encoding for responding requests",
            "type": str}),
        ("stock", {
            "value": "utf-8",
            "help": "encoding for storing local collections",
            "type": str})])),
    ("auth", OrderedDict([
        ("type", {
            "value": "none",
            "help": "authentication method",
            "type": str,
            "internal": auth.INTERNAL_TYPES}),
        ("htpasswd_filename", {
            "value": "/etc/radicale/users",
            "help": "htpasswd filename",
            "type": str}),
        ("htpasswd_encryption", {
            "value": "bcrypt",
            "help": "htpasswd encryption method",
            "type": str}),
        ("delay", {
            "value": "1",
            "help": "incorrect authentication delay",
            "type": positive_float})])),
    ("rights", OrderedDict([
        ("type", {
            "value": "owner_only",
            "help": "rights backend",
            "type": str,
            "internal": rights.INTERNAL_TYPES}),
        ("file", {
            "value": "/etc/radicale/rights",
            "help": "file for rights management from_file",
            "type": str})])),
    ("storage", OrderedDict([
        ("type", {
            "value": "multifilesystem",
            "help": "storage backend",
            "type": str,
            "internal": storage.INTERNAL_TYPES}),
        ("filesystem_folder", {
            "value": os.path.expanduser(
                "/var/lib/radicale/collections"),
            "help": "path where collections are stored",
            "type": str}),
        ("max_sync_token_age", {
            # BUGFIX: must be a string like every other default value;
            # ConfigParser.set() raises TypeError for an int.
            "value": "2592000",  # 30 days
            "help": "delete sync token that are older",
            "type": int}),
        ("filesystem_fsync", {
            "value": "True",
            "help": "sync all changes to filesystem during requests",
            "type": bool}),
        ("filesystem_locking", {
            "value": "True",
            "help": "lock the storage while accessing it",
            "type": bool}),
        ("filesystem_close_lock_file", {
            "value": "False",
            "help": "close the lock file when no more clients are waiting",
            "type": bool}),
        ("hook", {
            "value": "",
            "help": "command that is run after changes to storage",
            "type": str})])),
    ("web", OrderedDict([
        ("type", {
            "value": "internal",
            "help": "web interface backend",
            "type": str,
            "internal": web.INTERNAL_TYPES})])),
    ("logging", OrderedDict([
        ("config", {
            "value": "",
            "help": "logging configuration file",
            "type": str}),
        ("debug", {
            "value": "False",
            "help": "print debug information",
            "aliases": ["-D", "--debug"],
            "type": bool}),
        ("full_environment", {
            "value": "False",
            "help": "store all environment variables",
            "type": bool}),
        ("mask_passwords", {
            "value": "True",
            "help": "mask passwords in logs",
            "type": bool})]))])
+
+
def load(paths=(), extra_config=None, ignore_missing_paths=True):
    """Load the configuration.

    ``paths`` iterable of config file paths, read in order (later files
    override earlier ones).

    ``extra_config`` nested dict of overrides, applied before the files are
    read.

    ``ignore_missing_paths`` when false, a missing path raises RuntimeError.

    """
    config = ConfigParser()
    # Seed the parser with the built-in defaults.
    for section, values in INITIAL_CONFIG.items():
        config.add_section(section)
        for key, data in values.items():
            config.set(section, key, data["value"])
    if extra_config:
        for section, values in extra_config.items():
            for key, value in values.items():
                config.set(section, key, value)
    for path in paths:
        # Empty path entries are always skipped.
        if path or not ignore_missing_paths:
            try:
                if not config.read(path) and not ignore_missing_paths:
                    raise RuntimeError("No such file: %r" % path)
            except Exception as e:
                raise RuntimeError(
                    "Failed to load config file %r: %s" % (path, e)) from e
    # Check the configuration
    for section in config.sections():
        # The "headers" section holds free-form extra HTTP headers.
        if section == "headers":
            continue
        if section not in INITIAL_CONFIG:
            raise RuntimeError("Invalid section %r in config" % section)
        # Plugin backends (type not in "internal") may define extra options.
        allow_extra_options = ("type" in INITIAL_CONFIG[section] and
                               config.get(section, "type") not in
                               INITIAL_CONFIG[section]["type"].get("internal",
                                                                   ()))
        for option in config[section]:
            if option not in INITIAL_CONFIG[section]:
                if allow_extra_options:
                    continue
                raise RuntimeError("Invalid option %r in section %r in "
                                   "config" % (option, section))
            # Validate each value by converting it with the declared type.
            type_ = INITIAL_CONFIG[section][option]["type"]
            try:
                if type_ == bool:
                    config.getboolean(section, option)
                else:
                    type_(config.get(section, option))
            except Exception as e:
                raise RuntimeError(
                    "Invalid %s value for option %r in section %r in config: "
                    "%r" % (type_.__name__, option, section,
                            config.get(section, option))) from e
    return config
diff --git a/radicale/log.py b/radicale/log.py
new file mode 100644
index 0000000..e803a19
--- /dev/null
+++ b/radicale/log.py
@@ -0,0 +1,75 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2011-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Radicale logging module.
+
+Manage logging from a configuration file. For more information, see:
+http://docs.python.org/library/logging.config.html
+
+"""
+
+import logging
+import logging.config
+import signal
+import sys
+
+
def configure_from_file(logger, filename, debug):
    """Load the logging configuration file ``filename``.

    With ``debug`` set, force DEBUG level on ``logger`` and all of its
    handlers, overriding the configured levels.

    """
    logging.config.fileConfig(filename, disable_existing_loggers=False)
    if debug:
        for target in [logger] + logger.handlers:
            target.setLevel(logging.DEBUG)
    return logger
+
+
class RemoveTracebackFilter(logging.Filter):
    """Logging filter that strips exception tracebacks from records."""

    def filter(self, record):
        # Drop the traceback but always keep the record itself.
        record.exc_info = None
        return True
+
+
def start(name="radicale", filename=None, debug=False):
    """Start the logging according to the configuration.

    ``filename`` optional logging configuration file; without it a default
    handler writing to stderr is installed.

    ``debug`` enables DEBUG level; otherwise tracebacks are stripped from
    log records.

    """
    logger = logging.getLogger(name)
    if debug:
        logger.setLevel(logging.DEBUG)
    else:
        # Without debug, exception tracebacks are removed from all records.
        logger.addFilter(RemoveTracebackFilter())
    if filename:
        # Configuration taken from file
        try:
            configure_from_file(logger, filename, debug)
        except Exception as e:
            raise RuntimeError("Failed to load logging configuration file %r: "
                               "%s" % (filename, e)) from e
        # Reload config on SIGHUP (UNIX only)
        if hasattr(signal, "SIGHUP"):
            def handler(signum, frame):
                try:
                    configure_from_file(logger, filename, debug)
                except Exception as e:
                    logger.error("Failed to reload logging configuration file "
                                 "%r: %s", filename, e, exc_info=True)
            signal.signal(signal.SIGHUP, handler)
    else:
        # Default configuration, log to standard error
        handler = logging.StreamHandler(sys.stderr)
        handler.setFormatter(
            logging.Formatter("[%(thread)x] %(levelname)s: %(message)s"))
        logger.addHandler(handler)
    return logger
diff --git a/radicale/rights.py b/radicale/rights.py
new file mode 100644
index 0000000..76f4ad3
--- /dev/null
+++ b/radicale/rights.py
@@ -0,0 +1,176 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2012-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Rights backends.
+
+This module loads the rights backend, according to the rights
+configuration.
+
+Default rights are based on a regex-based file whose name is specified in the
+config (section "rights", key "file").
+
+Authentication login is matched against the "user" key, and collection's path
+is matched against the "collection" key. You can use Python's ConfigParser
+interpolation values %(login)s and %(path)s. You can also get groups from the
+user regex in the collection with {0}, {1}, etc.
+
+For example, for the "user" key, ".+" means "authenticated user" and ".*"
+means "anybody" (including anonymous users).
+
+Section names are only used for naming the rule.
+
+Leading or ending slashes are trimmed from collection's path.
+
+"""
+
+import configparser
+import os.path
+import posixpath
+import re
+from importlib import import_module
+
+from . import storage
+
# Rights backends bundled with Radicale (selectable by bare name in config).
INTERNAL_TYPES = ("None", "none", "authenticated", "owner_write", "owner_only",
                  "from_file")
+
+
def load(configuration, logger):
    """Load the rights manager chosen in configuration."""
    rights_type = configuration.get("rights", "type")
    if configuration.get("auth", "type") in ("None", "none"):  # DEPRECATED
        # Without authentication everybody is granted full access.
        rights_type = "None"
    internal_backends = {
        "None": NoneRights,  # DEPRECATED: use "none"
        "none": NoneRights,
        "authenticated": AuthenticatedRights,
        "owner_write": OwnerWriteRights,
        "owner_only": OwnerOnlyRights,
        "from_file": Rights,
    }
    if rights_type in internal_backends:
        rights_class = internal_backends[rights_type]
    else:
        # Any other value is treated as the module path of a plugin.
        try:
            rights_class = import_module(rights_type).Rights
        except Exception as e:
            raise RuntimeError("Failed to load rights module %r: %s" %
                               (rights_type, e)) from e
    logger.info("Rights type is %r", rights_type)
    return rights_class(configuration, logger)
+
+
class BaseRights:
    """Base class for rights backends."""

    def __init__(self, configuration, logger):
        self.configuration = configuration
        self.logger = logger

    def authorized(self, user, path, permission):
        """Check if the user is allowed to read or write the collection.

        If ``user`` is empty, check for anonymous rights.

        ``path`` is sanitized.

        ``permission`` is "r" or "w".

        """
        raise NotImplementedError

    def authorized_item(self, user, path, permission):
        """Check if the user is allowed to read or write the item.

        Defaults to the rights of the item's parent collection.

        """
        path = storage.sanitize_path(path)
        parent_path = storage.sanitize_path(
            "/%s/" % posixpath.dirname(path.strip("/")))
        return self.authorized(user, parent_path, permission)
+
+
class NoneRights(BaseRights):
    """Grant read and write access to everybody, even anonymous users."""

    def authorized(self, user, path, permission):
        return True
+
+
class AuthenticatedRights(BaseRights):
    """Grant full access to any authenticated (non-empty) user."""

    def authorized(self, user, path, permission):
        return bool(user)
+
+
class OwnerWriteRights(BaseRights):
    """Authenticated users may read everything; only the owner may write."""

    def authorized(self, user, path, permission):
        if not user:
            return False
        if permission == "r":
            return True
        sane_path = storage.sanitize_path(path).strip("/")
        # The owner is the first component of the collection path.
        return user == sane_path.split("/", maxsplit=1)[0]
+
+
class OwnerOnlyRights(BaseRights):
    """Only the owner may access a collection; the root collection is
    readable by any authenticated user."""

    def authorized(self, user, path, permission):
        if not user:
            return False
        sane_path = storage.sanitize_path(path).strip("/")
        if permission == "r" and not sane_path:
            # The root collection is readable by everyone authenticated.
            return True
        return user == sane_path.split("/", maxsplit=1)[0]

    def authorized_item(self, user, path, permission):
        # Items directly below the root have no owning collection and are
        # never accessible.
        sane_path = storage.sanitize_path(path).strip("/")
        if "/" not in sane_path:
            return False
        return super().authorized_item(user, path, permission)
+
+
class Rights(BaseRights):
    """Regex-based rights backend configured from a file ("from_file")."""

    def __init__(self, configuration, logger):
        super().__init__(configuration, logger)
        self.filename = os.path.expanduser(configuration.get("rights", "file"))

    def authorized(self, user, path, permission):
        user = user or ""
        sane_path = storage.sanitize_path(path).strip("/")
        # Prevent "regex injection"
        user_escaped = re.escape(user)
        sane_path_escaped = re.escape(sane_path)
        # ConfigParser interpolation substitutes %(login)s and %(path)s in
        # the rights file with the escaped values.
        regex = configparser.ConfigParser(
            {"login": user_escaped, "path": sane_path_escaped})
        try:
            if not regex.read(self.filename):
                raise RuntimeError("No such file: %r" %
                                   self.filename)
        except Exception as e:
            raise RuntimeError("Failed to load rights file %r: %s" %
                               (self.filename, e)) from e
        # The first section whose "user" and "collection" patterns both
        # match decides the permission.
        for section in regex.sections():
            try:
                re_user_pattern = regex.get(section, "user")
                re_collection_pattern = regex.get(section, "collection")
                # Emulate fullmatch
                user_match = re.match(r"(?:%s)\Z" % re_user_pattern, user)
                collection_match = user_match and re.match(
                    r"(?:%s)\Z" % re_collection_pattern.format(
                        *map(re.escape, user_match.groups())), sane_path)
            except Exception as e:
                raise RuntimeError("Error in section %r of rights file %r: "
                                   "%s" % (section, self.filename, e)) from e
            if user_match and collection_match:
                self.logger.debug("Rule %r:%r matches %r:%r from section %r",
                                  user, sane_path, re_user_pattern,
                                  re_collection_pattern, section)
                return permission in regex.get(section, "permission")
            else:
                self.logger.debug("Rule %r:%r doesn't match %r:%r from section"
                                  " %r", user, sane_path, re_user_pattern,
                                  re_collection_pattern, section)
        self.logger.info(
            "Rights: %r:%r doesn't match any section", user, sane_path)
        return False
diff --git a/radicale/storage.py b/radicale/storage.py
new file mode 100644
index 0000000..9ac3842
--- /dev/null
+++ b/radicale/storage.py
@@ -0,0 +1,1683 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2014 Jean-Marc Martins
+# Copyright © 2012-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Storage backends.
+
+This module loads the storage backend, according to the storage configuration.
+
+Default storage uses one folder per collection and one file per collection
+entry.
+
+"""
+
+import binascii
+import contextlib
+import json
+import os
+import pickle
+import posixpath
+import shlex
+import subprocess
+import sys
+import threading
+import time
+from contextlib import contextmanager
+from hashlib import md5
+from importlib import import_module
+from itertools import chain, groupby
+from math import log
+from random import getrandbits
+from tempfile import NamedTemporaryFile, TemporaryDirectory
+
+import vobject
+
if sys.version_info >= (3, 5):
    # HACK: Avoid import cycle for Python < 3.5
    from . import xmlutils

if os.name == "nt":
    # Windows: file locking is done through the Win32 API
    # (LockFileEx/UnlockFileEx) via ctypes.
    import ctypes
    import ctypes.wintypes
    import msvcrt

    LOCKFILE_EXCLUSIVE_LOCK = 2
    # ULONG_PTR is pointer-sized: 32 or 64 bit depending on the build.
    if ctypes.sizeof(ctypes.c_void_p) == 4:
        ULONG_PTR = ctypes.c_uint32
    else:
        ULONG_PTR = ctypes.c_uint64

    class Overlapped(ctypes.Structure):
        # Mirrors the Win32 OVERLAPPED structure layout.
        _fields_ = [
            ("internal", ULONG_PTR),
            ("internal_high", ULONG_PTR),
            ("offset", ctypes.wintypes.DWORD),
            ("offset_high", ctypes.wintypes.DWORD),
            ("h_event", ctypes.wintypes.HANDLE)]

    lock_file_ex = ctypes.windll.kernel32.LockFileEx
    lock_file_ex.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.POINTER(Overlapped)]
    lock_file_ex.restype = ctypes.wintypes.BOOL
    unlock_file_ex = ctypes.windll.kernel32.UnlockFileEx
    unlock_file_ex.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.POINTER(Overlapped)]
    unlock_file_ex.restype = ctypes.wintypes.BOOL
elif os.name == "posix":
    # POSIX: file locking is done with fcntl.
    import fcntl

# Storage backends bundled with Radicale.
INTERNAL_TYPES = ("multifilesystem",)
+
+
def load(configuration, logger):
    """Load the storage manager chosen in configuration."""
    if sys.version_info < (3, 5):
        # HACK: Avoid import cycle for Python < 3.5
        global xmlutils
        from . import xmlutils
    storage_type = configuration.get("storage", "type")
    if storage_type == "multifilesystem":
        collection_class = Collection
    else:
        # Any other value is treated as the module path of a plugin.
        try:
            collection_class = import_module(storage_type).Collection
        except Exception as e:
            raise RuntimeError("Failed to load storage module %r: %s" %
                               (storage_type, e)) from e
    logger.info("Storage type is %r", storage_type)

    class CollectionCopy(collection_class):
        """Collection copy, avoids overriding the original class attributes."""
    CollectionCopy.configuration = configuration
    CollectionCopy.logger = logger
    return CollectionCopy
+
+
def check_and_sanitize_item(vobject_item, is_collection=False, uid=None,
                            tag=None):
    """Check vobject items for common errors and add missing UIDs.

    ``is_collection`` indicates that the vobject_item contains unrelated
    components (mixed component types and differing UIDs are then allowed).

    If ``uid`` is not set, the UID is generated randomly.

    """
    if tag and tag not in ("VCALENDAR", "VADDRESSBOOK"):
        raise ValueError("Unsupported collection tag: %r" % tag)
    if vobject_item.name == "VCALENDAR" and tag == "VCALENDAR":
        component_name = None
        object_uid = None
        object_uid_set = False
        for component in vobject_item.components():
            # https://tools.ietf.org/html/rfc4791#section-4.1
            if component.name == "VTIMEZONE":
                continue
            # All components in one object must share a single type ...
            if component_name is None or is_collection:
                component_name = component.name
            elif component_name != component.name:
                raise ValueError("Multiple component types in object: %r, %r" %
                                 (component_name, component.name))
            if component_name not in ("VTODO", "VEVENT", "VJOURNAL"):
                continue
            # ... and a single UID (unless this is a whole collection).
            component_uid = get_uid(component)
            if not object_uid_set or is_collection:
                object_uid_set = True
                object_uid = component_uid
                if component_uid is None:
                    component.add("UID").value = uid or random_uuid4()
                elif not component_uid:
                    component.uid.value = uid or random_uuid4()
            elif not object_uid or not component_uid:
                raise ValueError("Multiple %s components without UID in "
                                 "object" % component_name)
            elif object_uid != component_uid:
                raise ValueError(
                    "Multiple %s components with different UIDs in object: "
                    "%r, %r" % (component_name, object_uid, component_uid))
            # vobject interprets recurrence rules on demand
            try:
                component.rruleset
            except Exception as e:
                raise ValueError("invalid recurrence rules in %s" %
                                 component.name) from e
    elif vobject_item.name == "VCARD" and tag == "VADDRESSBOOK":
        # https://tools.ietf.org/html/rfc6352#section-5.1
        object_uid = get_uid(vobject_item)
        if object_uid is None:
            vobject_item.add("UID").value = uid or random_uuid4()
        elif not object_uid:
            vobject_item.uid.value = uid or random_uuid4()
    elif vobject_item.name == "VLIST" and tag == "VADDRESSBOOK":
        # Custom format used by SOGo Connector to store lists of contacts
        pass
    else:
        raise ValueError("Item type %r not supported in %s collection" %
                         (vobject_item.name, repr(tag) if tag else "generic"))
+
+
def check_and_sanitize_props(props):
    """Check collection properties for common errors."""
    tag = props.get("tag")
    if not tag:
        # A missing or empty tag means a generic collection.
        return
    if tag not in ("VCALENDAR", "VADDRESSBOOK"):
        raise ValueError("Unsupported collection tag: %r" % tag)
+
+
def random_uuid4():
    """Generate a pseudo-random UUID string (8-4-4-4-12 layout)."""
    # Pad to the full 32 hex digits: "%016x" only guarantees 16 digits and
    # would produce a short, malformed UUID whenever the high 64 bits of
    # the random number happen to be zero.
    r = "%032x" % getrandbits(128)
    return "%s-%s-%s-%s-%s" % (r[:8], r[8:12], r[12:16], r[16:20], r[20:])
+
+
def scandir(path, only_dirs=False, only_files=False):
    """Iterate over the names of the entries in directory ``path``.

    ``only_dirs`` yield only directories.

    ``only_files`` yield only regular files.

    (Compatibility helper: Python < 3.5 lacks ``os.scandir``.)

    """
    if sys.version_info < (3, 5):
        # Fallback: os.listdir with explicit stat-based checks.
        for name in os.listdir(path):
            full = os.path.join(path, name)
            if only_files and not os.path.isfile(full):
                continue
            if only_dirs and not os.path.isdir(full):
                continue
            yield name
    else:
        for entry in os.scandir(path):
            if only_files and not entry.is_file():
                continue
            if only_dirs and not entry.is_dir():
                continue
            yield entry.name
+
+
def get_etag(text):
    """Etag of a collection or item.

    Encoded as an HTTP quoted-string (see RFC 2616).

    """
    digest = md5(text.encode("utf-8")).hexdigest()
    return '"%s"' % digest
+
+
def get_uid(vobject_component):
    """UID value of an item if defined, else ``None``."""
    if not hasattr(vobject_component, "uid"):
        return None
    return vobject_component.uid.value
+
+
def get_uid_from_object(vobject_item):
    """UID value of a calendar/addressbook object, or ``None``."""
    if vobject_item.name == "VCALENDAR":
        # The UID lives on the first matching sub-component.
        for attr in ("vevent", "vjournal", "vtodo"):
            if hasattr(vobject_item, attr):
                return get_uid(getattr(vobject_item, attr))
        return None
    if vobject_item.name == "VCARD":
        return get_uid(vobject_item)
    return None
+
+
def sanitize_path(path):
    """Make path absolute with leading slash to prevent access to other data.

    A potential trailing slash is preserved (except on the root path).

    """
    had_trailing_slash = path.endswith("/")
    normalized = posixpath.normpath(path)
    new_path = "/"
    for part in normalized.split("/"):
        # Unsafe components ("", ".", "..", anything with "/") are dropped.
        if is_safe_path_component(part):
            new_path = posixpath.join(new_path, part)
    if new_path.endswith("/"):
        # Root path: it already carries its slash.
        had_trailing_slash = False
    return new_path + ("/" if had_trailing_slash else "")
+
+
def is_safe_path_component(path):
    """Check if ``path`` is one single path component that is safe to join."""
    if not path:
        # Preserve the original falsy pass-through (e.g. "" stays "").
        return path
    if "/" in path or path in (".", ".."):
        return False
    return True
+
+
def is_safe_filesystem_path_component(path):
    """Check if ``path`` is a single component of a local and posix
    filesystem path that is safe to join.

    """
    if not path:
        # Preserve the original falsy pass-through.
        return path
    if os.path.splitdrive(path)[0]:
        return False
    if os.path.split(path)[0]:
        return False
    if path in (os.curdir, os.pardir):
        return False
    # Hidden files and editor backups are rejected too.
    if path.startswith(".") or path.endswith("~"):
        return False
    return is_safe_path_component(path)
+
+
def path_to_filesystem(root, *paths):
    """Convert path to a local filesystem path relative to base_folder.

    `root` must be a secure filesystem path, it will be prepend to the path.

    Conversion of `paths` is done in a secure manner, or raises ``ValueError``
    (``UnsafePathError`` or ``CollidingPathError``).

    """
    paths = [sanitize_path(path).strip("/") for path in paths]
    safe_path = root
    for path in paths:
        if not path:
            continue
        # Join one validated component at a time.
        for part in path.split("/"):
            if not is_safe_filesystem_path_component(part):
                raise UnsafePathError(part)
            safe_path_parent = safe_path
            safe_path = os.path.join(safe_path, part)
            # Check for conflicting files (e.g. case-insensitive file systems
            # or short names on Windows file systems)
            if (os.path.lexists(safe_path) and
                    part not in scandir(safe_path_parent)):
                raise CollidingPathError(part)
    return safe_path
+
+
def left_encode_int(v):
    """Encode the non-negative integer ``v`` as one length byte followed by
    the value in little-endian byte order.

    """
    # bit_length avoids the float imprecision of int(log(v, 256)), which can
    # under-count the byte length for large exact powers of 256 and make
    # v.to_bytes raise OverflowError.
    length = (v.bit_length() + 7) // 8 if v != 0 else 1
    return bytes((length,)) + v.to_bytes(length, "little")
+
+
class UnsafePathError(ValueError):
    """Raised when a name cannot be safely mapped to a filesystem path."""

    def __init__(self, path):
        super().__init__(
            "Can't translate name safely to filesystem: %r" % path)


class CollidingPathError(ValueError):
    """Raised when two names map to the same filesystem path."""

    def __init__(self, path):
        super().__init__("File name collision: %r" % path)


class ComponentExistsError(ValueError):
    """Raised when creating a component that already exists."""

    def __init__(self, path):
        super().__init__("Component already exists: %r" % path)


class ComponentNotFoundError(ValueError):
    """Raised when a requested component does not exist."""

    def __init__(self, path):
        super().__init__("Component doesn't exist: %r" % path)
+
+
class Item:
    def __init__(self, collection, item=None, href=None, last_modified=None,
                 text=None, etag=None, uid=None, name=None,
                 component_name=None):
        """Initialize an item.

        ``collection`` the parent collection.

        ``href`` the href of the item.

        ``last_modified`` the HTTP-datetime of when the item was modified.

        ``text`` the text representation of the item (optional if ``item`` is
        set).

        ``item`` the vobject item (optional if ``text`` is set).

        ``etag`` the etag of the item (optional). See ``get_etag``.

        ``uid`` the UID of the object (optional). See ``get_uid_from_object``.

        ``name`` the name of the vobject item (optional).

        ``component_name`` the name of the primary component (optional).

        """
        if text is None and item is None:
            raise ValueError("at least one of 'text' or 'item' must be set")
        self.collection = collection
        self.href = href
        self.last_modified = last_modified
        # The remaining attributes are computed lazily on first access of
        # the corresponding property.
        self._text = text
        self._item = item
        self._etag = etag
        self._uid = uid
        self._name = name
        self._component_name = component_name

    def __getattr__(self, attr):
        # Delegate unknown attributes to the parsed vobject item.
        return getattr(self.item, attr)

    def serialize(self):
        """Text representation of the item (serialized on first access)."""
        if self._text is None:
            try:
                self._text = self.item.serialize()
            except Exception as e:
                raise RuntimeError("Failed to serialize item %r from %r: %s" %
                                   (self.href, self.collection.path, e)) from e
        return self._text

    @property
    def item(self):
        """The vobject item (parsed from ``text`` on first access)."""
        if self._item is None:
            try:
                self._item = vobject.readOne(self._text)
            except Exception as e:
                raise RuntimeError("Failed to parse item %r from %r: %s" %
                                   (self.href, self.collection.path, e)) from e
        return self._item

    @property
    def etag(self):
        """Encoded as quoted-string (see RFC 2616)."""
        if self._etag is None:
            self._etag = get_etag(self.serialize())
        return self._etag

    @property
    def uid(self):
        """UID of the object (extracted on first access)."""
        if self._uid is None:
            self._uid = get_uid_from_object(self.item)
        return self._uid

    @property
    def name(self):
        """Name of the vobject item."""
        if self._name is not None:
            return self._name
        return self.item.name

    @property
    def component_name(self):
        """Name of the primary component."""
        if self._component_name is not None:
            return self._component_name
        return xmlutils.find_tag(self.item)
+
+
class BaseCollection:

    # Overridden on copy by the "load" function
    configuration = None
    logger = None

    # Properties of instance
    # NOTE(review): the bare string below is a no-op statement, not a real
    # docstring for "path"; kept unchanged for compatibility.
    """The sanitized path of the collection without leading or trailing ``/``.
    """
    path = ""
+
+ @property
+ def owner(self):
+ """The owner of the collection."""
+ return self.path.split("/", maxsplit=1)[0]
+
+ @property
+ def is_principal(self):
+ """Collection is a principal."""
+ return bool(self.path) and "/" not in self.path
+
    @owner.setter
    def owner(self, value):
        # DEPRECATED: Included for compatibility reasons
        # Assigning has no effect; the owner is derived from ``path``.
        pass
+
    @is_principal.setter
    def is_principal(self, value):
        # DEPRECATED: Included for compatibility reasons
        # Assigning has no effect; the value is derived from ``path``.
        pass
+
    @classmethod
    def discover(cls, path, depth="0"):
        """Discover a list of collections under the given ``path``.

        ``path`` is sanitized.

        If ``depth`` is "0", only the actual object under ``path`` is
        returned.

        If ``depth`` is anything but "0", it is considered as "1" and direct
        children are included in the result.

        The root collection "/" must always exist.

        """
        # Must be implemented by storage backends.
        raise NotImplementedError
+
+ @classmethod
+ def move(cls, item, to_collection, to_href):
+ """Move an object.
+
+ ``item`` is the item to move.
+
+ ``to_collection`` is the target collection.
+
+ ``to_href`` is the target name in ``to_collection``. An item with the
+ same name might already exist.
+
+ """
+ if item.collection.path == to_collection.path and item.href == to_href:
+ return
+ to_collection.upload(to_href, item.item)
+ item.collection.delete(item.href)
+
    @property
    def etag(self):
        """Encoded as quoted-string (see RFC 2616).

        Derived from all item hrefs/etags and the collection metadata.

        """
        etag = md5()
        for item in self.get_all():
            etag.update((item.href + "/" + item.etag).encode("utf-8"))
        # sort_keys makes the metadata digest deterministic.
        etag.update(json.dumps(self.get_meta(), sort_keys=True).encode())
        return '"%s"' % etag.hexdigest()
+
    @classmethod
    def create_collection(cls, href, collection=None, props=None):
        """Create a collection.

        ``href`` is the sanitized path.

        If the collection already exists and neither ``collection`` nor
        ``props`` are set, this method shouldn't do anything. Otherwise the
        existing collection must be replaced.

        ``collection`` is a list of vobject components.

        ``props`` are metadata values for the collection.

        ``props["tag"]`` is the type of collection (VCALENDAR or
        VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the
        collection.

        """
        # Must be implemented by storage backends.
        raise NotImplementedError
+
    def sync(self, old_token=None):
        """Get the current sync token and changed items for synchronization.

        ``old_token`` an old sync token which is used as the base of the
        delta update. If sync token is missing, all items are returned.
        ValueError is raised for invalid or old tokens.

        WARNING: This simple default implementation treats all sync-token as
                 invalid. It adheres to the specification but some clients
                 (e.g. InfCloud) don't like it. Subclasses should provide a
                 more sophisticated implementation.

        """
        # The default token is derived from the collection etag, so it
        # changes whenever the collection changes.
        token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
        if old_token:
            raise ValueError("Sync token are not supported")
        return token, self.list()
+
    def list(self):
        """List the hrefs of all collection items."""
        # Must be implemented by storage backends.
        raise NotImplementedError
+
    def get(self, href):
        """Fetch a single item by ``href``."""
        # Must be implemented by storage backends.
        raise NotImplementedError
+
+ def get_multi(self, hrefs):
+ """Fetch multiple items. Duplicate hrefs must be ignored.
+
+ DEPRECATED: use ``get_multi2`` instead
+
+ """
+ return (self.get(href) for href in set(hrefs))
+
+ def get_multi2(self, hrefs):
+ """Fetch multiple items.
+
+ Functionally similar to ``get``, but might bring performance benefits
+ on some storages when used cleverly. It's not required to return the
+ requested items in the correct order. Duplicated hrefs can be ignored.
+
+ Returns tuples with the href and the item or None if the item doesn't
+ exist.
+
+ """
+ return ((href, self.get(href)) for href in hrefs)
+
+ def get_all(self):
+ """Fetch all items.
+
+ Functionally similar to ``get``, but might bring performance benefits
+ on some storages when used cleverly.
+
+ """
+ return map(self.get, self.list())
+
+ def get_all_filtered(self, filters):
+ """Fetch all items with optional filtering.
+
+ This can largely improve performance of reports depending on
+ the filters and this implementation.
+
+ Returns tuples in the form ``(item, filters_matched)``.
+ ``filters_matched`` is a bool that indicates if ``filters`` are fully
+ matched.
+
+ This returns all events by default
+ """
+ return ((item, False) for item in self.get_all())
+
    def pre_filtered_list(self, filters):
        """List collection items with optional pre filtering.

        ``filters`` is ignored by this default implementation.

        DEPRECATED: use ``get_all_filtered`` instead

        """
        return self.get_all()
+
+ def has(self, href):
+ """Check if an item exists by its href.
+
+ Functionally similar to ``get``, but might bring performance benefits
+ on some storages when used cleverly.
+
+ """
+ return self.get(href) is not None
+
    def upload(self, href, vobject_item):
        """Upload a new or replace an existing item.

        Abstract; backends must override.

        """
        raise NotImplementedError
+
    def delete(self, href=None):
        """Delete an item.

        When ``href`` is ``None``, delete the whole collection.

        Abstract; backends must override.

        """
        raise NotImplementedError
+
    def get_meta(self, key=None):
        """Get metadata value for collection.

        Return the value of the property ``key``. If ``key`` is ``None``
        return a dict with all properties.

        Abstract; backends must override.

        """
        raise NotImplementedError
+
    def set_meta(self, props):
        """Set metadata values for collection.

        ``props`` a dict with updates for properties. If a value is empty,
        the property must be deleted.

        DEPRECATED: use ``set_meta_all`` instead

        """
        raise NotImplementedError
+
+ def set_meta_all(self, props):
+ """Set metadata values for collection.
+
+ ``props`` a dict with values for properties.
+
+ """
+ delta_props = self.get_meta()
+ for key in delta_props.keys():
+ if key not in props:
+ delta_props[key] = None
+ delta_props.update(props)
+ self.set_meta(self, delta_props)
+
    @property
    def last_modified(self):
        """Get the HTTP-datetime of when the collection was modified.

        Abstract; backends must override.

        """
        raise NotImplementedError
+
+ def serialize(self):
+ """Get the unicode string representing the whole collection."""
+ if self.get_meta("tag") == "VCALENDAR":
+ in_vcalendar = False
+ vtimezones = ""
+ included_tzids = set()
+ vtimezone = []
+ tzid = None
+ components = ""
+ # Concatenate all child elements of VCALENDAR from all items
+ # together, while preventing duplicated VTIMEZONE entries.
+ # VTIMEZONEs are only distinguished by their TZID, if different
+ # timezones share the same TZID this produces errornous ouput.
+ # VObject fails at this too.
+ for item in self.get_all():
+ depth = 0
+ for line in item.serialize().split("\r\n"):
+ if line.startswith("BEGIN:"):
+ depth += 1
+ if depth == 1 and line == "BEGIN:VCALENDAR":
+ in_vcalendar = True
+ elif in_vcalendar:
+ if depth == 1 and line.startswith("END:"):
+ in_vcalendar = False
+ if depth == 2 and line == "BEGIN:VTIMEZONE":
+ vtimezone.append(line + "\r\n")
+ elif vtimezone:
+ vtimezone.append(line + "\r\n")
+ if depth == 2 and line.startswith("TZID:"):
+ tzid = line[len("TZID:"):]
+ elif depth == 2 and line.startswith("END:"):
+ if tzid is None or tzid not in included_tzids:
+ vtimezones += "".join(vtimezone)
+ included_tzids.add(tzid)
+ vtimezone.clear()
+ tzid = None
+ elif depth >= 2:
+ components += line + "\r\n"
+ if line.startswith("END:"):
+ depth -= 1
+ template = vobject.iCalendar()
+ displayname = self.get_meta("D:displayname")
+ if displayname:
+ template.add("X-WR-CALNAME")
+ template.x_wr_calname.value_param = "TEXT"
+ template.x_wr_calname.value = displayname
+ description = self.get_meta("C:calendar-description")
+ if description:
+ template.add("X-WR-CALDESC")
+ template.x_wr_caldesc.value_param = "TEXT"
+ template.x_wr_caldesc.value = description
+ template = template.serialize()
+ template_insert_pos = template.find("\r\nEND:VCALENDAR\r\n") + 2
+ assert template_insert_pos != -1
+ return (template[:template_insert_pos] +
+ vtimezones + components +
+ template[template_insert_pos:])
+ elif self.get_meta("tag") == "VADDRESSBOOK":
+ return "".join((item.serialize() for item in self.get_all()))
+ return ""
+
    @classmethod
    @contextmanager
    def acquire_lock(cls, mode, user=None):
        """Set a context manager to lock the whole storage.

        ``mode`` must either be "r" for shared access or "w" for exclusive
        access.

        ``user`` is the name of the logged in user or empty.

        Abstract; backends must override.

        """
        raise NotImplementedError
+
    @classmethod
    def verify(cls):
        """Check the storage for errors.

        Returns ``True`` by default; backends can override this with a
        real consistency check.

        """
        return True
+
+
# Version tag mixed into every per-item cache hash; bump it to invalidate
# all previously pickled item cache entries.
ITEM_CACHE_VERSION = 1
+
+
class Collection(BaseCollection):
    """Collection stored in several files per calendar."""

    def __init__(self, path, principal=None, folder=None,
                 filesystem_path=None):
        # DEPRECATED: Remove principal and folder attributes
        if folder is None:
            folder = self._get_collection_root_folder()
        # Path should already be sanitized
        self.path = sanitize_path(path).strip("/")
        # NOTE(review): ``self.configuration`` is expected to be set on the
        # class by the storage loader before instantiation — confirm.
        self._encoding = self.configuration.get("encoding", "stock")
        # DEPRECATED: Use ``self._encoding`` instead
        self.encoding = self._encoding
        if filesystem_path is None:
            filesystem_path = path_to_filesystem(folder, self.path)
        self._filesystem_path = filesystem_path
        self._props_path = os.path.join(
            self._filesystem_path, ".Radicale.props")
        # Lazily populated caches; reused only while read-locked (see
        # ``get_meta`` and ``etag``).
        self._meta_cache = None
        self._etag_cache = None
        self._item_cache_cleaned = False
+
+ @classmethod
+ def _get_collection_root_folder(cls):
+ filesystem_folder = os.path.expanduser(
+ cls.configuration.get("storage", "filesystem_folder"))
+ return os.path.join(filesystem_folder, "collection-root")
+
    @contextmanager
    def _atomic_write(self, path, mode="w", newline=None, sync_directory=True):
        """Write ``path`` atomically via a temporary file in the same
        directory: the caller writes to the yielded file, which is fsynced
        and then renamed over ``path``.  On any error the temporary file is
        removed and the target is left untouched."""
        directory = os.path.dirname(path)
        tmp = NamedTemporaryFile(
            mode=mode, dir=directory, delete=False, prefix=".Radicale.tmp-",
            newline=newline, encoding=None if "b" in mode else self._encoding)
        try:
            yield tmp
            tmp.flush()
            try:
                self._fsync(tmp.fileno())
            except OSError as e:
                raise RuntimeError("Fsync'ing file %r failed: %s" %
                                   (path, e)) from e
            tmp.close()
            # Atomic on POSIX and Windows; replaces an existing target.
            os.replace(tmp.name, path)
        except BaseException:
            tmp.close()
            os.remove(tmp.name)
            raise
        if sync_directory:
            self._sync_directory(directory)
+
+ @staticmethod
+ def _find_available_file_name(exists_fn, suffix=""):
+ # Prevent infinite loop
+ for _ in range(1000):
+ file_name = random_uuid4() + suffix
+ if not exists_fn(file_name):
+ return file_name
+ # something is wrong with the PRNG
+ raise RuntimeError("No unique random sequence found")
+
    @classmethod
    def _fsync(cls, fd):
        """Flush ``fd`` to stable storage if fsync is enabled in config."""
        if cls.configuration.getboolean("storage", "filesystem_fsync"):
            # F_FULLFSYNC is only available on some POSIX systems
            # (presumably macOS) and flushes harder than plain fsync.
            if os.name == "posix" and hasattr(fcntl, "F_FULLFSYNC"):
                fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
            else:
                os.fsync(fd)
+
    @classmethod
    def _sync_directory(cls, path):
        """Sync directory to disk.

        This only works on POSIX and does nothing on other systems.

        """
        if not cls.configuration.getboolean("storage", "filesystem_fsync"):
            return
        if os.name == "posix":
            try:
                # Directories must be opened read-only to fsync them.
                fd = os.open(path, 0)
                try:
                    cls._fsync(fd)
                finally:
                    os.close(fd)
            except OSError as e:
                raise RuntimeError("Fsync'ing directory %r failed: %s" %
                                   (path, e)) from e
+
+ @classmethod
+ def _makedirs_synced(cls, filesystem_path):
+ """Recursively create a directory and its parents in a sync'ed way.
+
+ This method acts silently when the folder already exists.
+
+ """
+ if os.path.isdir(filesystem_path):
+ return
+ parent_filesystem_path = os.path.dirname(filesystem_path)
+ # Prevent infinite loop
+ if filesystem_path != parent_filesystem_path:
+ # Create parent dirs recursively
+ cls._makedirs_synced(parent_filesystem_path)
+ # Possible race!
+ os.makedirs(filesystem_path, exist_ok=True)
+ cls._sync_directory(parent_filesystem_path)
+
    @classmethod
    def discover(cls, path, depth="0", child_context_manager=(
            lambda path, href=None: contextlib.ExitStack())):
        """Discover a list of collections under the given ``path``.

        Yields the collection (or the single item) at ``path`` and, when
        ``depth`` is not "0", its direct items and sub-collections.  Each
        child is yielded inside ``child_context_manager``.
        """
        # Path should already be sanitized
        sane_path = sanitize_path(path).strip("/")
        attributes = sane_path.split("/") if sane_path else []

        folder = cls._get_collection_root_folder()
        # Create the root collection
        cls._makedirs_synced(folder)
        try:
            filesystem_path = path_to_filesystem(folder, sane_path)
        except ValueError as e:
            # Path is unsafe
            cls.logger.debug("Unsafe path %r requested from storage: %s",
                             sane_path, e, exc_info=True)
            return

        # Check if the path exists and if it leads to a collection or an item
        if not os.path.isdir(filesystem_path):
            if attributes and os.path.isfile(filesystem_path):
                href = attributes.pop()
            else:
                return
        else:
            href = None

        sane_path = "/".join(attributes)
        collection = cls(sane_path)

        if href:
            yield collection.get(href)
            return

        yield collection

        if depth == "0":
            return

        for href in collection.list():
            with child_context_manager(sane_path, href):
                yield collection.get(href)

        for href in scandir(filesystem_path, only_dirs=True):
            if not is_safe_filesystem_path_component(href):
                # Internal ".Radicale*" folders are skipped silently.
                if not href.startswith(".Radicale"):
                    cls.logger.debug("Skipping collection %r in %r", href,
                                     sane_path)
                continue
            child_path = posixpath.join(sane_path, href)
            with child_context_manager(child_path):
                yield cls(child_path)
+
    @classmethod
    def verify(cls):
        """Walk the whole storage and check every collection and item.

        Returns ``True`` when no errors were found; all errors are logged.
        """
        item_errors = collection_errors = 0

        @contextlib.contextmanager
        def exception_cm(path, href=None):
            # Counts and logs failures instead of aborting the walk.
            nonlocal item_errors, collection_errors
            try:
                yield
            except Exception as e:
                if href:
                    item_errors += 1
                    name = "item %r in %r" % (href, path.strip("/"))
                else:
                    collection_errors += 1
                    name = "collection %r" % path.strip("/")
                cls.logger.error("Invalid %s: %s", name, e, exc_info=True)

        remaining_paths = [""]
        while remaining_paths:
            path = remaining_paths.pop(0)
            cls.logger.debug("Verifying collection %r", path)
            with exception_cm(path):
                saved_item_errors = item_errors
                collection = None
                for item in cls.discover(path, "1", exception_cm):
                    if not collection:
                        # First yielded value is the collection itself.
                        collection = item
                        collection.get_meta()
                        continue
                    if isinstance(item, BaseCollection):
                        remaining_paths.append(item.path)
                    else:
                        cls.logger.debug("Verified item %r in %r",
                                         item.href, path)
                # Only sync if all items of the collection were valid.
                if item_errors == saved_item_errors:
                    collection.sync()
        return item_errors == 0 and collection_errors == 0
+
    @classmethod
    def create_collection(cls, href, collection=None, props=None):
        """Create (or replace) the collection at ``href``.

        See ``BaseCollection.create_collection``.  The new content is built
        in a temporary directory and swapped into place by renames.
        """
        folder = cls._get_collection_root_folder()

        # Path should already be sanitized
        sane_path = sanitize_path(href).strip("/")
        filesystem_path = path_to_filesystem(folder, sane_path)

        if not props:
            cls._makedirs_synced(filesystem_path)
            return cls(sane_path)

        parent_dir = os.path.dirname(filesystem_path)
        cls._makedirs_synced(parent_dir)

        # Create a temporary directory with an unsafe name
        with TemporaryDirectory(
                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
            # The temporary directory itself can't be renamed
            tmp_filesystem_path = os.path.join(tmp_dir, "collection")
            os.makedirs(tmp_filesystem_path)
            self = cls(sane_path, filesystem_path=tmp_filesystem_path)
            self.set_meta_all(props)

            if collection:
                if props.get("tag") == "VCALENDAR":
                    collection, = collection
                    items = []
                    for content in ("vevent", "vtodo", "vjournal"):
                        items.extend(
                            getattr(collection, "%s_list" % content, []))
                    # One file per UID: components sharing a UID (e.g.
                    # recurrence overrides) are stored together.
                    items_by_uid = groupby(sorted(items, key=get_uid), get_uid)
                    vobject_items = {}
                    for uid, items in items_by_uid:
                        new_collection = vobject.iCalendar()
                        for item in items:
                            new_collection.add(item)
                        # href must comply to is_safe_filesystem_path_component
                        # and no file name collisions must exist between hrefs
                        href = self._find_available_file_name(
                            vobject_items.get, suffix=".ics")
                        vobject_items[href] = new_collection
                    self._upload_all_nonatomic(vobject_items)
                elif props.get("tag") == "VADDRESSBOOK":
                    vobject_items = {}
                    for card in collection:
                        # href must comply to is_safe_filesystem_path_component
                        # and no file name collisions must exist between hrefs
                        href = self._find_available_file_name(
                            vobject_items.get, suffix=".vcf")
                        vobject_items[href] = card
                    self._upload_all_nonatomic(vobject_items)

            # This operation is not atomic on the filesystem level but it's
            # very unlikely that one rename operations succeeds while the
            # other fails or that only one gets written to disk.
            if os.path.exists(filesystem_path):
                os.rename(filesystem_path, os.path.join(tmp_dir, "delete"))
            os.rename(tmp_filesystem_path, filesystem_path)
            cls._sync_directory(parent_dir)

        return cls(sane_path)
+
    def upload_all_nonatomic(self, vobject_items):
        """Upload a set of items without atomicity.

        DEPRECATED: Use ``_upload_all_nonatomic``

        """
        return self._upload_all_nonatomic(vobject_items)
+
    def _upload_all_nonatomic(self, vobject_items):
        """Upload a new set of items.

        This takes a mapping of href and vobject items and
        uploads them nonatomic and without existence checks.

        The item cache entry is written before the item itself; both use
        deferred directory syncs so the directories are fsynced only once.

        """
        cache_folder = os.path.join(self._filesystem_path,
                                    ".Radicale.cache", "item")
        self._makedirs_synced(cache_folder)
        for href, vobject_item in vobject_items.items():
            if not is_safe_filesystem_path_component(href):
                raise UnsafePathError(href)
            try:
                cache_content = self._item_cache_content(href, vobject_item)
                # Fourth field of the cache tuple is the serialized text.
                _, _, _, text, _, _, _, _ = cache_content
            except Exception as e:
                raise ValueError(
                    "Failed to store item %r in temporary collection %r: %s" %
                    (href, self.path, e)) from e
            with self._atomic_write(os.path.join(cache_folder, href), "wb",
                                    sync_directory=False) as f:
                pickle.dump(cache_content, f)
            path = path_to_filesystem(self._filesystem_path, href)
            with self._atomic_write(
                    path, newline="", sync_directory=False) as f:
                f.write(text)
        self._sync_directory(cache_folder)
        self._sync_directory(self._filesystem_path)
+
+ @classmethod
+ def move(cls, item, to_collection, to_href):
+ if not is_safe_filesystem_path_component(to_href):
+ raise UnsafePathError(to_href)
+ os.replace(
+ path_to_filesystem(item.collection._filesystem_path, item.href),
+ path_to_filesystem(to_collection._filesystem_path, to_href))
+ cls._sync_directory(to_collection._filesystem_path)
+ if item.collection._filesystem_path != to_collection._filesystem_path:
+ cls._sync_directory(item.collection._filesystem_path)
+ # Move the item cache entry
+ cache_folder = os.path.join(item.collection._filesystem_path,
+ ".Radicale.cache", "item")
+ to_cache_folder = os.path.join(to_collection._filesystem_path,
+ ".Radicale.cache", "item")
+ cls._makedirs_synced(to_cache_folder)
+ try:
+ os.replace(os.path.join(cache_folder, item.href),
+ os.path.join(to_cache_folder, to_href))
+ except FileNotFoundError:
+ pass
+ else:
+ cls._makedirs_synced(to_cache_folder)
+ if cache_folder != to_cache_folder:
+ cls._makedirs_synced(cache_folder)
+ # Track the change
+ to_collection._update_history_etag(to_href, item)
+ item.collection._update_history_etag(item.href, None)
+ to_collection._clean_history_cache()
+ if item.collection._filesystem_path != to_collection._filesystem_path:
+ item.collection._clean_history_cache()
+
    @classmethod
    def _clean_cache(cls, folder, names, max_age=None):
        """Delete all ``names`` in ``folder`` that are older than ``max_age``.

        ``max_age`` is in seconds; when ``None`` every name is deleted.
        Unsafe names are skipped; races with other processes are tolerated.
        """
        age_limit = time.time() - max_age if max_age is not None else None
        modified = False
        for name in names:
            if not is_safe_filesystem_path_component(name):
                continue
            if age_limit is not None:
                try:
                    # Race: Another process might have deleted the file.
                    mtime = os.path.getmtime(os.path.join(folder, name))
                except FileNotFoundError:
                    continue
                if mtime > age_limit:
                    continue
            cls.logger.debug("Found expired item in cache: %r", name)
            # Race: Another process might have deleted or locked the
            # file.
            try:
                os.remove(os.path.join(folder, name))
            except (FileNotFoundError, PermissionError):
                continue
            modified = True
        if modified:
            cls._sync_directory(folder)
+
    def _update_history_etag(self, href, item):
        """Updates and retrieves the history etag from the history cache.

        The history cache contains a file for each current and deleted item
        of the collection. These files contain the etag of the item (empty
        string for deleted items) and a history etag, which is a hash over
        the previous history etag and the etag separated by "/".

        ``item`` is the current item, or ``None`` for a deleted item.
        """
        history_folder = os.path.join(self._filesystem_path,
                                      ".Radicale.cache", "history")
        try:
            with open(os.path.join(history_folder, href), "rb") as f:
                cache_etag, history_etag = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
            if isinstance(e, (pickle.UnpicklingError, ValueError)):
                self.logger.warning(
                    "Failed to load history cache entry %r in %r: %s",
                    href, self.path, e, exc_info=True)
            cache_etag = ""
            # Initialize with random data to prevent collisions with cleaned
            # expired items.
            history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
        etag = item.etag if item else ""
        if etag != cache_etag:
            # The item changed since the last recorded state: chain a new
            # history etag and persist it.
            self._makedirs_synced(history_folder)
            history_etag = get_etag(history_etag + "/" + etag).strip("\"")
            try:
                # Race: Other processes might have created and locked the file.
                with self._atomic_write(os.path.join(history_folder, href),
                                        "wb") as f:
                    pickle.dump([etag, history_etag], f)
            except PermissionError:
                pass
        return history_etag
+
+ def _get_deleted_history_hrefs(self):
+ """Returns the hrefs of all deleted items that are still in the
+ history cache."""
+ history_folder = os.path.join(self._filesystem_path,
+ ".Radicale.cache", "history")
+ try:
+ for href in scandir(history_folder):
+ if not is_safe_filesystem_path_component(href):
+ continue
+ if os.path.isfile(os.path.join(self._filesystem_path, href)):
+ continue
+ yield href
+ except FileNotFoundError:
+ pass
+
    def _clean_history_cache(self):
        """Delete all expired history cache entries of deleted items."""
        history_folder = os.path.join(self._filesystem_path,
                                      ".Radicale.cache", "history")
        self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
                          max_age=self.configuration.getint(
                              "storage", "max_sync_token_age"))
+
    def sync(self, old_token=None):
        """Return the current sync token and the changed hrefs since
        ``old_token``.  Raises ``ValueError`` for malformed or unknown
        tokens."""
        # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
        # where TOKEN_NAME is the md5 hash of all history etags of present and
        # past items of the collection.
        def check_token_name(token_name):
            # Token names must look like md5 hexdigests.
            if len(token_name) != 32:
                return False
            for c in token_name:
                if c not in "0123456789abcdef":
                    return False
            return True

        old_token_name = None
        if old_token:
            # Extract the token name from the sync token
            if not old_token.startswith("http://radicale.org/ns/sync/"):
                raise ValueError("Malformed token: %r" % old_token)
            old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
            if not check_token_name(old_token_name):
                raise ValueError("Malformed token: %r" % old_token)
        # Get the current state and sync-token of the collection.
        state = {}
        token_name_hash = md5()
        # Find the history of all existing and deleted items
        for href, item in chain(
                ((item.href, item) for item in self.get_all()),
                ((href, None) for href in self._get_deleted_history_hrefs())):
            history_etag = self._update_history_etag(href, item)
            state[href] = history_etag
            token_name_hash.update((href + "/" + history_etag).encode("utf-8"))
        token_name = token_name_hash.hexdigest()
        token = "http://radicale.org/ns/sync/%s" % token_name
        if token_name == old_token_name:
            # Nothing changed
            return token, ()
        token_folder = os.path.join(self._filesystem_path,
                                    ".Radicale.cache", "sync-token")
        token_path = os.path.join(token_folder, token_name)
        old_state = {}
        if old_token_name:
            # load the old token state
            old_token_path = os.path.join(token_folder, old_token_name)
            try:
                # Race: Another process might have deleted the file.
                with open(old_token_path, "rb") as f:
                    old_state = pickle.load(f)
            except (FileNotFoundError, pickle.UnpicklingError,
                    ValueError) as e:
                if isinstance(e, (pickle.UnpicklingError, ValueError)):
                    self.logger.warning(
                        "Failed to load stored sync token %r in %r: %s",
                        old_token_name, self.path, e, exc_info=True)
                    # Delete the damaged file
                    try:
                        os.remove(old_token_path)
                    except (FileNotFoundError, PermissionError):
                        pass
                raise ValueError("Token not found: %r" % old_token)
        # write the new token state or update the modification time of
        # existing token state
        if not os.path.exists(token_path):
            self._makedirs_synced(token_folder)
            try:
                # Race: Other processes might have created and locked the file.
                with self._atomic_write(token_path, "wb") as f:
                    pickle.dump(state, f)
            except PermissionError:
                pass
            else:
                # clean up old sync tokens and item cache
                self._clean_cache(token_folder, os.listdir(token_folder),
                                  max_age=self.configuration.getint(
                                      "storage", "max_sync_token_age"))
                self._clean_history_cache()
        else:
            # Try to update the modification time
            try:
                # Race: Another process might have deleted the file.
                os.utime(token_path)
            except FileNotFoundError:
                pass
        changes = []
        # Find all new, changed and deleted (that are still in the item cache)
        # items
        for href, history_etag in state.items():
            if history_etag != old_state.get(href):
                changes.append(href)
        # Find all deleted items that are no longer in the item cache
        for href, history_etag in old_state.items():
            if href not in state:
                changes.append(href)
        return token, changes
+
+ def list(self):
+ for href in scandir(self._filesystem_path, only_files=True):
+ if not is_safe_filesystem_path_component(href):
+ if not href.startswith(".Radicale"):
+ self.logger.debug(
+ "Skipping item %r in %r", href, self.path)
+ continue
+ yield href
+
+ def get(self, href, verify_href=True):
+ item, metadata = self._get_with_metadata(href, verify_href=verify_href)
+ return item
+
+ def _item_cache_hash(self, raw_text):
+ _hash = md5()
+ _hash.update(left_encode_int(ITEM_CACHE_VERSION))
+ _hash.update(raw_text)
+ return _hash.hexdigest()
+
    def _item_cache_content(self, href, vobject_item, cache_hash=None):
        """Build the tuple that is pickled into the item cache:
        ``(cache_hash, uid, etag, text, name, tag, start, end)``."""
        text = vobject_item.serialize()
        if cache_hash is None:
            cache_hash = self._item_cache_hash(text.encode(self._encoding))
        etag = get_etag(text)
        uid = get_uid_from_object(vobject_item)
        name = vobject_item.name
        tag, start, end = xmlutils.find_tag_and_time_range(vobject_item)
        return cache_hash, uid, etag, text, name, tag, start, end
+
    def _store_item_cache(self, href, vobject_item, cache_hash=None):
        """Write the item cache entry for ``href`` and return its content
        tuple (see ``_item_cache_content``)."""
        cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                    "item")
        content = self._item_cache_content(href, vobject_item, cache_hash)
        self._makedirs_synced(cache_folder)
        try:
            # Race: Other processes might have created and locked the
            # file.
            with self._atomic_write(os.path.join(cache_folder, href),
                                    "wb") as f:
                pickle.dump(content, f)
        except PermissionError:
            pass
        return content
+
    # Per-collection cache locks, shared across instances; guarded by
    # ``_cache_locks_lock``.
    _cache_locks = {}
    _cache_locks_lock = threading.Lock()

    @contextmanager
    def _acquire_cache_lock(self, ns=""):
        """Exclusively lock the cache namespace ``ns`` of this collection.

        The per-id lock is created on demand and dropped again once no
        thread uses it anymore.
        """
        with contextlib.ExitStack() as lock_stack:
            with contextlib.ExitStack() as locks_lock_stack:
                locks_lock_stack.enter_context(self._cache_locks_lock)
                lock_id = ns + "/" + self.path
                lock = self._cache_locks.get(lock_id)
                if not lock:
                    cache_folder = os.path.join(self._filesystem_path,
                                                ".Radicale.cache")
                    self._makedirs_synced(cache_folder)
                    lock_path = None
                    if self.configuration.getboolean(
                            "storage", "filesystem_locking"):
                        lock_path = os.path.join(
                            cache_folder,
                            ".Radicale.lock" + (".%s" % ns if ns else ""))
                    lock = FileBackedRwLock(lock_path)
                    self._cache_locks[lock_id] = lock
                # The callback releases ``_cache_locks_lock`` while waiting
                # for the cache lock, so other ids stay usable.
                lock_stack.enter_context(lock.acquire_lock(
                    "w", lambda: locks_lock_stack.pop_all().close()))
            try:
                yield
            finally:
                with self._cache_locks_lock:
                    lock_stack.pop_all().close()
                    if not lock.in_use():
                        del self._cache_locks[lock_id]
+
+ def _load_item_cache(self, href, input_hash):
+ cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
+ "item")
+ cache_hash = uid = etag = text = name = tag = start = end = None
+ try:
+ with open(os.path.join(cache_folder, href), "rb") as f:
+ cache_hash, *content = pickle.load(f)
+ if cache_hash == input_hash:
+ uid, etag, text, name, tag, start, end = content
+ except FileNotFoundError as e:
+ pass
+ except (pickle.UnpicklingError, ValueError) as e:
+ self.logger.warning(
+ "Failed to load item cache entry %r in %r: %s",
+ href, self.path, e, exc_info=True)
+ return cache_hash, uid, etag, text, name, tag, start, end
+
+ def _clean_item_cache(self):
+ cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
+ "item")
+ self._clean_cache(cache_folder, (
+ href for href in scandir(cache_folder) if not
+ os.path.isfile(os.path.join(self._filesystem_path, href))))
+
    def _get_with_metadata(self, href, verify_href=True):
        """Like ``get`` but additionally returns the following metadata:
        tag, start, end: see ``xmlutils.find_tag_and_time_range``. If
        extraction of the metadata failed, the values are all ``None``."""
        if verify_href:
            try:
                if not is_safe_filesystem_path_component(href):
                    raise UnsafePathError(href)
                path = path_to_filesystem(self._filesystem_path, href)
            except ValueError as e:
                self.logger.debug(
                    "Can't translate name %r safely to filesystem in %r: %s",
                    href, self.path, e, exc_info=True)
                return None, None
        else:
            path = os.path.join(self._filesystem_path, href)
        try:
            with open(path, "rb") as f:
                raw_text = f.read()
        except (FileNotFoundError, IsADirectoryError):
            return None, None
        except PermissionError:
            # Windows raises ``PermissionError`` when ``path`` is a directory
            if (os.name == "nt" and
                    os.path.isdir(path) and os.access(path, os.R_OK)):
                return None, None
            raise
        # The hash of the component in the file system. This is used to check,
        # if the entry in the cache is still valid.
        input_hash = self._item_cache_hash(raw_text)
        cache_hash, uid, etag, text, name, tag, start, end = \
            self._load_item_cache(href, input_hash)
        vobject_item = None
        if input_hash != cache_hash:
            with contextlib.ExitStack() as lock_stack:
                # Lock the item cache to prevent multiple processes from
                # generating the same data in parallel.
                # This improves the performance for multiple requests.
                if self._lock.locked() == "r":
                    lock_stack.enter_context(self._acquire_cache_lock("item"))
                    # Check if another process created the file in the meantime
                    cache_hash, uid, etag, text, name, tag, start, end = \
                        self._load_item_cache(href, input_hash)
                if input_hash != cache_hash:
                    try:
                        vobject_items = tuple(vobject.readComponents(
                            raw_text.decode(self._encoding)))
                        if len(vobject_items) != 1:
                            raise RuntimeError("Content contains %d components"
                                               % len(vobject_items))
                        vobject_item = vobject_items[0]
                        check_and_sanitize_item(vobject_item, uid=uid,
                                                tag=self.get_meta("tag"))
                        cache_hash, uid, etag, text, name, tag, start, end = \
                            self._store_item_cache(
                                href, vobject_item, input_hash)
                    except Exception as e:
                        raise RuntimeError("Failed to load item %r in %r: %s" %
                                           (href, self.path, e)) from e
                    # Clean cache entries once after the data in the file
                    # system was edited externally.
                    if not self._item_cache_cleaned:
                        self._item_cache_cleaned = True
                        self._clean_item_cache()
        last_modified = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(os.path.getmtime(path)))
        return Item(
            self, href=href, last_modified=last_modified, etag=etag,
            text=text, item=vobject_item, uid=uid, name=name,
            component_name=tag), (tag, start, end)
+
    def get_multi2(self, hrefs):
        """Yield ``(href, item)`` tuples for ``hrefs``; ``item`` is ``None``
        for unsafe or colliding names."""
        # It's faster to check for file name collisions here, because
        # we only need to call os.listdir once.
        files = None
        for href in hrefs:
            if files is None:
                # List dir after hrefs returned one item, the iterator may be
                # empty and the for-loop is never executed.
                files = os.listdir(self._filesystem_path)
            path = os.path.join(self._filesystem_path, href)
            # A name that is absent from the listing but exists on disk
            # indicates a collision (e.g. case-insensitive filesystem).
            if (not is_safe_filesystem_path_component(href) or
                    href not in files and os.path.lexists(path)):
                self.logger.debug(
                    "Can't translate name safely to filesystem: %r", href)
                yield (href, None)
            else:
                yield (href, self.get(href, verify_href=False))
+
+ def get_all(self):
+ # We don't need to check for collissions, because the the file names
+ # are from os.listdir.
+ return (self.get(href, verify_href=False) for href in self.list())
+
    def get_all_filtered(self, filters):
        """Yield ``(item, filters_matched)`` pairs, pre-filtering by the
        component tag and time range extracted from ``filters``."""
        tag, start, end, simple = xmlutils.simplify_prefilters(
            filters, collection_tag=self.get_meta("tag"))
        if not tag:
            # no filter
            yield from ((item, simple) for item in self.get_all())
            return
        for item, (itag, istart, iend) in (
                self._get_with_metadata(href, verify_href=False)
                for href in self.list()):
            # Interval overlap test between the item and the filter range.
            if tag == itag and istart < end and iend > start:
                yield item, simple and (start <= istart or iend <= end)
+
    def upload(self, href, vobject_item):
        """Upload a new or replace an existing item and return it.

        Raises ``UnsafePathError`` for unsafe names and ``ValueError``
        when the item can't be serialized/cached.
        """
        if not is_safe_filesystem_path_component(href):
            raise UnsafePathError(href)
        try:
            # The cache entry is written first; it also yields the
            # serialized text that is stored on disk below.
            cache_hash, uid, etag, text, name, tag, _, _ = \
                self._store_item_cache(href, vobject_item)
        except Exception as e:
            raise ValueError("Failed to store item %r in collection %r: %s" %
                             (href, self.path, e)) from e
        path = path_to_filesystem(self._filesystem_path, href)
        with self._atomic_write(path, newline="") as fd:
            fd.write(text)
        # Clean the cache after the actual item is stored, or the cache entry
        # will be removed again.
        self._clean_item_cache()
        item = Item(self, href=href, etag=etag, text=text, item=vobject_item,
                    uid=uid, name=name, component_name=tag)
        # Track the change
        self._update_history_etag(href, item)
        self._clean_history_cache()
        return item
+
    def delete(self, href=None):
        """Delete the item ``href``, or the whole collection when ``href``
        is ``None``.  Raises ``ComponentNotFoundError`` for missing items."""
        if href is None:
            # Delete the collection
            parent_dir = os.path.dirname(self._filesystem_path)
            try:
                os.rmdir(self._filesystem_path)
            except OSError:
                # Non-empty: move it into a temporary directory so the
                # removal appears atomic to other processes.
                with TemporaryDirectory(
                        prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
                    os.rename(self._filesystem_path, os.path.join(
                        tmp, os.path.basename(self._filesystem_path)))
                    self._sync_directory(parent_dir)
            else:
                self._sync_directory(parent_dir)
        else:
            # Delete an item
            if not is_safe_filesystem_path_component(href):
                raise UnsafePathError(href)
            path = path_to_filesystem(self._filesystem_path, href)
            if not os.path.isfile(path):
                raise ComponentNotFoundError(href)
            os.remove(path)
            self._sync_directory(os.path.dirname(path))
            # Track the change
            self._update_history_etag(href, None)
            self._clean_history_cache()
+
    def get_meta(self, key=None):
        """Return the property ``key``, or all properties when ``key`` is
        ``None``.  Raises ``RuntimeError`` for an unreadable props file."""
        # reuse cached value if the storage is read-only
        if self._lock.locked() == "w" or self._meta_cache is None:
            try:
                try:
                    with open(self._props_path, encoding=self._encoding) as f:
                        self._meta_cache = json.load(f)
                except FileNotFoundError:
                    # No props file yet means no properties.
                    self._meta_cache = {}
                check_and_sanitize_props(self._meta_cache)
            except ValueError as e:
                raise RuntimeError("Failed to load properties of collection "
                                   "%r: %s" % (self.path, e)) from e
        return self._meta_cache.get(key) if key else self._meta_cache
+
    def set_meta_all(self, props):
        """Atomically write ``props`` as the collection's property file."""
        with self._atomic_write(self._props_path, "w") as f:
            json.dump(props, f, sort_keys=True)
+
+ @property
+ def last_modified(self):
+ relevant_files = chain(
+ (self._filesystem_path,),
+ (self._props_path,) if os.path.exists(self._props_path) else (),
+ (os.path.join(self._filesystem_path, h) for h in self.list()))
+ last = max(map(os.path.getmtime, relevant_files))
+ return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
+
    @property
    def etag(self):
        """Collection etag; computed by ``BaseCollection.etag``."""
        # reuse cached value if the storage is read-only
        if self._lock.locked() == "w" or self._etag_cache is None:
            self._etag_cache = super().etag
        return self._etag_cache
+
+    _lock = None  # class-wide FileBackedRwLock, created lazily on first use
+
+    @classmethod
+    @contextmanager
+    def acquire_lock(cls, mode, user=None):
+        """Lock the whole storage for the duration of the ``with`` block.
+
+        ``mode`` is "r" or "w" (see ``FileBackedRwLock.acquire_lock``).
+        ``user`` name substituted into the storage hook command, if any.
+        """
+        folder = os.path.expanduser(cls.configuration.get(
+            "storage", "filesystem_folder"))
+        if not cls._lock:
+            cls._makedirs_synced(folder)
+            lock_path = None
+            if cls.configuration.getboolean("storage", "filesystem_locking"):
+                lock_path = os.path.join(folder, ".Radicale.lock")
+            close_lock_file = cls.configuration.getboolean(
+                "storage", "filesystem_close_lock_file")
+            cls._lock = FileBackedRwLock(lock_path, close_lock_file)
+        with cls._lock.acquire_lock(mode):
+            yield
+            # Execute the hook after write access, while still holding the
+            # lock; the user name is shell-quoted before substitution.
+            hook = cls.configuration.get("storage", "hook")
+            if mode == "w" and hook:
+                cls.logger.debug("Running hook")
+                subprocess.check_call(
+                    hook % {"user": shlex.quote(user or "Anonymous")},
+                    shell=True, cwd=folder)
+
+
+class FileBackedRwLock:
+    """A readers-writer lock that can additionally lock a file.
+
+    All requests are processed in FIFO order.
+
+    """
+
+    def __init__(self, path=None, close_lock_file=True):
+        """Initialize a lock.
+
+        ``path`` the file that is used for locking (optional)
+
+        ``close_lock_file`` close the lock file, when unlocked and no requests
+        are pending
+
+        """
+        self._path = path
+        self._close_lock_file = close_lock_file
+
+        self._lock = threading.Lock()  # guards all of the state below
+        self._waiters = []  # FIFO queue of Condition objects, one per waiter
+        self._lock_file = None  # open file object used for OS-level locking
+        self._lock_file_locked = False  # whether the OS-level lock is held
+        self._readers = 0  # number of threads currently holding "r"
+        self._writer = False  # whether some thread currently holds "w"
+
+    def locked(self):
+        """Return "w" if write-locked, "r" if read-locked, "" otherwise.
+
+        NOTE(review): reads the state without taking ``self._lock``, so the
+        result is only a point-in-time snapshot.
+        """
+        if self._writer:
+            return "w"
+        if self._readers:
+            return "r"
+        return ""
+
+    def in_use(self):
+        """Return a truthy value if the lock is held or threads are waiting."""
+        with self._lock:
+            return self._waiters or self._readers or self._writer
+
+    @contextmanager
+    def acquire_lock(self, mode, sync_callback=None):
+        """Hold the lock in ``mode`` ("r" shared, "w" exclusive) for the
+        ``with`` block.
+
+        ``sync_callback`` if given, is called while holding the internal
+        mutex, before any waiting or state change.
+        """
+        def condition():
+            # "r" only excludes a writer; "w" requires sole access
+            if mode == "r":
+                return not self._writer
+            else:
+                return not self._writer and self._readers == 0
+
+        # Use a primitive lock which only works within one process as a
+        # precondition for inter-process file-based locking
+        with self._lock:
+            if sync_callback:
+                sync_callback()
+            if self._waiters or not condition():
+                # Use FIFO for access requests
+                waiter = threading.Condition(lock=self._lock)
+                self._waiters.append(waiter)
+                while True:
+                    waiter.wait()
+                    if condition():
+                        break
+                self._waiters.pop(0)
+            if mode == "r":
+                self._readers += 1
+                # Notify additional potential readers
+                if self._waiters:
+                    self._waiters[0].notify()
+            else:
+                self._writer = True
+            if self._path and not self._lock_file_locked:
+                if not self._lock_file:
+                    self._lock_file = open(self._path, "w+")
+                if os.name == "nt":
+                    # Windows: LockFileEx via ctypes/msvcrt
+                    handle = msvcrt.get_osfhandle(self._lock_file.fileno())
+                    flags = LOCKFILE_EXCLUSIVE_LOCK if mode == "w" else 0
+                    overlapped = Overlapped()
+                    if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
+                        raise RuntimeError("Locking the storage failed "
+                                           "(can be disabled in the config): "
+                                           "%s" % ctypes.FormatError())
+                elif os.name == "posix":
+                    # POSIX: shared/exclusive flock matching the mode
+                    _cmd = fcntl.LOCK_EX if mode == "w" else fcntl.LOCK_SH
+                    try:
+                        fcntl.flock(self._lock_file.fileno(), _cmd)
+                    except OSError as e:
+                        raise RuntimeError("Locking the storage failed "
+                                           "(can be disabled in the config): "
+                                           "%s" % e) from e
+                else:
+                    raise RuntimeError("Locking the storage failed "
+                                       "(can be disabled in the config): "
+                                       "Unsupported operating system")
+                self._lock_file_locked = True
+        try:
+            yield
+        finally:
+            with self._lock:
+                if mode == "r":
+                    self._readers -= 1
+                else:
+                    self._writer = False
+                # Release the OS-level lock once no reader/writer remains
+                if self._lock_file_locked and self._readers == 0:
+                    if os.name == "nt":
+                        handle = msvcrt.get_osfhandle(self._lock_file.fileno())
+                        overlapped = Overlapped()
+                        if not unlock_file_ex(handle, 0, 1, 0, overlapped):
+                            raise RuntimeError("Unlocking the storage failed: "
+                                               "%s" % ctypes.FormatError())
+                    elif os.name == "posix":
+                        try:
+                            fcntl.flock(self._lock_file.fileno(),
+                                        fcntl.LOCK_UN)
+                        except OSError as e:
+                            raise RuntimeError("Unlocking the storage failed: "
+                                               "%s" % e) from e
+                    else:
+                        raise RuntimeError("Unlocking the storage failed: "
+                                           "Unsupported operating system")
+                    if self._close_lock_file and not self._waiters:
+                        self._lock_file.close()
+                        self._lock_file = None
+                    self._lock_file_locked = False
+                if self._waiters:
+                    self._waiters[0].notify()
diff --git a/radicale/tests/__init__.py b/radicale/tests/__init__.py
new file mode 100644
index 0000000..bb8e586
--- /dev/null
+++ b/radicale/tests/__init__.py
@@ -0,0 +1,66 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2012-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Tests for Radicale.
+
+"""
+
+import logging
+import os
+import sys
+from io import BytesIO
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+
+logger = logging.getLogger("radicale_test")
+if not logger.hasHandlers():
+ handler = logging.StreamHandler(sys.stderr)
+ handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
+ logger.addHandler(handler)
+logger.setLevel(logging.DEBUG)
+
+
+class BaseTest:
+ """Base class for tests."""
+ logger = logger
+
+ def request(self, method, path, data=None, **args):
+ """Send a request."""
+ self.application._status = None
+ self.application._headers = None
+ self.application._answer = None
+
+ for key in args:
+ args[key.upper()] = args[key]
+ args["REQUEST_METHOD"] = method.upper()
+ args["PATH_INFO"] = path
+ if data:
+ data = data.encode("utf-8")
+ args["wsgi.input"] = BytesIO(data)
+ args["CONTENT_LENGTH"] = str(len(data))
+ self.application._answer = self.application(args, self.start_response)
+
+ return (
+ int(self.application._status.split()[0]),
+ dict(self.application._headers),
+ self.application._answer[0].decode("utf-8")
+ if self.application._answer else None)
+
+ def start_response(self, status, headers):
+ """Put the response values into the current application."""
+ self.application._status = status
+ self.application._headers = headers
diff --git a/radicale/tests/custom/__init__.py b/radicale/tests/custom/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/radicale/tests/custom/auth.py b/radicale/tests/custom/auth.py
new file mode 100644
index 0000000..c61f637
--- /dev/null
+++ b/radicale/tests/custom/auth.py
@@ -0,0 +1,31 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2008 Nicolas Kandel
+# Copyright © 2008 Pascal Halter
+# Copyright © 2008-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Custom authentication.
+
+Just check username for testing
+
+"""
+
+from radicale import auth
+
+
+class Auth(auth.BaseAuth):
+    """Dummy auth backend: only the user "tmp" is accepted."""
+
+    def is_authenticated(self, user, password):
+        # The password is deliberately ignored; tests check the username only
+        return user == "tmp"
diff --git a/radicale/tests/custom/rights.py b/radicale/tests/custom/rights.py
new file mode 100644
index 0000000..8fdda24
--- /dev/null
+++ b/radicale/tests/custom/rights.py
@@ -0,0 +1,27 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright (C) 2017 Unrud
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Custom rights management.
+
+"""
+
+from radicale import rights
+
+
+class Rights(rights.BaseRights):
+    """Dummy rights backend: only the collections "tmp" and "other" exist."""
+
+    def authorized(self, user, path, permission):
+        # Grants any user (including anonymous) any permission on these paths
+        return path.strip("/") in ("tmp", "other")
diff --git a/radicale/tests/custom/storage.py b/radicale/tests/custom/storage.py
new file mode 100644
index 0000000..621fdc2
--- /dev/null
+++ b/radicale/tests/custom/storage.py
@@ -0,0 +1,31 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2012-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Custom storage backend.
+
+Copy of filesystem storage backend for testing
+
+"""
+
+from radicale import storage
+
+
+# TODO: make something more in this collection (and test it)
+class Collection(storage.Collection):
+    """Collection stored in a folder."""
+    # Pass-through subclass of the filesystem storage backend; it exists so
+    # the tests can exercise loading a "custom" storage module.
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
diff --git a/radicale/tests/helpers.py b/radicale/tests/helpers.py
new file mode 100644
index 0000000..feb25ec
--- /dev/null
+++ b/radicale/tests/helpers.py
@@ -0,0 +1,36 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2008 Nicolas Kandel
+# Copyright © 2008 Pascal Halter
+# Copyright © 2008-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Radicale Helpers module.
+
+This module offers helpers to use in tests.
+
+"""
+
+import os
+
+EXAMPLES_FOLDER = os.path.join(os.path.dirname(__file__), "static")
+
+
+def get_file_content(file_name):
+ try:
+ with open(os.path.join(EXAMPLES_FOLDER, file_name)) as fd:
+ return fd.read()
+ except IOError:
+ print("Couldn't open the file %s" % file_name)
diff --git a/radicale/tests/static/allprop.xml b/radicale/tests/static/allprop.xml
new file mode 100644
index 0000000..1b7692d
--- /dev/null
+++ b/radicale/tests/static/allprop.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<propfind xmlns="DAV:">
+  <allprop/>
+</propfind>
diff --git a/radicale/tests/static/broken-vcard.vcf b/radicale/tests/static/broken-vcard.vcf
new file mode 100644
index 0000000..140ddc2
--- /dev/null
+++ b/radicale/tests/static/broken-vcard.vcf
@@ -0,0 +1,8 @@
+BEGIN:VCARD
+VERSION:3.0
+PRODID:-//Inverse inc.//SOGo Connector 1.0//EN
+UID:C68582D2-2E60-0001-C2C0-000000000000.vcf
+X-MOZILLA-HTML:FALSE
+EMAIL;TYPE=work:test-misses-N-or-FN@example.com
+X-RADICALE-NAME:C68582D2-2E60-0001-C2C0-000000000000.vcf
+END:VCARD
diff --git a/radicale/tests/static/broken-vevent.ics b/radicale/tests/static/broken-vevent.ics
new file mode 100644
index 0000000..a6244ea
--- /dev/null
+++ b/radicale/tests/static/broken-vevent.ics
@@ -0,0 +1,15 @@
+BEGIN:VCALENDAR
+PRODID:-//Radicale//NONSGML Radicale Server//EN
+VERSION:2.0
+BEGIN:VEVENT
+CREATED:20160725T060147Z
+LAST-MODIFIED:20160727T193435Z
+DTSTAMP:20160727T193435Z
+UID:040000008200E00074C5B7101A82E00800000000
+SUMMARY:Broken ICS END of VEVENT missing by accident
+STATUS:CONFIRMED
+X-MOZ-LASTACK:20160727T193435Z
+DTSTART;TZID=Europe/Budapest:20160727T170000
+DTEND;TZID=Europe/Budapest:20160727T223000
+CLASS:PUBLIC
+X-LIC-ERROR:No value for LOCATION property. Removing entire property:
diff --git a/radicale/tests/static/contact1.vcf b/radicale/tests/static/contact1.vcf
new file mode 100644
index 0000000..35472de
--- /dev/null
+++ b/radicale/tests/static/contact1.vcf
@@ -0,0 +1,7 @@
+BEGIN:VCARD
+VERSION:3.0
+UID:contact1
+N:Contact;;;;
+FN:Contact
+NICKNAME:test
+END:VCARD
diff --git a/radicale/tests/static/contact_multiple.vcf b/radicale/tests/static/contact_multiple.vcf
new file mode 100644
index 0000000..e353e1c
--- /dev/null
+++ b/radicale/tests/static/contact_multiple.vcf
@@ -0,0 +1,12 @@
+BEGIN:VCARD
+VERSION:3.0
+UID:contact1
+N:Contact1;;;;
+FN:Contact1
+END:VCARD
+BEGIN:VCARD
+VERSION:3.0
+UID:contact2
+N:Contact2;;;;
+FN:Contact2
+END:VCARD
diff --git a/radicale/tests/static/event1-prime.ics b/radicale/tests/static/event1-prime.ics
new file mode 100644
index 0000000..92ec904
--- /dev/null
+++ b/radicale/tests/static/event1-prime.ics
@@ -0,0 +1,34 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+CREATED:20130902T150157Z
+LAST-MODIFIED:20130902T150158Z
+DTSTAMP:20130902T150158Z
+UID:event1
+SUMMARY:Event
+ORGANIZER:mailto:unclesam@example.com
+ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Jane Doe:MAILTO:janedoe@example.com
+ATTENDEE;ROLE=REQ-PARTICIPANT;DELEGATED-FROM="MAILTO:bob@host.com";PARTSTAT=ACCEPTED;CN=John Doe:MAILTO:johndoe@example.com
+DTSTART;TZID=Europe/Paris:20140901T180000
+DTEND;TZID=Europe/Paris:20140901T210000
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event1.ics b/radicale/tests/static/event1.ics
new file mode 100644
index 0000000..bc04d80
--- /dev/null
+++ b/radicale/tests/static/event1.ics
@@ -0,0 +1,34 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+CREATED:20130902T150157Z
+LAST-MODIFIED:20130902T150158Z
+DTSTAMP:20130902T150158Z
+UID:event1
+SUMMARY:Event
+ORGANIZER:mailto:unclesam@example.com
+ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Jane Doe:MAILTO:janedoe@example.com
+ATTENDEE;ROLE=REQ-PARTICIPANT;DELEGATED-FROM="MAILTO:bob@host.com";PARTSTAT=ACCEPTED;CN=John Doe:MAILTO:johndoe@example.com
+DTSTART;TZID=Europe/Paris:20130901T180000
+DTEND;TZID=Europe/Paris:20130901T190000
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event2.ics b/radicale/tests/static/event2.ics
new file mode 100644
index 0000000..8695944
--- /dev/null
+++ b/radicale/tests/static/event2.ics
@@ -0,0 +1,42 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+CREATED:20130902T150157Z
+LAST-MODIFIED:20130902T150158Z
+DTSTAMP:20130902T150158Z
+UID:event2
+SUMMARY:Event2
+DTSTART;TZID=Europe/Paris:20130902T180000
+DTEND;TZID=Europe/Paris:20130902T190000
+RRULE:FREQ=WEEKLY
+SEQUENCE:1
+END:VEVENT
+BEGIN:VEVENT
+DTSTART;TZID=Europe/Paris:20130910T170000
+DTEND;TZID=Europe/Paris:20130910T180000
+DTSTAMP:20140902T150158Z
+SUMMARY:Event2
+UID:event2
+RECURRENCE-ID;TZID=Europe/Paris:20130909T180000
+SEQUENCE:2
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event3.ics b/radicale/tests/static/event3.ics
new file mode 100644
index 0000000..18bbbe9
--- /dev/null
+++ b/radicale/tests/static/event3.ics
@@ -0,0 +1,31 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+CREATED:20130902T150157Z
+LAST-MODIFIED:20130902T150158Z
+DTSTAMP:20130902T150158Z
+UID:event3
+SUMMARY:Event3
+DTSTART;TZID=Europe/Paris:20130903
+DURATION:PT1H
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event4.ics b/radicale/tests/static/event4.ics
new file mode 100644
index 0000000..b4f3f82
--- /dev/null
+++ b/radicale/tests/static/event4.ics
@@ -0,0 +1,30 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+CREATED:20130902T150157Z
+LAST-MODIFIED:20130902T150158Z
+DTSTAMP:20130902T150158Z
+UID:event4
+SUMMARY:Event4
+DTSTART;TZID=Europe/Paris:20130904T180000
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event5.ics b/radicale/tests/static/event5.ics
new file mode 100644
index 0000000..e87af37
--- /dev/null
+++ b/radicale/tests/static/event5.ics
@@ -0,0 +1,30 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+CREATED:20130902T150157Z
+LAST-MODIFIED:20130902T150158Z
+DTSTAMP:20130902T150158Z
+UID:event5
+SUMMARY:Event5
+DTSTART;TZID=Europe/Paris:20130905
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event6.ics b/radicale/tests/static/event6.ics
new file mode 100644
index 0000000..5d71f11
--- /dev/null
+++ b/radicale/tests/static/event6.ics
@@ -0,0 +1,46 @@
+BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+BEGIN:STANDARD
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+TZNAME:CET
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+END:STANDARD
+BEGIN:DAYLIGHT
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+TZNAME:CEST
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+END:DAYLIGHT
+END:VTIMEZONE
+BEGIN:VEVENT
+UID:event6
+DTSTART;TZID=Europe/Paris:20170601T080000
+DTEND;TZID=Europe/Paris:20170601T090000
+CREATED:20170601T060000Z
+DTSTAMP:20170601T060000Z
+LAST-MODIFIED:20170601T060000Z
+RRULE:FREQ=DAILY;UNTIL=20170602T060000Z
+SUMMARY:event6
+TRANSP:OPAQUE
+X-MOZ-GENERATION:1
+END:VEVENT
+BEGIN:VEVENT
+UID:event6
+RECURRENCE-ID;TZID=Europe/Paris:20170602T080000
+DTSTART;TZID=Europe/Paris:20170701T080000
+DTEND;TZID=Europe/Paris:20170701T090000
+CREATED:20170601T060000Z
+DTSTAMP:20170601T060000Z
+LAST-MODIFIED:20170601T060000Z
+SEQUENCE:1
+SUMMARY:event6
+TRANSP:OPAQUE
+X-MOZ-GENERATION:1
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event7.ics b/radicale/tests/static/event7.ics
new file mode 100644
index 0000000..734ccbd
--- /dev/null
+++ b/radicale/tests/static/event7.ics
@@ -0,0 +1,59 @@
+BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+BEGIN:STANDARD
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+TZNAME:CET
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+END:STANDARD
+BEGIN:DAYLIGHT
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+TZNAME:CEST
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+END:DAYLIGHT
+END:VTIMEZONE
+BEGIN:VEVENT
+UID:event7
+DTSTART;TZID=Europe/Paris:20170701T080000
+DTEND;TZID=Europe/Paris:20170701T090000
+CREATED:20170601T060000Z
+DTSTAMP:20170601T060000Z
+LAST-MODIFIED:20170601T060000Z
+RRULE:FREQ=DAILY
+SUMMARY:event7
+TRANSP:OPAQUE
+X-MOZ-GENERATION:1
+END:VEVENT
+BEGIN:VEVENT
+UID:event7
+RECURRENCE-ID;TZID=Europe/Paris:20170702T080000
+DTSTART;TZID=Europe/Paris:20170702T080000
+DTEND;TZID=Europe/Paris:20170702T090000
+CREATED:20170601T060000Z
+DTSTAMP:20170601T060000Z
+LAST-MODIFIED:20170601T060000Z
+SEQUENCE:1
+SUMMARY:event7
+TRANSP:OPAQUE
+X-MOZ-GENERATION:1
+END:VEVENT
+BEGIN:VEVENT
+UID:event7
+RECURRENCE-ID;TZID=Europe/Paris:20170703T080000
+DTSTART;TZID=Europe/Paris:20170601T080000
+DTEND;TZID=Europe/Paris:20170601T090000
+CREATED:20170601T060000Z
+DTSTAMP:20170601T060000Z
+LAST-MODIFIED:20170601T060000Z
+SEQUENCE:1
+SUMMARY:event7
+TRANSP:OPAQUE
+X-MOZ-GENERATION:1
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event8.ics b/radicale/tests/static/event8.ics
new file mode 100644
index 0000000..39136e1
--- /dev/null
+++ b/radicale/tests/static/event8.ics
@@ -0,0 +1,33 @@
+BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+BEGIN:STANDARD
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+TZNAME:CET
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+END:STANDARD
+BEGIN:DAYLIGHT
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+TZNAME:CEST
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+END:DAYLIGHT
+END:VTIMEZONE
+BEGIN:VEVENT
+UID:event8
+DTSTART;TZID=Europe/Paris:20170601T080000
+DTEND;TZID=Europe/Paris:20170601T090000
+CREATED:20170601T060000Z
+DTSTAMP:20170601T060000Z
+LAST-MODIFIED:20170601T060000Z
+RDATE;TZID=Europe/Paris:20170701T080000
+SUMMARY:event8
+TRANSP:OPAQUE
+X-MOZ-GENERATION:1
+END:VEVENT
+END:VCALENDAR
diff --git a/radicale/tests/static/event_multiple.ics b/radicale/tests/static/event_multiple.ics
new file mode 100644
index 0000000..c6527c8
--- /dev/null
+++ b/radicale/tests/static/event_multiple.ics
@@ -0,0 +1,34 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VEVENT
+UID:event
+SUMMARY:Event
+DTSTART;TZID=Europe/Paris:20130901T190000
+DTEND;TZID=Europe/Paris:20130901T200000
+END:VEVENT
+BEGIN:VTODO
+UID:todo
+DTSTART;TZID=Europe/Paris:20130901T220000
+DURATION:PT1H
+SUMMARY:Todo
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/journal1.ics b/radicale/tests/static/journal1.ics
new file mode 100644
index 0000000..ab45833
--- /dev/null
+++ b/radicale/tests/static/journal1.ics
@@ -0,0 +1,30 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700101T000000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19700101T000000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+
+BEGIN:VJOURNAL
+UID:journal1
+DTSTAMP;TZID=Europe/Paris:19940817T000000
+SUMMARY:happy new year
+DESCRIPTION: Happy new year 2000 !
+END:VJOURNAL
+
+END:VCALENDAR
diff --git a/radicale/tests/static/journal2.ics b/radicale/tests/static/journal2.ics
new file mode 100644
index 0000000..27cb7cc
--- /dev/null
+++ b/radicale/tests/static/journal2.ics
@@ -0,0 +1,32 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+
+BEGIN:VJOURNAL
+UID:journal2
+DTSTAMP:19950817T000000
+DTSTART;TZID=Europe/Paris:20000101T100000
+SUMMARY:happy new year
+DESCRIPTION: Happy new year !
+RRULE:FREQ=YEARLY
+END:VJOURNAL
+
+END:VCALENDAR
diff --git a/radicale/tests/static/journal3.ics b/radicale/tests/static/journal3.ics
new file mode 100644
index 0000000..a319d3a
--- /dev/null
+++ b/radicale/tests/static/journal3.ics
@@ -0,0 +1,31 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+
+BEGIN:VJOURNAL
+UID:journal2
+DTSTAMP:19950817T000000
+DTSTART;VALUE=DATE:20000101
+SUMMARY:happy new year
+DESCRIPTION: Happy new year 2001 !
+END:VJOURNAL
+
+END:VCALENDAR
diff --git a/radicale/tests/static/journal4.ics b/radicale/tests/static/journal4.ics
new file mode 100644
index 0000000..5a1eeca
--- /dev/null
+++ b/radicale/tests/static/journal4.ics
@@ -0,0 +1,23 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+
+END:VCALENDAR
diff --git a/radicale/tests/static/journal5.ics b/radicale/tests/static/journal5.ics
new file mode 100644
index 0000000..5a1eeca
--- /dev/null
+++ b/radicale/tests/static/journal5.ics
@@ -0,0 +1,23 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+
+END:VCALENDAR
diff --git a/radicale/tests/static/propfind1.xml b/radicale/tests/static/propfind1.xml
new file mode 100644
index 0000000..1535f7f
--- /dev/null
+++ b/radicale/tests/static/propfind1.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<propfind xmlns="DAV:">
+  <prop>
+    <current-user-principal/>
+  </prop>
+</propfind>
\ No newline at end of file
diff --git a/radicale/tests/static/propname.xml b/radicale/tests/static/propname.xml
new file mode 100644
index 0000000..2f56bc0
--- /dev/null
+++ b/radicale/tests/static/propname.xml
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<propfind xmlns="DAV:">
+  <propname/>
+</propfind>
diff --git a/radicale/tests/static/proppatch1.xml b/radicale/tests/static/proppatch1.xml
new file mode 100644
index 0000000..c549dec
--- /dev/null
+++ b/radicale/tests/static/proppatch1.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<propertyupdate xmlns="DAV:" xmlns:ICAL="http://apple.com/ns/ical/">
+  <set>
+    <prop>
+      <ICAL:calendar-color>#BADA55</ICAL:calendar-color>
+    </prop>
+  </set>
+</propertyupdate>
\ No newline at end of file
diff --git a/radicale/tests/static/todo1.ics b/radicale/tests/static/todo1.ics
new file mode 100644
index 0000000..0ffdede
--- /dev/null
+++ b/radicale/tests/static/todo1.ics
@@ -0,0 +1,28 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+DTSTART;TZID=Europe/Paris:20130901T220000
+DURATION:PT1H
+SUMMARY:Todo
+UID:todo
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo2.ics b/radicale/tests/static/todo2.ics
new file mode 100644
index 0000000..32274f7
--- /dev/null
+++ b/radicale/tests/static/todo2.ics
@@ -0,0 +1,28 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+DTSTART;TZID=Europe/Paris:20130901T180000
+DUE;TZID=Europe/Paris:20130903T180000
+RRULE:FREQ=MONTHLY
+UID:todo2
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo3.ics b/radicale/tests/static/todo3.ics
new file mode 100644
index 0000000..f9252fd
--- /dev/null
+++ b/radicale/tests/static/todo3.ics
@@ -0,0 +1,26 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+DTSTART;TZID=Europe/Paris:20130901T180000
+UID:todo3
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo4.ics b/radicale/tests/static/todo4.ics
new file mode 100644
index 0000000..1c651dc
--- /dev/null
+++ b/radicale/tests/static/todo4.ics
@@ -0,0 +1,26 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+DUE;TZID=Europe/Paris:20130901T180000
+UID:todo4
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo5.ics b/radicale/tests/static/todo5.ics
new file mode 100644
index 0000000..29c307f
--- /dev/null
+++ b/radicale/tests/static/todo5.ics
@@ -0,0 +1,27 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+CREATED;TZID=Europe/Paris:20130903T180000
+COMPLETED;TZID=Europe/Paris:20130920T180000
+UID:todo5
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo6.ics b/radicale/tests/static/todo6.ics
new file mode 100644
index 0000000..805b4cf
--- /dev/null
+++ b/radicale/tests/static/todo6.ics
@@ -0,0 +1,26 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+COMPLETED;TZID=Europe/Paris:20130920T180000
+UID:todo6
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo7.ics b/radicale/tests/static/todo7.ics
new file mode 100644
index 0000000..f94b271
--- /dev/null
+++ b/radicale/tests/static/todo7.ics
@@ -0,0 +1,26 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+CREATED;TZID=Europe/Paris:20130803T180000
+UID:todo7
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/static/todo8.ics b/radicale/tests/static/todo8.ics
new file mode 100644
index 0000000..27d4962
--- /dev/null
+++ b/radicale/tests/static/todo8.ics
@@ -0,0 +1,25 @@
+BEGIN:VCALENDAR
+PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
+VERSION:2.0
+BEGIN:VTIMEZONE
+TZID:Europe/Paris
+X-LIC-LOCATION:Europe/Paris
+BEGIN:DAYLIGHT
+TZOFFSETFROM:+0100
+TZOFFSETTO:+0200
+TZNAME:CEST
+DTSTART:19700329T020000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
+END:DAYLIGHT
+BEGIN:STANDARD
+TZOFFSETFROM:+0200
+TZOFFSETTO:+0100
+TZNAME:CET
+DTSTART:19701025T030000
+RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
+END:STANDARD
+END:VTIMEZONE
+BEGIN:VTODO
+UID:todo8
+END:VTODO
+END:VCALENDAR
diff --git a/radicale/tests/test_auth.py b/radicale/tests/test_auth.py
new file mode 100644
index 0000000..a70693b
--- /dev/null
+++ b/radicale/tests/test_auth.py
@@ -0,0 +1,167 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2012-2016 Jean-Marc Martins
+# Copyright © 2012-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see .
+
+"""
+Radicale tests with simple requests and authentication.
+
+"""
+
+import base64
+import os
+import shutil
+import tempfile
+
+import pytest
+
+from radicale import Application, config
+
+from .test_base import BaseTest
+
+
+class TestBaseAuthRequests(BaseTest):
+ """Tests basic requests with auth.
+
+ We should setup auth for each type before creating the Application object.
+
+ """
+ def setup(self):
+ self.configuration = config.load()
+ self.colpath = tempfile.mkdtemp()
+ self.configuration["storage"]["filesystem_folder"] = self.colpath
+ # Disable syncing to disk for better performance
+ self.configuration["storage"]["filesystem_fsync"] = "False"
+ # Required on Windows, doesn't matter on Unix
+ self.configuration["storage"]["filesystem_close_lock_file"] = "True"
+ # Set incorrect authentication delay to a very low value
+ self.configuration["auth"]["delay"] = "0.002"
+
+ def teardown(self):
+ shutil.rmtree(self.colpath)
+
+ def _test_htpasswd(self, htpasswd_encryption, htpasswd_content,
+ test_matrix=None):
+ """Test htpasswd authentication with user "tmp" and password "bepo"."""
+ htpasswd_file_path = os.path.join(self.colpath, ".htpasswd")
+ with open(htpasswd_file_path, "w") as f:
+ f.write(htpasswd_content)
+ self.configuration["auth"]["type"] = "htpasswd"
+ self.configuration["auth"]["htpasswd_filename"] = htpasswd_file_path
+ self.configuration["auth"]["htpasswd_encryption"] = htpasswd_encryption
+ self.application = Application(self.configuration, self.logger)
+ if test_matrix is None:
+ test_matrix = (
+ ("tmp", "bepo", 207), ("tmp", "tmp", 401), ("tmp", "", 401),
+ ("unk", "unk", 401), ("unk", "", 401), ("", "", 401))
+ for user, password, expected_status in test_matrix:
+ status, _, answer = self.request(
+ "PROPFIND", "/",
+ HTTP_AUTHORIZATION="Basic %s" % base64.b64encode(
+ ("%s:%s" % (user, password)).encode()).decode())
+ assert status == expected_status
+
+ def test_htpasswd_plain(self):
+ self._test_htpasswd("plain", "tmp:bepo")
+
+ def test_htpasswd_plain_password_split(self):
+ self._test_htpasswd("plain", "tmp:be:po", (
+ ("tmp", "be:po", 207), ("tmp", "bepo", 401)))
+
+ def test_htpasswd_sha1(self):
+ self._test_htpasswd("sha1", "tmp:{SHA}UWRS3uSJJq2itZQEUyIH8rRajCM=")
+
+ def test_htpasswd_ssha(self):
+ self._test_htpasswd("ssha", "tmp:{SSHA}qbD1diw9RJKi0DnW4qO8WX9SE18W")
+
+ def test_htpasswd_md5(self):
+ try:
+ import passlib # noqa: F401
+ except ImportError:
+ pytest.skip("passlib is not installed")
+ self._test_htpasswd("md5", "tmp:$apr1$BI7VKCZh$GKW4vq2hqDINMr8uv7lDY/")
+
+ def test_htpasswd_crypt(self):
+ try:
+ import crypt # noqa: F401
+ except ImportError:
+ pytest.skip("crypt is not installed")
+ self._test_htpasswd("crypt", "tmp:dxUqxoThMs04k")
+
+ def test_htpasswd_bcrypt(self):
+ try:
+ from passlib.hash import bcrypt
+ from passlib.exc import MissingBackendError
+ except ImportError:
+ pytest.skip("passlib is not installed")
+ try:
+ bcrypt.encrypt("test-bcrypt-backend")
+ except MissingBackendError:
+ pytest.skip("bcrypt backend for passlib is not installed")
+ self._test_htpasswd(
+ "bcrypt",
+ "tmp:$2y$05$oD7hbiQFQlvCM7zoalo/T.MssV3VNTRI3w5KDnj8NTUKJNWfVpvRq")
+
+ def test_htpasswd_multi(self):
+ self._test_htpasswd("plain", "ign:ign\ntmp:bepo")
+
+ @pytest.mark.skipif(os.name == "nt", reason="leading and trailing "
+ "whitespaces not allowed in file names")
+ def test_htpasswd_whitespace_preserved(self):
+ self._test_htpasswd("plain", " tmp : bepo ",
+ ((" tmp ", " bepo ", 207),))
+
+ def test_htpasswd_whitespace_not_trimmed(self):
+ self._test_htpasswd("plain", " tmp : bepo ", (("tmp", "bepo", 401),))
+
+ def test_htpasswd_comment(self):
+ self._test_htpasswd("plain", "#comment\n #comment\n \ntmp:bepo\n\n")
+
+ def test_remote_user(self):
+ self.configuration["auth"]["type"] = "remote_user"
+ self.application = Application(self.configuration, self.logger)
+ status, _, answer = self.request(
+ "PROPFIND", "/",
+ """
+
+
+
+
+ """, REMOTE_USER="test")
+ assert status == 207
+ assert ">/test/<" in answer
+
+ def test_http_x_remote_user(self):
+ self.configuration["auth"]["type"] = "http_x_remote_user"
+ self.application = Application(self.configuration, self.logger)
+ status, _, answer = self.request(
+ "PROPFIND", "/",
+ """
+
+
+
+
+ """, HTTP_X_REMOTE_USER="test")
+ assert status == 207
+ assert ">/test/<" in answer
+
+ def test_custom(self):
+ """Custom authentication."""
+ self.configuration["auth"]["type"] = "tests.custom.auth"
+ self.application = Application(self.configuration, self.logger)
+ status, _, answer = self.request(
+ "PROPFIND", "/tmp", HTTP_AUTHORIZATION="Basic %s" %
+ base64.b64encode(("tmp:").encode()).decode())
+ assert status == 207
diff --git a/radicale/tests/test_base.py b/radicale/tests/test_base.py
new file mode 100644
index 0000000..ab0c26d
--- /dev/null
+++ b/radicale/tests/test_base.py
@@ -0,0 +1,1530 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2012-2017 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see .
+
+"""
+Radicale tests with simple requests.
+
+"""
+
+import base64
+import os
+import posixpath
+import shutil
+import tempfile
+import xml.etree.ElementTree as ET
+from functools import partial
+
+import pytest
+
+from radicale import Application, config
+
+from . import BaseTest
+from .helpers import get_file_content
+
+
+class BaseRequestsMixIn:
+ """Tests with simple requests."""
+
+ def test_root(self):
+ """GET request at "/"."""
+ status, _, answer = self.request("GET", "/")
+ assert status == 302
+ assert answer == "Redirected to .web"
+
+ def test_script_name(self):
+ """GET request at "/" with SCRIPT_NAME."""
+ status, _, answer = self.request("GET", "/", SCRIPT_NAME="/radicale")
+ assert status == 302
+ assert answer == "Redirected to .web"
+ status, _, answer = self.request("GET", "", SCRIPT_NAME="/radicale")
+ assert status == 302
+ assert answer == "Redirected to radicale/.web"
+
+ def test_add_event(self):
+ """Add an event."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ path = "/calendar.ics/event1.ics"
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ status, headers, answer = self.request("GET", path)
+ assert status == 200
+ assert "ETag" in headers
+ assert headers["Content-Type"] == "text/calendar; charset=utf-8"
+ assert "VEVENT" in answer
+ assert "Event" in answer
+ assert "UID:event" in answer
+
+ def test_add_event_without_uid(self):
+ """Add an event without UID."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics").replace("UID:event1\n", "")
+ assert "\nUID:" not in event
+ path = "/calendar.ics/event.ics"
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ uids = []
+ for line in answer.split("\r\n"):
+ if line.startswith("UID:"):
+ uids.append(line[len("UID:"):])
+ assert len(uids) == 1 and uids[0]
+ # Overwrite the event with an event without UID and check that the UID
+ # is still the same
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ assert "\r\nUID:%s\r\n" % uids[0] in answer
+
+ def test_add_todo(self):
+ """Add a todo."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ todo = get_file_content("todo1.ics")
+ path = "/calendar.ics/todo1.ics"
+ status, _, _ = self.request("PUT", path, todo)
+ assert status == 201
+ status, headers, answer = self.request("GET", path)
+ assert status == 200
+ assert "ETag" in headers
+ assert headers["Content-Type"] == "text/calendar; charset=utf-8"
+ assert "VTODO" in answer
+ assert "Todo" in answer
+ assert "UID:todo" in answer
+
+ def _create_addressbook(self, path):
+ return self.request(
+ "MKCOL", path, """\
+
+
+
+
+
+
+
+
+
+
+""")
+
+ def test_add_contact(self):
+ """Add a contact."""
+ status, _, _ = self._create_addressbook("/contacts.vcf/")
+ assert status == 201
+ contact = get_file_content("contact1.vcf")
+ path = "/contacts.vcf/contact.vcf"
+ status, _, _ = self.request("PUT", path, contact)
+ assert status == 201
+ status, headers, answer = self.request("GET", path)
+ assert status == 200
+ assert "ETag" in headers
+ assert headers["Content-Type"] == "text/vcard; charset=utf-8"
+ assert "VCARD" in answer
+ assert "UID:contact1" in answer
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ assert "UID:contact1" in answer
+
+ def test_add_contact_without_uid(self):
+ """Add a contact."""
+ status, _, _ = self._create_addressbook("/contacts.vcf/")
+ assert status == 201
+ contact = get_file_content("contact1.vcf").replace("UID:contact1\n",
+ "")
+ assert "\nUID" not in contact
+ path = "/contacts.vcf/contact.vcf"
+ status, _, _ = self.request("PUT", path, contact)
+ assert status == 201
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ uids = []
+ for line in answer.split("\r\n"):
+ if line.startswith("UID:"):
+ uids.append(line[len("UID:"):])
+ assert len(uids) == 1 and uids[0]
+ # Overwrite the contact with an contact without UID and check that the
+ # UID is still the same
+ status, _, _ = self.request("PUT", path, contact)
+ assert status == 201
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ assert "\r\nUID:%s\r\n" % uids[0] in answer
+
+ def test_update(self):
+ """Update an event."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ path = "/calendar.ics/event1.ics"
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ status, headers, answer = self.request("GET", path)
+ assert "ETag" in headers
+ assert status == 200
+ assert "VEVENT" in answer
+ assert "Event" in answer
+ assert "UID:event" in answer
+ assert "DTSTART;TZID=Europe/Paris:20130901T180000" in answer
+ assert "DTEND;TZID=Europe/Paris:20130901T190000" in answer
+
+ # Then we send another PUT request
+ event = get_file_content("event1-prime.ics")
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ status, _, answer = self.request("GET", "/calendar.ics/")
+ assert status == 200
+ assert answer.count("BEGIN:VEVENT") == 1
+
+ status, headers, answer = self.request("GET", path)
+ assert status == 200
+ assert "ETag" in headers
+ assert "VEVENT" in answer
+ assert "Event" in answer
+ assert "UID:event" in answer
+ assert "DTSTART;TZID=Europe/Paris:20130901T180000" not in answer
+ assert "DTEND;TZID=Europe/Paris:20130901T190000" not in answer
+ assert "DTSTART;TZID=Europe/Paris:20140901T180000" in answer
+ assert "DTEND;TZID=Europe/Paris:20140901T210000" in answer
+
+ def test_put_whole_calendar(self):
+ """Create and overwrite a whole calendar."""
+ status, _, _ = self.request(
+ "PUT", "/calendar.ics/", "BEGIN:VCALENDAR\r\nEND:VCALENDAR")
+ assert status == 201
+ event1 = get_file_content("event1.ics")
+ status, _, _ = self.request(
+ "PUT", "/calendar.ics/test_event.ics", event1)
+ assert status == 201
+ # Overwrite
+ events = get_file_content("event_multiple.ics")
+ status, _, _ = self.request("PUT", "/calendar.ics/", events)
+ assert status == 201
+ status, _, _ = self.request("GET", "/calendar.ics/test_event.ics")
+ assert status == 404
+ status, _, answer = self.request("GET", "/calendar.ics/")
+ assert status == 200
+ assert "\r\nUID:event\r\n" in answer and "\r\nUID:todo\r\n" in answer
+ assert "\r\nUID:event1\r\n" not in answer
+
+ def test_put_whole_calendar_without_uids(self):
+ """Create a whole calendar without UID."""
+ event = get_file_content("event_multiple.ics")
+ event = event.replace("UID:event\n", "").replace("UID:todo\n", "")
+ assert "\nUID:" not in event
+ status, _, _ = self.request("PUT", "/calendar.ics/", event)
+ assert status == 201
+ status, _, answer = self.request("GET", "/calendar.ics")
+ assert status == 200
+ uids = []
+ for line in answer.split("\r\n"):
+ if line.startswith("UID:"):
+ uids.append(line[len("UID:"):])
+ assert len(uids) == 2
+ for i, uid1 in enumerate(uids):
+ assert uid1
+ for uid2 in uids[i + 1:]:
+ assert uid1 != uid2
+
+ def test_put_whole_addressbook(self):
+ """Create and overwrite a whole addressbook."""
+ contacts = get_file_content("contact_multiple.vcf")
+ status, _, _ = self.request("PUT", "/contacts.vcf/", contacts)
+ assert status == 201
+ status, _, answer = self.request("GET", "/contacts.vcf/")
+ assert status == 200
+ assert ("\r\nUID:contact1\r\n" in answer and
+ "\r\nUID:contact2\r\n" in answer)
+
+ def test_put_whole_addressbook_without_uids(self):
+ """Create a whole addressbook without UID."""
+ contacts = get_file_content("contact_multiple.vcf")
+ contacts = contacts.replace("UID:contact1\n", "").replace(
+ "UID:contact2\n", "")
+ assert "\nUID:" not in contacts
+ status, _, _ = self.request("PUT", "/contacts.vcf/", contacts)
+ assert status == 201
+ status, _, answer = self.request("GET", "/contacts.vcf")
+ assert status == 200
+ uids = []
+ for line in answer.split("\r\n"):
+ if line.startswith("UID:"):
+ uids.append(line[len("UID:"):])
+ assert len(uids) == 2
+ for i, uid1 in enumerate(uids):
+ assert uid1
+ for uid2 in uids[i + 1:]:
+ assert uid1 != uid2
+
+ def test_delete(self):
+ """Delete an event."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ path = "/calendar.ics/event1.ics"
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ # Then we send a DELETE request
+ status, _, answer = self.request("DELETE", path)
+ assert status == 200
+ assert "href>%s" % path in answer
+ status, _, answer = self.request("GET", "/calendar.ics/")
+ assert status == 200
+ assert "VEVENT" not in answer
+
+ def test_mkcalendar(self):
+ """Make a calendar."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ status, _, answer = self.request("GET", "/calendar.ics/")
+ assert status == 200
+ assert "BEGIN:VCALENDAR" in answer
+ assert "END:VCALENDAR" in answer
+
+ def test_move(self):
+ """Move a item."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ path1 = "/calendar.ics/event1.ics"
+ path2 = "/calendar.ics/event2.ics"
+ status, _, _ = self.request("PUT", path1, event)
+ assert status == 201
+ status, _, _ = self.request(
+ "MOVE", path1, HTTP_DESTINATION=path2, HTTP_HOST="")
+ assert status == 201
+ status, _, _ = self.request("GET", path1)
+ assert status == 404
+ status, _, _ = self.request("GET", path2)
+ assert status == 200
+
+ def test_head(self):
+ status, _, _ = self.request("HEAD", "/")
+ assert status == 302
+
+ def test_options(self):
+ status, headers, _ = self.request("OPTIONS", "/")
+ assert status == 200
+ assert "DAV" in headers
+
+ def test_delete_collection(self):
+ """Delete a collection."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ self.request("PUT", "/calendar.ics/event1.ics", event)
+ status, _, answer = self.request("DELETE", "/calendar.ics/")
+ assert status == 200
+ assert "href>/calendar.ics/" in answer
+ status, _, _ = self.request("GET", "/calendar.ics/")
+ assert status == 404
+
+ def test_delete_root_collection(self):
+ """Delete the root collection."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ status, _, _ = self.request("PUT", "/event1.ics", event)
+ assert status == 201
+ status, _, _ = self.request("PUT", "/calendar.ics/event1.ics", event)
+ assert status == 201
+ status, _, answer = self.request("DELETE", "/")
+ assert status == 200
+ assert "href>/" in answer
+ status, _, _ = self.request("GET", "/calendar.ics/")
+ assert status == 404
+ status, _, _ = self.request("GET", "/event1.ics")
+ assert status == 404
+
+ def test_propfind(self):
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ status, _, answer = self.request("PROPFIND", "/", HTTP_DEPTH="1")
+ assert status == 207
+ assert "href>/" in answer
+ assert "href>%s" % calendar_path in answer
+ status, _, answer = self.request(
+ "PROPFIND", calendar_path, HTTP_DEPTH="1")
+ assert status == 207
+ assert "href>%s" % calendar_path in answer
+ assert "href>%s" % event_path in answer
+
+ def test_propfind_propname(self):
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ status, _, _ = self.request("PUT", "/calendar.ics/event.ics", event)
+ assert status == 201
+ propfind = get_file_content("propname.xml")
+ status, _, answer = self.request(
+ "PROPFIND", "/calendar.ics/", propfind)
+ assert "" in answer
+ status, _, answer = self.request(
+ "PROPFIND", "/calendar.ics/event.ics", propfind)
+ assert "" in answer
+
+ def test_propfind_allprop(self):
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ status, _, _ = self.request("PUT", "/calendar.ics/event.ics", event)
+ assert status == 201
+ propfind = get_file_content("allprop.xml")
+ status, _, answer = self.request(
+ "PROPFIND", "/calendar.ics/", propfind)
+ assert "" in answer
+ status, _, answer = self.request(
+ "PROPFIND", "/calendar.ics/event.ics", propfind)
+ assert "" in answer
+
+ def test_proppatch(self):
+ """Write a property and read it back."""
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ proppatch = get_file_content("proppatch1.xml")
+ status, _, answer = self.request(
+ "PROPPATCH", "/calendar.ics/", proppatch)
+ assert status == 207
+ assert "calendar-color" in answer
+ assert "200 OK#BADA55" in answer
+ assert "200 OK" in answer
+
+ def test_put_whole_calendar_multiple_events_with_same_uid(self):
+ """Add two events with the same UID."""
+ status, _, _ = self.request(
+ "PUT", "/calendar.ics/", get_file_content("event2.ics"))
+ assert status == 201
+ status, _, answer = self.request(
+ "REPORT", "/calendar.ics/",
+ """
+
+
+ """)
+ assert status == 207
+ assert answer.count("") == 1
+ status, _, answer = self.request("GET", "/calendar.ics/")
+ assert status == 200
+ assert answer.count("BEGIN:VEVENT") == 2
+
+ def _test_filter(self, filters, kind="event", test=None, items=(1,)):
+ filter_template = "{}"
+ if kind in ("event", "journal", "todo"):
+ create_collection_fn = partial(self.request, "MKCALENDAR")
+ path = "/calendar.ics/"
+ filename_template = "{}{}.ics"
+ namespace = "urn:ietf:params:xml:ns:caldav"
+ report = "calendar-query"
+ elif kind == "contact":
+ create_collection_fn = self._create_addressbook
+ if test:
+ filter_template = '{{}}'.format(
+ test)
+ path = "/contacts.vcf/"
+ filename_template = "{}{}.vcf"
+ namespace = "urn:ietf:params:xml:ns:carddav"
+ report = "addressbook-query"
+ else:
+ raise ValueError("Unsupported kind: %r" % kind)
+ status, _, _ = self.request("DELETE", path)
+ assert status in (200, 404)
+ status, _, _ = create_collection_fn(path)
+ assert status == 201
+ for i in items:
+ filename = filename_template.format(kind, i)
+ event = get_file_content(filename)
+ status, _, _ = self.request(
+ "PUT", posixpath.join(path, filename), event)
+ assert status == 201
+ filters_text = "".join(
+ filter_template.format(filter_) for filter_ in filters)
+ status, _, answer = self.request(
+ "REPORT", path,
+ """
+
+
+
+
+ {2}
+ """.format(namespace, report, filters_text))
+ assert status == 207
+ return answer
+
+ def test_addressbook_empty_filter(self):
+ self._test_filter([""], kind="contact")
+
+ def test_addressbook_prop_filter(self):
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ es
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ es
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ a
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ test
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ tes
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ est
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ tes
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ est
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ est
+ """], "contact")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ tes
+ """], "contact")
+
+ def test_addressbook_prop_filter_any(self):
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ test
+
+
+ test
+ """], "contact", test="anyof")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ a
+
+
+ test
+ """], "contact", test="anyof")
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ test
+
+
+ test
+ """], "contact")
+
+ def test_addressbook_prop_filter_all(self):
+ assert "href>/contacts.vcf/contact1.vcf" in self._test_filter(["""
+
+ tes
+
+
+ est
+ """], "contact", test="allof")
+ assert "href>/contacts.vcf/contact1.vcf" not in self._test_filter(["""
+
+ test
+
+
+ test
+ """], "contact", test="allof")
+
+ def test_calendar_empty_filter(self):
+ self._test_filter([""])
+
+ def test_calendar_tag_filter(self):
+ """Report request with tag-based filter on calendar."""
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+ """])
+
+ def test_item_tag_filter(self):
+ """Report request with tag-based filter on an item."""
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+ """])
+
+ def test_item_not_tag_filter(self):
+ """Report request with tag-based is-not filter on an item."""
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+ """])
+
+ def test_item_prop_filter(self):
+ """Report request with prop-based filter on an item."""
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+
+ """])
+
+ def test_item_not_prop_filter(self):
+ """Report request with prop-based is-not filter on an item."""
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+
+
+ """])
+
+ def test_mutiple_filters(self):
+ """Report request with multiple filters on an item."""
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+
+
+
+ """, """
+
+
+
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+ """, """
+
+
+
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+
+
+
+ """])
+
+ def test_text_match_filter(self):
+ """Report request with text-match filter on calendar."""
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+ event
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+ event
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+ unknown
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+ event
+
+
+ """])
+
+ def test_param_filter(self):
+ """Report request with param-filter on calendar."""
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+ ACCEPTED
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+
+ UNKNOWN
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" not in self._test_filter(["""
+
+
+
+
+
+
+
+
+ """])
+ assert "href>/calendar.ics/event1.ics" in self._test_filter(["""
+
+
+
+
+
+
+
+
+ """])
+
+ def test_time_range_filter_events(self):
+ """Report request with time-range filter on events."""
+ answer = self._test_filter(["""
+
+
+
+
+ """], "event", items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" in answer
+ assert "href>/calendar.ics/event2.ics" in answer
+ assert "href>/calendar.ics/event3.ics" in answer
+ assert "href>/calendar.ics/event4.ics" in answer
+ assert "href>/calendar.ics/event5.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "event", items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+
+
+
+
+
+ """], items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" not in answer
+ assert "href>/calendar.ics/event3.ics" not in answer
+ assert "href>/calendar.ics/event4.ics" not in answer
+ assert "href>/calendar.ics/event5.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" in answer
+ assert "href>/calendar.ics/event3.ics" in answer
+ assert "href>/calendar.ics/event4.ics" in answer
+ assert "href>/calendar.ics/event5.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" not in answer
+ assert "href>/calendar.ics/event3.ics" in answer
+ assert "href>/calendar.ics/event4.ics" in answer
+ assert "href>/calendar.ics/event5.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" not in answer
+ assert "href>/calendar.ics/event3.ics" in answer
+ assert "href>/calendar.ics/event4.ics" not in answer
+ assert "href>/calendar.ics/event5.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=range(1, 6))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" not in answer
+ assert "href>/calendar.ics/event3.ics" not in answer
+ assert "href>/calendar.ics/event4.ics" not in answer
+ assert "href>/calendar.ics/event5.ics" not in answer
+ # HACK: VObject doesn't match RECURRENCE-ID to recurrences, the
+ # overwritten recurrence is still used for filtering.
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=(6, 7, 8))
+ assert "href>/calendar.ics/event6.ics" in answer
+ assert "href>/calendar.ics/event7.ics" in answer
+ assert "href>/calendar.ics/event8.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=(6, 7, 8))
+ assert "href>/calendar.ics/event6.ics" in answer
+ assert "href>/calendar.ics/event7.ics" in answer
+ assert "href>/calendar.ics/event8.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], items=(6, 7, 8))
+ assert "href>/calendar.ics/event6.ics" not in answer
+ assert "href>/calendar.ics/event7.ics" not in answer
+ assert "href>/calendar.ics/event8.ics" not in answer
+
+ def test_time_range_filter_events_rrule(self):
+ """Report request with time-range filter on events with rrules."""
+ # NOTE(review): the triple-quoted request bodies below are empty in
+ # this patch — the CalDAV time-range filter XML appears to have been
+ # stripped during extraction. Restore from upstream before applying.
+ answer = self._test_filter(["""
+
+
+
+
+ """], "event", items=(1, 2))
+ assert "href>/calendar.ics/event1.ics" in answer
+ assert "href>/calendar.ics/event2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "event", items=(1, 2))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "event", items=(1, 2))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "event", items=(1, 2))
+ assert "href>/calendar.ics/event1.ics" not in answer
+ assert "href>/calendar.ics/event2.ics" not in answer
+
+ def test_time_range_filter_todos(self):
+ """Report request with time-range filter on todos."""
+ # NOTE(review): filter XML payloads stripped from this patch (empty
+ # triple-quoted strings) — restore from upstream before applying.
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=range(1, 9))
+ assert "href>/calendar.ics/todo1.ics" in answer
+ assert "href>/calendar.ics/todo2.ics" in answer
+ assert "href>/calendar.ics/todo3.ics" in answer
+ assert "href>/calendar.ics/todo4.ics" in answer
+ assert "href>/calendar.ics/todo5.ics" in answer
+ assert "href>/calendar.ics/todo6.ics" in answer
+ assert "href>/calendar.ics/todo7.ics" in answer
+ assert "href>/calendar.ics/todo8.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=range(1, 9))
+ assert "href>/calendar.ics/todo1.ics" not in answer
+ assert "href>/calendar.ics/todo2.ics" in answer
+ assert "href>/calendar.ics/todo3.ics" in answer
+ assert "href>/calendar.ics/todo4.ics" not in answer
+ assert "href>/calendar.ics/todo5.ics" not in answer
+ assert "href>/calendar.ics/todo6.ics" not in answer
+ assert "href>/calendar.ics/todo7.ics" in answer
+ assert "href>/calendar.ics/todo8.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=range(1, 9))
+ assert "href>/calendar.ics/todo2.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=range(1, 9))
+ assert "href>/calendar.ics/todo2.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=range(1, 9))
+ assert "href>/calendar.ics/todo3.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=range(1, 9))
+ assert "href>/calendar.ics/todo7.ics" in answer
+
+ def test_time_range_filter_todos_rrule(self):
+ """Report request with time-range filter on todos with rrules."""
+ # NOTE(review): filter XML payloads stripped from this patch (empty
+ # triple-quoted strings) — restore from upstream before applying.
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=(1, 2))
+ assert "href>/calendar.ics/todo1.ics" in answer
+ assert "href>/calendar.ics/todo2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=(1, 2))
+ assert "href>/calendar.ics/todo1.ics" not in answer
+ assert "href>/calendar.ics/todo2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=(1, 2))
+ assert "href>/calendar.ics/todo1.ics" not in answer
+ assert "href>/calendar.ics/todo2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "todo", items=(1, 2))
+ assert "href>/calendar.ics/todo1.ics" not in answer
+ assert "href>/calendar.ics/todo2.ics" not in answer
+
+ def test_time_range_filter_journals(self):
+ """Report request with time-range filter on journals."""
+ # NOTE(review): filter XML payloads stripped from this patch (empty
+ # triple-quoted strings) — restore from upstream before applying.
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2, 3))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" in answer
+ assert "href>/calendar.ics/journal3.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2, 3))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" in answer
+ assert "href>/calendar.ics/journal3.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2, 3))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" not in answer
+ assert "href>/calendar.ics/journal3.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2, 3))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" in answer
+ assert "href>/calendar.ics/journal3.ics" not in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2, 3))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" in answer
+ assert "href>/calendar.ics/journal3.ics" in answer
+
+ def test_time_range_filter_journals_rrule(self):
+ """Report request with time-range filter on journals with rrules."""
+ # NOTE(review): filter XML payloads stripped from this patch (empty
+ # triple-quoted strings) — restore from upstream before applying.
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" in answer
+ answer = self._test_filter(["""
+
+
+
+
+ """], "journal", items=(1, 2))
+ assert "href>/calendar.ics/journal1.ics" not in answer
+ assert "href>/calendar.ics/journal2.ics" not in answer
+
+ def test_report_item(self):
+ """Test report request on an item"""
+ # Create a calendar, store one event, then REPORT directly on the
+ # event resource and expect its href in the 207 multistatus body.
+ # NOTE(review): the REPORT XML body below was stripped from this
+ # patch (empty triple-quoted string) — restore from upstream.
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ status, _, answer = self.request(
+ "REPORT", event_path,
+ """
+
+
+
+
+ """)
+ assert status == 207
+ assert "href>%s<" % event_path in answer
+
+ def _report_sync_token(self, calendar_path, sync_token=None):
+ # Helper: issue a sync-collection REPORT (RFC 6578) against
+ # ``calendar_path``. Returns (sync_token, parsed_xml); returns
+ # (None, None) when a stale ``sync_token`` is rejected with 412.
+ # NOTE(review): the XML fragments below are empty in this patch —
+ # the sync-token element and the REPORT body were stripped during
+ # extraction; restore from upstream before applying.
+ sync_token_xml = (
+ "" % sync_token
+ if sync_token else "")
+ status, _, answer = self.request(
+ "REPORT", calendar_path,
+ """
+
+
+
+
+ %s
+ """ % sync_token_xml)
+ if sync_token and status == 412:
+ return None, None
+ assert status == 207
+ xml = ET.fromstring(answer)
+ sync_token = xml.find("{DAV:}sync-token").text.strip()
+ assert sync_token
+ return sync_token, xml
+
+ def test_report_sync_collection_no_change(self):
+ """Test sync-collection report without modifying the collection"""
+ # A second sync with the previous token must return the same token
+ # and no {DAV:}response elements (nothing changed).
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ assert xml.find("{DAV:}response") is not None
+ new_sync_token, xml = self._report_sync_token(calendar_path,
+ sync_token)
+ assert sync_token == new_sync_token
+ assert xml.find("{DAV:}response") is None
+
+ def test_report_sync_collection_add(self):
+ """Test sync-collection report with an added item"""
+ # An item PUT after taking the token must appear as a response
+ # without a {DAV:}status child (i.e. created/changed, not deleted).
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path, sync_token)
+ if not sync_token:
+ pytest.skip("storage backend does not support sync-token")
+ assert xml.find("{DAV:}response") is not None
+ assert xml.find("{DAV:}response/{DAV:}status") is None
+
+ def test_report_sync_collection_delete(self):
+ """Test sync-collection report with a deleted item"""
+ # A deleted item is reported with a 404 {DAV:}status element.
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ status, _, _ = self.request("DELETE", event_path)
+ assert status == 200
+ sync_token, xml = self._report_sync_token(calendar_path, sync_token)
+ if not sync_token:
+ pytest.skip("storage backend does not support sync-token")
+ assert "404" in xml.find("{DAV:}response/{DAV:}status").text
+
+ def test_report_sync_collection_create_delete(self):
+ """Test sync-collection report with a created and deleted item"""
+ # An item that was both created and deleted between syncs is still
+ # reported as deleted (404 status).
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ status, _, _ = self.request("DELETE", event_path)
+ assert status == 200
+ sync_token, xml = self._report_sync_token(calendar_path, sync_token)
+ if not sync_token:
+ pytest.skip("storage backend does not support sync-token")
+ assert "404" in xml.find("{DAV:}response/{DAV:}status").text
+
+ def test_report_sync_collection_modify_undo(self):
+ """Test sync-collection report with a modified and changed back item"""
+ # Even though the content was restored, the item was touched, so it
+ # is still reported as changed (response present, no 404 status).
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event1 = get_file_content("event1.ics")
+ event2 = get_file_content("event2.ics")
+ event_path = posixpath.join(calendar_path, "event1.ics")
+ status, _, _ = self.request("PUT", event_path, event1)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ status, _, _ = self.request("PUT", event_path, event2)
+ assert status == 201
+ status, _, _ = self.request("PUT", event_path, event1)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path, sync_token)
+ if not sync_token:
+ pytest.skip("storage backend does not support sync-token")
+ assert xml.find("{DAV:}response") is not None
+ assert xml.find("{DAV:}response/{DAV:}status") is None
+
+ def test_report_sync_collection_move(self):
+ """Test sync-collection report a moved item"""
+ # After MOVE: the new href is reported as present (no status) and
+ # the old href as deleted (404 status).
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event = get_file_content("event1.ics")
+ event1_path = posixpath.join(calendar_path, "event1.ics")
+ event2_path = posixpath.join(calendar_path, "event2.ics")
+ status, _, _ = self.request("PUT", event1_path, event)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ status, _, _ = self.request(
+ "MOVE", event1_path, HTTP_DESTINATION=event2_path, HTTP_HOST="")
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path, sync_token)
+ if not sync_token:
+ pytest.skip("storage backend does not support sync-token")
+ for response in xml.findall("{DAV:}response"):
+ if response.find("{DAV:}status") is None:
+ assert response.find("{DAV:}href").text == event2_path
+ else:
+ assert "404" in response.find("{DAV:}status").text
+ assert response.find("{DAV:}href").text == event1_path
+
+ def test_report_sync_collection_move_undo(self):
+ """Test sync-collection report with a moved and moved back item"""
+ # Move there and back: the original href counts as one creation and
+ # the intermediate href as one deletion — exactly one of each.
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ event = get_file_content("event1.ics")
+ event1_path = posixpath.join(calendar_path, "event1.ics")
+ event2_path = posixpath.join(calendar_path, "event2.ics")
+ status, _, _ = self.request("PUT", event1_path, event)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ status, _, _ = self.request(
+ "MOVE", event1_path, HTTP_DESTINATION=event2_path, HTTP_HOST="")
+ assert status == 201
+ status, _, _ = self.request(
+ "MOVE", event2_path, HTTP_DESTINATION=event1_path, HTTP_HOST="")
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path, sync_token)
+ if not sync_token:
+ pytest.skip("storage backend does not support sync-token")
+ created = deleted = 0
+ for response in xml.findall("{DAV:}response"):
+ if response.find("{DAV:}status") is None:
+ assert response.find("{DAV:}href").text == event1_path
+ created += 1
+ else:
+ assert "404" in response.find("{DAV:}status").text
+ assert response.find("{DAV:}href").text == event2_path
+ deleted += 1
+ assert created == 1 and deleted == 1
+
+ def test_report_sync_collection_invalid_sync_token(self):
+ """Test sync-collection report with an invalid sync token"""
+ # The helper returns (None, None) on a 412 rejection, so an unknown
+ # token must yield a falsy sync_token here.
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(
+ calendar_path, "http://radicale.org/ns/sync/INVALID")
+ assert not sync_token
+
+ def test_propfind_sync_token(self):
+ """Retrieve the sync-token with a propfind request"""
+ # NOTE(review): despite the name/docstring, this body only uses the
+ # sync-collection REPORT helper; it verifies that the token changes
+ # after the collection is modified — confirm intent against upstream.
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ event = get_file_content("event1.ics")
+ event_path = posixpath.join(calendar_path, "event.ics")
+ status, _, _ = self.request("PUT", event_path, event)
+ assert status == 201
+ new_sync_token, xml = self._report_sync_token(calendar_path,
+ sync_token)
+ assert sync_token != new_sync_token
+
+ def test_propfind_same_as_sync_collection_sync_token(self):
+ """Compare sync-token property with sync-collection sync-token"""
+ # With no intervening modification the token must be stable.
+ calendar_path = "/calendar.ics/"
+ status, _, _ = self.request("MKCALENDAR", calendar_path)
+ assert status == 201
+ sync_token, xml = self._report_sync_token(calendar_path)
+ new_sync_token, xml = self._report_sync_token(calendar_path,
+ sync_token)
+ assert sync_token == new_sync_token
+
+ def test_calendar_getcontenttype(self):
+ """Test report request on an item"""
+ # For each component type, the reported getcontenttype must carry a
+ # matching component=V... parameter.
+ # NOTE(review): the REPORT XML body below was stripped from this
+ # patch (empty triple-quoted string) — restore from upstream.
+ status, _, _ = self.request("MKCALENDAR", "/test/")
+ assert status == 201
+ for component in ("event", "todo", "journal"):
+ event = get_file_content("{}1.ics".format(component))
+ status, _, _ = self.request("PUT", "/test/test.ics", event)
+ assert status == 201
+ status, _, answer = self.request(
+ "REPORT", "/test/",
+ """
+
+
+
+
+ """)
+ assert status == 207
+ assert ">text/calendar;charset=utf-8;component=V{}<".format(
+ component.upper()) in answer
+
+ def test_addressbook_getcontenttype(self):
+ """Test report request on an item"""
+ # NOTE(review): the REPORT XML body below was stripped from this
+ # patch (empty triple-quoted string) — restore from upstream.
+ status, _, _ = self._create_addressbook("/test/")
+ assert status == 201
+ contact = get_file_content("contact1.vcf")
+ status, _, _ = self.request("PUT", "/test/test.vcf", contact)
+ assert status == 201
+ status, _, answer = self.request(
+ "REPORT", "/test/",
+ """
+
+
+
+
+ """)
+ assert status == 207
+ assert ">text/vcard;charset=utf-8<" in answer
+
+ def test_authorization(self):
+ # A PROPFIND on "/" with Basic credentials for "user" (empty
+ # password) must expose that user's principal collection.
+ # NOTE(review): the PROPFIND XML body below was stripped from this
+ # patch (empty triple-quoted string) — restore from upstream.
+ authorization = "Basic " + base64.b64encode(b"user:").decode()
+ status, _, answer = self.request(
+ "PROPFIND", "/",
+ """
+
+
+
+
+ """,
+ HTTP_AUTHORIZATION=authorization)
+ assert status == 207
+ assert "href>/user/<" in answer
+
+ def test_authentication(self):
+ """Test if server sends authentication request."""
+ # htpasswd auth against an empty file (os.devnull) rejects everyone;
+ # an unauthenticated MKCOL must be refused with a challenge header.
+ self.configuration["auth"]["type"] = "htpasswd"
+ self.configuration["auth"]["htpasswd_filename"] = os.devnull
+ self.configuration["auth"]["htpasswd_encryption"] = "plain"
+ self.configuration["rights"]["type"] = "owner_only"
+ self.application = Application(self.configuration, self.logger)
+ status, headers, _ = self.request("MKCOL", "/user/")
+ assert status in (401, 403)
+ assert headers.get("WWW-Authenticate")
+
+ def test_principal_collection_creation(self):
+ """Verify existence of the principal collection."""
+ # Authenticated PROPFIND on the user's home must succeed (207).
+ status, _, _ = self.request("PROPFIND", "/user/", HTTP_AUTHORIZATION=(
+ "Basic " + base64.b64encode(b"user:").decode()))
+ assert status == 207
+
+ def test_existence_of_root_collections(self):
+ """Verify that the root collection always exists."""
+ # Use PROPFIND because GET returns message
+ status, _, _ = self.request("PROPFIND", "/")
+ assert status == 207
+ # it should still exist after deletion
+ status, _, _ = self.request("DELETE", "/")
+ assert status == 200
+ status, _, _ = self.request("PROPFIND", "/")
+ assert status == 207
+
+ def test_custom_headers(self):
+ # Headers configured in the [headers] section must be attached to
+ # every response, success and failure alike.
+ if not self.configuration.has_section("headers"):
+ self.configuration.add_section("headers")
+ self.configuration.set("headers", "test", "123")
+ # Test if header is set on success
+ status, headers, _ = self.request("OPTIONS", "/")
+ assert status == 200
+ assert headers.get("test") == "123"
+ # Test if header is set on failure
+ status, headers, _ = self.request(
+ "GET", "/.well-known/does not exist")
+ assert status == 404
+ assert headers.get("test") == "123"
+
+ def test_missing_uid(self):
+ """Verify that missing UIDs are added in a stable manner."""
+ # PUT the same UID-less event twice; the server must synthesize a
+ # UID on the first upload and reproduce the identical UID on the
+ # second (stable generation).
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event_without_uid = get_file_content("event1.ics").replace(
+ "UID:event1\n", "")
+ assert "UID" not in event_without_uid
+ path = "/calendar.ics/event1.ics"
+ status, _, _ = self.request("PUT", path, event_without_uid)
+ assert status == 201
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ uid = None
+ for line in answer.split("\r\n"):
+ if line.startswith("UID:"):
+ uid = line[len("UID:"):]
+ assert uid
+ status, _, _ = self.request("PUT", path, event_without_uid)
+ assert status == 201
+ status, _, answer = self.request("GET", path)
+ assert status == 200
+ assert "UID:%s\r\n" % uid in answer
+
+
+class BaseFileSystemTest(BaseTest):
+ """Base class for filesystem backend tests."""
+ # Subclasses set this to the storage backend module/type under test.
+ storage_type = None
+
+ def setup(self):
+ # Fresh configuration and a throwaway collection folder per test.
+ self.configuration = config.load()
+ self.configuration["storage"]["type"] = self.storage_type
+ self.colpath = tempfile.mkdtemp()
+ self.configuration["storage"]["filesystem_folder"] = self.colpath
+ # Disable syncing to disk for better performance
+ self.configuration["storage"]["filesystem_fsync"] = "False"
+ # Required on Windows, doesn't matter on Unix
+ self.configuration["storage"]["filesystem_close_lock_file"] = "True"
+ self.application = Application(self.configuration, self.logger)
+
+ def teardown(self):
+ # Remove the temporary collection folder created in setup().
+ shutil.rmtree(self.colpath)
+
+
+class TestMultiFileSystem(BaseFileSystemTest, BaseRequestsMixIn):
+ """Test BaseRequests on multifilesystem."""
+ storage_type = "multifilesystem"
+
+ def test_fsync(self):
+ """Create a directory and file with syncing enabled."""
+ self.configuration["storage"]["filesystem_fsync"] = "True"
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+
+ def test_hook(self):
+ """Run hook."""
+ # The storage hook runs after write access; it creates a directory
+ # that must then be visible as a collection.
+ self.configuration["storage"]["hook"] = (
+ "mkdir %s" % os.path.join("collection-root", "created_by_hook"))
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ status, _, _ = self.request("PROPFIND", "/created_by_hook/")
+ assert status == 207
+
+ def test_hook_read_access(self):
+ """Verify that hook is not run for read accesses."""
+ self.configuration["storage"]["hook"] = (
+ "mkdir %s" % os.path.join("collection-root", "created_by_hook"))
+ status, _, _ = self.request("PROPFIND", "/")
+ assert status == 207
+ status, _, _ = self.request("PROPFIND", "/created_by_hook/")
+ assert status == 404
+
+ @pytest.mark.skipif(os.system("type flock") != 0,
+ reason="flock command not found")
+ def test_hook_storage_locked(self):
+ """Verify that the storage is locked when the hook runs."""
+ # flock -n fails (non-blocking) iff the lock file is already held;
+ # the hook exits 0 only in that case, so 201 proves the lock.
+ self.configuration["storage"]["hook"] = (
+ "flock -n .Radicale.lock || exit 0; exit 1")
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+
+ def test_hook_principal_collection_creation(self):
+ """Verify that the hooks runs when a new user is created."""
+ self.configuration["storage"]["hook"] = (
+ "mkdir %s" % os.path.join("collection-root", "created_by_hook"))
+ status, _, _ = self.request("PROPFIND", "/", HTTP_AUTHORIZATION=(
+ "Basic " + base64.b64encode(b"user:").decode()))
+ assert status == 207
+ status, _, _ = self.request("PROPFIND", "/created_by_hook/")
+ assert status == 207
+
+ def test_hook_fail(self):
+ """Verify that a request fails if the hook fails."""
+ self.configuration["storage"]["hook"] = "exit 1"
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status != 201
+
+ def test_item_cache_rebuild(self):
+ """Delete the item cache and verify that it is rebuild."""
+ # Removing .Radicale.cache/item must be transparent: the next GET
+ # returns identical content and repopulates the cache on disk.
+ status, _, _ = self.request("MKCALENDAR", "/calendar.ics/")
+ assert status == 201
+ event = get_file_content("event1.ics")
+ path = "/calendar.ics/event1.ics"
+ status, _, _ = self.request("PUT", path, event)
+ assert status == 201
+ status, _, answer1 = self.request("GET", path)
+ assert status == 200
+ cache_folder = os.path.join(self.colpath, "collection-root",
+ "calendar.ics", ".Radicale.cache", "item")
+ assert os.path.exists(os.path.join(cache_folder, "event1.ics"))
+ shutil.rmtree(cache_folder)
+ status, _, answer2 = self.request("GET", path)
+ assert status == 200
+ assert answer1 == answer2
+ assert os.path.exists(os.path.join(cache_folder, "event1.ics"))
+
+
+class TestCustomStorageSystem(BaseFileSystemTest):
+ """Test custom backend loading."""
+ # Dotted module path: exercises dynamic storage-backend import.
+ storage_type = "tests.custom.storage"
+
+ def test_root(self):
+ """A simple test to verify that the custom backend works."""
+ BaseRequestsMixIn.test_root(self)
diff --git a/radicale/tests/test_rights.py b/radicale/tests/test_rights.py
new file mode 100644
index 0000000..36c2833
--- /dev/null
+++ b/radicale/tests/test_rights.py
@@ -0,0 +1,139 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright (C) 2017 Unrud
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Radicale tests with simple requests and rights.
+"""
+
+import base64
+import os
+import shutil
+import tempfile
+
+from radicale import Application, config
+
+from .test_base import BaseTest
+
+
+class TestBaseAuthRequests(BaseTest):
+ """Tests basic requests with rights."""
+
+ def setup(self):
+ # Fresh configuration with a throwaway collection folder per test.
+ self.configuration = config.load()
+ self.colpath = tempfile.mkdtemp()
+ self.configuration["storage"]["filesystem_folder"] = self.colpath
+ # Disable syncing to disk for better performance
+ self.configuration["storage"]["filesystem_fsync"] = "False"
+ # Required on Windows, doesn't matter on Unix
+ self.configuration["storage"]["filesystem_close_lock_file"] = "True"
+
+ def teardown(self):
+ shutil.rmtree(self.colpath)
+
+ def _test_rights(self, rights_type, user, path, mode, expected_status):
+ # Helper: configure ``rights_type`` with htpasswd users "tmp" and
+ # "other" (password "bepo"), pre-create both principals, then issue
+ # PROPFIND (mode "r") or PROPPATCH (mode "w") on ``path``.
+ # ``user`` only selects authenticated ("tmp") vs anonymous ("") —
+ # the credentials sent are always tmp:bepo.
+ assert mode in ("r", "w")
+ assert user in ("", "tmp")
+ htpasswd_file_path = os.path.join(self.colpath, ".htpasswd")
+ with open(htpasswd_file_path, "w") as f:
+ f.write("tmp:bepo\nother:bepo")
+ self.configuration["rights"]["type"] = rights_type
+ self.configuration["auth"]["type"] = "htpasswd"
+ self.configuration["auth"]["htpasswd_filename"] = htpasswd_file_path
+ self.configuration["auth"]["htpasswd_encryption"] = "plain"
+ self.application = Application(self.configuration, self.logger)
+ for u in ("tmp", "other"):
+ status, _, _ = self.request(
+ "PROPFIND", "/%s" % u, HTTP_AUTHORIZATION="Basic %s" %
+ base64.b64encode(("%s:bepo" % u).encode()).decode())
+ assert status == 207
+ status, _, _ = self.request(
+ "PROPFIND" if mode == "r" else "PROPPATCH", path,
+ HTTP_AUTHORIZATION="Basic %s" % base64.b64encode(
+ ("tmp:bepo").encode()).decode() if user else "")
+ assert status == expected_status
+
+ def test_owner_only(self):
+ # Owner may read/write own collection; anonymous gets 401,
+ # foreign collections 403.
+ self._test_rights("owner_only", "", "/", "r", 401)
+ self._test_rights("owner_only", "", "/", "w", 401)
+ self._test_rights("owner_only", "", "/tmp", "r", 401)
+ self._test_rights("owner_only", "", "/tmp", "w", 401)
+ self._test_rights("owner_only", "tmp", "/", "r", 207)
+ self._test_rights("owner_only", "tmp", "/", "w", 403)
+ self._test_rights("owner_only", "tmp", "/tmp", "r", 207)
+ self._test_rights("owner_only", "tmp", "/tmp", "w", 207)
+ self._test_rights("owner_only", "tmp", "/other", "r", 403)
+ self._test_rights("owner_only", "tmp", "/other", "w", 403)
+
+ def test_owner_write(self):
+ # Any authenticated user may read; only the owner may write.
+ self._test_rights("owner_write", "", "/", "r", 401)
+ self._test_rights("owner_write", "", "/", "w", 401)
+ self._test_rights("owner_write", "", "/tmp", "r", 401)
+ self._test_rights("owner_write", "", "/tmp", "w", 401)
+ self._test_rights("owner_write", "tmp", "/", "r", 207)
+ self._test_rights("owner_write", "tmp", "/", "w", 403)
+ self._test_rights("owner_write", "tmp", "/tmp", "r", 207)
+ self._test_rights("owner_write", "tmp", "/tmp", "w", 207)
+ self._test_rights("owner_write", "tmp", "/other", "r", 207)
+ self._test_rights("owner_write", "tmp", "/other", "w", 403)
+
+ def test_authenticated(self):
+ # Any authenticated user may read and write everything.
+ self._test_rights("authenticated", "", "/", "r", 401)
+ self._test_rights("authenticated", "", "/", "w", 401)
+ self._test_rights("authenticated", "", "/tmp", "r", 401)
+ self._test_rights("authenticated", "", "/tmp", "w", 401)
+ self._test_rights("authenticated", "tmp", "/", "r", 207)
+ self._test_rights("authenticated", "tmp", "/", "w", 207)
+ self._test_rights("authenticated", "tmp", "/tmp", "r", 207)
+ self._test_rights("authenticated", "tmp", "/tmp", "w", 207)
+ self._test_rights("authenticated", "tmp", "/other", "r", 207)
+ self._test_rights("authenticated", "tmp", "/other", "w", 207)
+
+ def test_none(self):
+ # No access control: everything succeeds, even anonymously.
+ self._test_rights("none", "", "/", "r", 207)
+ self._test_rights("none", "", "/", "w", 207)
+ self._test_rights("none", "", "/tmp", "r", 207)
+ self._test_rights("none", "", "/tmp", "w", 207)
+ self._test_rights("none", "tmp", "/", "r", 207)
+ self._test_rights("none", "tmp", "/", "w", 207)
+ self._test_rights("none", "tmp", "/tmp", "r", 207)
+ self._test_rights("none", "tmp", "/tmp", "w", 207)
+ self._test_rights("none", "tmp", "/other", "r", 207)
+ self._test_rights("none", "tmp", "/other", "w", 207)
+
+ def test_from_file(self):
+ # Rights loaded from an ini-style file: owners get rw on their own
+ # tree, everyone gets read-only access to /custom (which does not
+ # exist, hence 404 on read).
+ rights_file_path = os.path.join(self.colpath, "rights")
+ with open(rights_file_path, "w") as f:
+ f.write("""\
+[owner]
+user: .+
+collection: %(login)s(/.*)?
+permission: rw
+[custom]
+user: .*
+collection: custom(/.*)?
+permission: r""")
+ self.configuration["rights"]["file"] = rights_file_path
+ self._test_rights("from_file", "", "/other", "r", 401)
+ self._test_rights("from_file", "tmp", "/other", "r", 403)
+ self._test_rights("from_file", "", "/custom/sub", "r", 404)
+ self._test_rights("from_file", "tmp", "/custom/sub", "r", 404)
+ self._test_rights("from_file", "", "/custom/sub", "w", 401)
+ self._test_rights("from_file", "tmp", "/custom/sub", "w", 403)
+
+ def test_custom(self):
+ """Custom rights management."""
+ # Exercises dynamic import of a dotted rights-backend module path.
+ self._test_rights("tests.custom.rights", "", "/", "r", 401)
+ self._test_rights("tests.custom.rights", "", "/tmp", "r", 207)
diff --git a/radicale/web.py b/radicale/web.py
new file mode 100644
index 0000000..acc7ce7
--- /dev/null
+++ b/radicale/web.py
@@ -0,0 +1,124 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright (C) 2017 Unrud
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import posixpath
+import time
+from http import client
+from importlib import import_module
+
+import pkg_resources
+
+from . import storage
+
+# Canned 404 WSGI answer: (status, headers, body).
+NOT_FOUND = (
+ client.NOT_FOUND, (("Content-Type", "text/plain"),),
+ "The requested resource could not be found.")
+
+# File-extension -> MIME type map for the built-in static file server.
+MIMETYPES = {
+ ".css": "text/css",
+ ".eot": "application/vnd.ms-fontobject",
+ ".gif": "image/gif",
+ ".html": "text/html",
+ ".js": "application/javascript",
+ ".manifest": "text/cache-manifest",
+ ".png": "image/png",
+ ".svg": "image/svg+xml",
+ ".ttf": "application/font-sfnt",
+ ".txt": "text/plain",
+ ".woff": "application/font-woff",
+ ".woff2": "font/woff2",
+ ".xml": "text/xml"}
+# Used when the extension is not listed above.
+FALLBACK_MIMETYPE = "application/octet-stream"
+
+# "web.type" values handled by this module itself (no dynamic import).
+# NOTE(review): not referenced in the visible code of this module —
+# presumably consumed by configuration validation; confirm.
+INTERNAL_TYPES = ("None", "none", "internal")
+
+
+def load(configuration, logger):
+ """Load the web module chosen in configuration."""
+ # "web.type" selects: "none"/"None" -> NoneWeb, "internal" -> the
+ # bundled static-file server, anything else -> a dotted module path
+ # that must expose a ``Web`` class.
+ web_type = configuration.get("web", "type")
+ if web_type in ("None", "none"): # DEPRECATED: use "none"
+ web_class = NoneWeb
+ elif web_type == "internal":
+ web_class = Web
+ else:
+ try:
+ web_class = import_module(web_type).Web
+ except Exception as e:
+ # Wrap import/attribute errors with the offending type name.
+ raise RuntimeError("Failed to load web module %r: %s" %
+ (web_type, e)) from e
+ logger.info("Web type is %r", web_type)
+ return web_class(configuration, logger)
+
+
+class BaseWeb:
+ # Interface for web modules; subclasses implement ``get``.
+ def __init__(self, configuration, logger):
+ self.configuration = configuration
+ self.logger = logger
+
+ def get(self, environ, base_prefix, path, user):
+ """GET request.
+
+ ``base_prefix`` is sanitized and never ends with "/".
+
+ ``path`` is sanitized and always starts with "/.web"
+
+ ``user`` is empty for anonymous users.
+
+ Returns a (status, headers, answer) triple.
+ """
+ raise NotImplementedError
+
+
+class NoneWeb(BaseWeb):
+ # Minimal web module: only answers the exact "/.web" path with a
+ # plain-text liveness message; everything else is 404.
+ def get(self, environ, base_prefix, path, user):
+ if path != "/.web":
+ return NOT_FOUND
+ return client.OK, {"Content-Type": "text/plain"}, "Radicale works!"
+
+
+class Web(BaseWeb):
+ # Built-in static file server for the bundled web interface.
+ def __init__(self, configuration, logger):
+ super().__init__(configuration, logger)
+ # Filesystem location of the packaged "web" resources.
+ self.folder = pkg_resources.resource_filename(__name__, "web")
+
+ def get(self, environ, base_prefix, path, user):
+ try:
+ # path_to_filesystem rejects traversal outside self.folder.
+ filesystem_path = storage.path_to_filesystem(
+ self.folder, path[len("/.web"):])
+ except ValueError as e:
+ self.logger.debug("Web content with unsafe path %r requested: %s",
+ path, e, exc_info=True)
+ return NOT_FOUND
+ # Redirect directory requests missing the trailing slash so that
+ # relative links inside the served pages resolve correctly.
+ if os.path.isdir(filesystem_path) and not path.endswith("/"):
+ location = posixpath.basename(path) + "/"
+ return (client.FOUND,
+ {"Location": location, "Content-Type": "text/plain"},
+ "Redirected to %s" % location)
+ if os.path.isdir(filesystem_path):
+ filesystem_path = os.path.join(filesystem_path, "index.html")
+ if not os.path.isfile(filesystem_path):
+ return NOT_FOUND
+ content_type = MIMETYPES.get(
+ os.path.splitext(filesystem_path)[1].lower(), FALLBACK_MIMETYPE)
+ with open(filesystem_path, "rb") as f:
+ answer = f.read()
+ # Last-Modified from fstat of the open handle (no extra race).
+ last_modified = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT",
+ time.gmtime(os.fstat(f.fileno()).st_mtime))
+ headers = {
+ "Content-Type": content_type,
+ "Last-Modified": last_modified}
+ return client.OK, headers, answer
diff --git a/radicale/web/css/icon.png b/radicale/web/css/icon.png
new file mode 100644
index 0000000..a9c9c04
Binary files /dev/null and b/radicale/web/css/icon.png differ
diff --git a/radicale/web/css/main.css b/radicale/web/css/main.css
new file mode 100644
index 0000000..183e743
--- /dev/null
+++ b/radicale/web/css/main.css
@@ -0,0 +1,43 @@
+/* Page layout: flex column with min-height so the footer sticks to the
+   bottom (main has flex: 1 below). */
+body { background: #e4e9f6; color: #424247; display: flex; flex-direction: column; font-size: 14pt; line-height: 1.4; margin: 0; min-height: 100vh; }
+
+a { color: inherit; }
+
+/* Top navigation bar and footer share the same red theme. */
+nav, footer { background: #a40000; color: white; padding: 0 20%; }
+nav ul, footer ul { display: flex; flex-wrap: wrap; margin: 0; padding: 0; }
+nav ul li, footer ul li { display: block; padding: 0 1em 0 0; }
+nav ul li a, footer ul li a { color: inherit; display: block; padding: 1em 0.5em 1em 0; text-decoration: inherit; transition: 0.2s; }
+nav ul li a:hover, nav ul li a:focus, footer ul li a:hover, footer ul li a:focus { color: black; outline: none; }
+
+/* Hero header with the logo as a background image over a dark gradient. */
+header { background: url(logo.svg), linear-gradient(to bottom right, #050a02, black); background-position: 22% 45%; background-repeat: no-repeat; color: #efdddd; font-size: 1.5em; min-height: 250px; overflow: auto; padding: 3em 22%; text-shadow: 0.2em 0.2em 0.2em rgba(0, 0, 0, 0.5); }
+header > * { padding-left: 220px; }
+header h1 { font-size: 2.5em; font-weight: lighter; margin: 0.5em 0; }
+
+main { flex: 1; }
+
+section { padding: 0 20% 2em; }
+section:not(:last-child) { border-bottom: 1px dashed #ccc; }
+section h1 { background: linear-gradient(to bottom right, #050a02, black); color: #e5dddd; font-size: 2.5em; margin: 0 -33.33% 1em; padding: 1em 33.33%; }
+section h2, section h3, section h4 { font-weight: lighter; margin: 1.5em 0 1em; }
+
+/* Articles draw their own dashed separator via :before and place their
+   aside (date/meta) in the right margin. */
+article { border-top: 1px solid transparent; position: relative; margin: 3em 0; }
+article aside { box-sizing: border-box; color: #aaa; font-size: 0.8em; right: -30%; top: 0.5em; position: absolute; }
+article:before { border-top: 1px dashed #ccc; content: ""; display: block; left: -33.33%; position: absolute; right: -33.33%; }
+
+pre { border-radius: 3px; background: black; color: #d3d5db; margin: 0 -1em; overflow-x: auto; padding: 1em; }
+
+table { border-collapse: collapse; font-size: 0.8em; margin: auto; }
+table td { border: 1px solid #ccc; padding: 0.5em; }
+
+dl dt { margin-bottom: 0.5em; margin-top: 1em; }
+
+/* Narrow-screen adjustments: smaller type, centered nav, logo on top. */
+@media (max-width: 800px) { body { font-size: 12pt; }
+    header, section { padding-left: 2em; padding-right: 2em; }
+    nav, footer { padding-left: 0; padding-right: 0; }
+    nav ul, footer ul { justify-content: center; }
+    nav ul li, footer ul li { padding: 0 0.5em; }
+    nav ul li a, footer ul li a { padding: 1em 0; }
+    header { background-position: 50% 30px, 0 0; padding-bottom: 0; padding-top: 330px; text-align: center; }
+    header > * { margin: 0; padding-left: 0; }
+    section h1 { margin: 0 -0.8em 1.3em; padding: 0.5em 0; text-align: center; }
+    article aside { top: 0.5em; right: -1.5em; }
+    article:before { left: -2em; right: -2em; } }
diff --git a/radicale/web/fn.js b/radicale/web/fn.js
new file mode 100644
index 0000000..a436ae6
--- /dev/null
+++ b/radicale/web/fn.js
@@ -0,0 +1,1003 @@
+/**
+ * This file is part of Radicale Server - Calendar Server
+ * Copyright (C) 2017 Unrud
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+/**
+ * Server address
+ * @const
+ * @type {string}
+ */
+var SERVER = (location.protocol + '//' + location.hostname +
+              (location.port ? ':' + location.port : ''));
+
+/**
+ * Path of the root collection on the server (must end with /).
+ * Strips the last path component (and an optional index.html) from the
+ * current location, e.g. "/.web/index.html" becomes "/".
+ * @const
+ * @type {string}
+ */
+var ROOT_PATH = location.pathname.replace(new RegExp("/+[^/]+/*(/index\\.html?)?$"), "") + '/';
+
+/**
+ * time between updates of collections (milliseconds); null disables
+ * periodic refreshing
+ * @const
+ * @type {?int}
+ */
+var UPDATE_INTERVAL = null;
+
+/**
+ * Regex to match and normalize color (#RRGGBB, optional alpha suffix is
+ * dropped via the non-capturing group)
+ * @const
+ */
+var COLOR_RE = new RegExp("^(#[0-9A-Fa-f]{6})(?:[0-9A-Fa-f]{2})?$");
+
+/**
+ * Escape string for usage in XML
+ * @param {string} s
+ * @return {string}
+ */
+function escape_xml(s) {
+ return (s
+ .replace("&", "&")
+ .replace('"', """)
+ .replace("'", "'")
+ .replace("<", "<")
+ .replace(">", ">"));
+}
+
+/**
+ * Collection type constants plus set operations on them. A compound
+ * type is an underscore-joined string of component names, e.g.
+ * "CALENDAR_JOURNAL_TASKS".
+ * @enum {string}
+ */
+var CollectionType = {
+    PRINCIPAL: "PRINCIPAL",
+    ADDRESSBOOK: "ADDRESSBOOK",
+    CALENDAR_JOURNAL_TASKS: "CALENDAR_JOURNAL_TASKS",
+    CALENDAR_JOURNAL: "CALENDAR_JOURNAL",
+    CALENDAR_TASKS: "CALENDAR_TASKS",
+    JOURNAL_TASKS: "JOURNAL_TASKS",
+    CALENDAR: "CALENDAR",
+    JOURNAL: "JOURNAL",
+    TASKS: "TASKS",
+    // True if every component of a also appears in b.
+    is_subset: function(a, b) {
+        var components = a.split("_");
+        var i;
+        for (i = 0; i < components.length; i++) {
+            if (b.search(components[i]) === -1) {
+                return false;
+            }
+        }
+        return true;
+    },
+    // Combine two types; ADDRESSBOOK cannot be mixed with calendar
+    // components, so such unions throw.
+    union: function(a, b) {
+        if (a.search(this.ADDRESSBOOK) !== -1 || b.search(this.ADDRESSBOOK) !== -1) {
+            if (a && a !== this.ADDRESSBOOK || b && b !== this.ADDRESSBOOK) {
+                throw "Invalid union: " + a + " " + b;
+            }
+            return this.ADDRESSBOOK;
+        }
+        var union = "";
+        if (a.search(this.CALENDAR) !== -1 || b.search(this.CALENDAR) !== -1) {
+            union += (union ? "_" : "") + this.CALENDAR;
+        }
+        if (a.search(this.JOURNAL) !== -1 || b.search(this.JOURNAL) !== -1) {
+            union += (union ? "_" : "") + this.JOURNAL;
+        }
+        if (a.search(this.TASKS) !== -1 || b.search(this.TASKS) !== -1) {
+            union += (union ? "_" : "") + this.TASKS;
+        }
+        return union;
+    }
+};
+
+/**
+ * Plain value object describing a collection on the server.
+ * @constructor
+ * @struct
+ * @param {string} href Must always start and end with /.
+ * @param {CollectionType} type
+ * @param {string} displayname
+ * @param {string} description
+ * @param {string} color
+ */
+function Collection(href, type, displayname, description, color) {
+    this.href = href;
+    this.type = type;
+    this.displayname = displayname;
+    this.color = color;
+    this.description = description;
+}
+
+/**
+ * Find the principal collection.
+ * @param {string} user
+ * @param {string} password
+ * @param {function(?Collection, ?string)} callback Returns result or error
+ * @return {XMLHttpRequest}
+ */
+function get_principal(user, password, callback) {
+    var request = new XMLHttpRequest();
+    request.open("PROPFIND", SERVER + ROOT_PATH, true, user, password);
+    request.onreadystatechange = function() {
+        if (request.readyState !== 4) {
+            return;
+        }
+        // 207 Multi-Status is the success response for PROPFIND.
+        if (request.status === 207) {
+            var xml = request.responseXML;
+            // "*|" is a namespace wildcard in querySelector.
+            var principal_element = xml.querySelector("*|multistatus:root > *|response:first-of-type > *|propstat > *|prop > *|current-user-principal > *|href");
+            var displayname_element = xml.querySelector("*|multistatus:root > *|response:first-of-type > *|propstat > *|prop > *|displayname");
+            if (principal_element) {
+                callback(new Collection(
+                    principal_element.textContent,
+                    CollectionType.PRINCIPAL,
+                    displayname_element ? displayname_element.textContent : "",
+                    "",
+                    ""), null);
+            } else {
+                callback(null, "Internal error");
+            }
+        } else {
+            callback(null, request.status + " " + request.statusText);
+        }
+    };
+    // NOTE(review): the PROPFIND request body below appears to have been
+    // stripped to empty string literals (the XML markup is missing) —
+    // verify against the original source before relying on it.
+    request.send('' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '');
+    return request;
+}
+
+/**
+ * Find all calendars and addressbooks in collection.
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection
+ * @param {function(?Array, ?string)} callback Returns result or error
+ * @return {XMLHttpRequest}
+ */
+function get_collections(user, password, collection, callback) {
+    var request = new XMLHttpRequest();
+    request.open("PROPFIND", SERVER + collection.href, true, user, password);
+    // Depth 1: the collection itself plus its direct children.
+    request.setRequestHeader("depth", "1");
+    request.onreadystatechange = function() {
+        if (request.readyState !== 4) {
+            return;
+        }
+        if (request.status === 207) {
+            var xml = request.responseXML;
+            var collections = [];
+            var response_query = "*|multistatus:root > *|response";
+            var responses = xml.querySelectorAll(response_query);
+            var i;
+            for (i = 0; i < responses.length; i++) {
+                var response = responses[i];
+                var href_element = response.querySelector(response_query + " > *|href");
+                var resourcetype_query = response_query + " > *|propstat > *|prop > *|resourcetype";
+                var resourcetype_element = response.querySelector(resourcetype_query);
+                var displayname_element = response.querySelector(response_query + " > *|propstat > *|prop > *|displayname");
+                var calendarcolor_element = response.querySelector(response_query + " > *|propstat > *|prop > *|calendar-color");
+                var addressbookcolor_element = response.querySelector(response_query + " > *|propstat > *|prop > *|addressbook-color");
+                var calendardesc_element = response.querySelector(response_query + " > *|propstat > *|prop > *|calendar-description");
+                var addressbookdesc_element = response.querySelector(response_query + " > *|propstat > *|prop > *|addressbook-description");
+                var components_query = response_query + " > *|propstat > *|prop > *|supported-calendar-component-set";
+                var components_element = response.querySelector(components_query);
+                var href = href_element ? href_element.textContent : "";
+                var displayname = displayname_element ? displayname_element.textContent : "";
+                var type = "";
+                var color = "";
+                var description = "";
+                if (resourcetype_element) {
+                    if (resourcetype_element.querySelector(resourcetype_query + " > *|addressbook")) {
+                        type = CollectionType.ADDRESSBOOK;
+                        color = addressbookcolor_element ? addressbookcolor_element.textContent : "";
+                        description = addressbookdesc_element ? addressbookdesc_element.textContent : "";
+                    } else if (resourcetype_element.querySelector(resourcetype_query + " > *|calendar")) {
+                        // Derive the compound type from the supported
+                        // calendar components (VEVENT/VJOURNAL/VTODO).
+                        if (components_element) {
+                            if (components_element.querySelector(components_query + " > *|comp[name=VEVENT]")) {
+                                type = CollectionType.union(type, CollectionType.CALENDAR);
+                            }
+                            if (components_element.querySelector(components_query + " > *|comp[name=VJOURNAL]")) {
+                                type = CollectionType.union(type, CollectionType.JOURNAL);
+                            }
+                            if (components_element.querySelector(components_query + " > *|comp[name=VTODO]")) {
+                                type = CollectionType.union(type, CollectionType.TASKS);
+                            }
+                        }
+                        color = calendarcolor_element ? calendarcolor_element.textContent : "";
+                        description = calendardesc_element ? calendardesc_element.textContent : "";
+                    }
+                }
+                // Normalize the color to #RRGGBB; drop it if invalid.
+                var sane_color = color.trim();
+                if (sane_color) {
+                    var color_match = COLOR_RE.exec(sane_color);
+                    if (color_match) {
+                        sane_color = color_match[1];
+                    } else {
+                        sane_color = "";
+                    }
+                }
+                // Skip the parent collection itself and untyped entries.
+                if (href.substr(-1) === "/" && href !== collection.href && type) {
+                    collections.push(new Collection(href, type, displayname, description, sane_color));
+                }
+            }
+            collections.sort(function(a, b) {
+                /** @type {string} */ var ca = a.displayname || a.href;
+                /** @type {string} */ var cb = b.displayname || b.href;
+                return ca.localeCompare(cb);
+            });
+            callback(collections, null);
+        } else {
+            callback(null, request.status + " " + request.statusText);
+        }
+    };
+    // NOTE(review): the PROPFIND request body below appears to have been
+    // stripped to empty string literals (the XML markup is missing) —
+    // verify against the original source before relying on it.
+    request.send('' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '' +
+        '');
+    return request;
+}
+
+/**
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection
+ * @param {function(?string)} callback Returns error or null
+ * @return {XMLHttpRequest}
+ */
+function delete_collection(user, password, collection, callback) {
+ var request = new XMLHttpRequest();
+ request.open("DELETE", SERVER + collection.href, true, user, password);
+ request.onreadystatechange = function() {
+ if (request.readyState !== 4) {
+ return;
+ }
+ if (200 <= request.status && request.status < 300) {
+ callback(null);
+ } else {
+ callback(request.status + " " + request.statusText);
+ }
+ };
+ request.send();
+ return request;
+}
+
+/**
+ * Create (MKCOL) or edit (PROPPATCH) a collection on the server.
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection
+ * @param {boolean} create
+ * @param {function(?string)} callback Returns error or null
+ * @return {XMLHttpRequest}
+ */
+function create_edit_collection(user, password, collection, create, callback) {
+    var request = new XMLHttpRequest();
+    request.open(create ? "MKCOL" : "PROPPATCH", SERVER + collection.href, true, user, password);
+    request.onreadystatechange = function() {
+        if (request.readyState !== 4) {
+            return;
+        }
+        if (200 <= request.status && request.status < 300) {
+            callback(null);
+        } else {
+            callback(request.status + " " + request.statusText);
+        }
+    };
+    // Build the property set depending on the collection type; only
+    // non-empty values are sent, the rest are removed on edit.
+    var displayname = escape_xml(collection.displayname);
+    var calendar_color = "";
+    var addressbook_color = "";
+    var calendar_description = "";
+    var addressbook_description = "";
+    var resourcetype;
+    var components = "";
+    if (collection.type === CollectionType.ADDRESSBOOK) {
+        // "ff" appends full opacity to the #RRGGBB color.
+        addressbook_color = escape_xml(collection.color + (collection.color ? "ff" : ""));
+        addressbook_description = escape_xml(collection.description);
+        resourcetype = '';
+    } else {
+        calendar_color = escape_xml(collection.color + (collection.color ? "ff" : ""));
+        calendar_description = escape_xml(collection.description);
+        resourcetype = '';
+        if (CollectionType.is_subset(CollectionType.CALENDAR, collection.type)) {
+            components += '';
+        }
+        if (CollectionType.is_subset(CollectionType.JOURNAL, collection.type)) {
+            components += '';
+        }
+        if (CollectionType.is_subset(CollectionType.TASKS, collection.type)) {
+            components += '';
+        }
+    }
+    var xml_request = create ? "mkcol" : "propertyupdate";
+    // NOTE(review): most XML fragments in the literals below (and the
+    // resourcetype/components values above) appear to have been stripped
+    // to empty strings — verify against the original source.
+    request.send('' +
+        '<' + xml_request + ' xmlns="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav" xmlns:CR="urn:ietf:params:xml:ns:carddav" xmlns:I="http://apple.com/ns/ical/" xmlns:INF="http://inf-it.com/ns/ab/">' +
+        '' +
+        '' +
+        (create ? '' + resourcetype + '' : '') +
+        (components ? '' + components + '' : '') +
+        (displayname ? '' + displayname + '' : '') +
+        (calendar_color ? '' + calendar_color + '' : '') +
+        (addressbook_color ? '' + addressbook_color + '' : '') +
+        (addressbook_description ? '' + addressbook_description + '' : '') +
+        (calendar_description ? '' + calendar_description + '' : '') +
+        '' +
+        '' +
+        (!create ? ('' +
+        '' +
+        (!components ? '' : '') +
+        (!displayname ? '' : '') +
+        (!calendar_color ? '' : '') +
+        (!addressbook_color ? '' : '') +
+        (!addressbook_description ? '' : '') +
+        (!calendar_description ? '' : '') +
+        '' +
+        ''): '') +
+        '' + xml_request + '>');
+    return request;
+}
+
+/**
+ * Create a new collection (thin wrapper over create_edit_collection).
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection
+ * @param {function(?string)} callback Returns error or null
+ * @return {XMLHttpRequest}
+ */
+function create_collection(user, password, collection, callback) {
+    return create_edit_collection(user, password, collection, true, callback);
+}
+
+/**
+ * Edit an existing collection (thin wrapper over create_edit_collection).
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection
+ * @param {function(?string)} callback Returns error or null
+ * @return {XMLHttpRequest}
+ */
+function edit_collection(user, password, collection, callback) {
+    return create_edit_collection(user, password, collection, false, callback);
+}
+
+/**
+ * Interface implemented by every UI scene on the scene stack.
+ * @interface
+ */
+function Scene() {}
+/**
+ * Scene is on top of stack and visible.
+ */
+Scene.prototype.show = function() {};
+/**
+ * Scene is no longer visible (another scene was pushed on top).
+ */
+Scene.prototype.hide = function() {};
+/**
+ * Scene is removed from scene stack; cancel pending work here.
+ */
+Scene.prototype.release = function() {};
+
+
+/**
+ * Global stack of scenes; the last element is the visible one.
+ * @type {Array}
+ */
+var scene_stack = [];
+
+/**
+ * Push scene onto stack.
+ * @param {Scene} scene
+ * @param {boolean} replace Replace the scene on top of the stack.
+ */
+function push_scene(scene, replace) {
+ if (scene_stack.length >= 1) {
+ scene_stack[scene_stack.length - 1].hide();
+ if (replace) {
+ scene_stack.pop().release();
+ }
+ }
+ scene_stack.push(scene);
+ scene.show();
+}
+
+/**
+ * Remove scenes from stack.
+ * @param {number} index New top of stack
+ */
+function pop_scene(index) {
+ if (scene_stack.length - 1 <= index) {
+ return;
+ }
+ scene_stack[scene_stack.length - 1].hide();
+ while (scene_stack.length - 1 > index) {
+ var old_length = scene_stack.length;
+ scene_stack.pop().release();
+ if (old_length - 1 === index + 1) {
+ break;
+ }
+ }
+ if (scene_stack.length >= 1) {
+ var scene = scene_stack[scene_stack.length - 1];
+ scene.show();
+ } else {
+ throw "Scene stack is empty";
+ }
+}
+
+/**
+ * Scene asking for username/password and, on success, replacing itself
+ * with a CollectionsScene for the user's principal collection.
+ * @constructor
+ * @implements {Scene}
+ */
+function LoginScene() {
+    var html_scene = document.getElementById("loginscene");
+    var form = html_scene.querySelector("[name=form]");
+    var user_form = html_scene.querySelector("[name=user]");
+    var password_form = html_scene.querySelector("[name=password]");
+    var error_form = html_scene.querySelector("[name=error]");
+    var logout_view = document.getElementById("logoutview");
+    var logout_user_form = logout_view.querySelector("[name=user]");
+    var logout_btn = logout_view.querySelector("[name=link]");
+    // First show may auto-login from sessionStorage (see this.show).
+    var first_show = true;
+
+    /** @type {?number} */ var scene_index = null;
+    var user = "";
+    var error = "";
+    /** @type {?XMLHttpRequest} */ var principal_req = null;
+
+    function read_form() {
+        user = user_form.value;
+    }
+
+    function fill_form() {
+        user_form.value = user;
+        // The password is never kept in the form between shows.
+        password_form.value = "";
+        error_form.textContent = error ? "Error: " + error : "";
+    }
+
+    function onlogin() {
+        try {
+            read_form();
+            var password = password_form.value;
+            if (user) {
+                error = "";
+                // setup logout
+                logout_view.style.display = "block";
+                logout_btn.onclick = onlogout;
+                logout_user_form.textContent = user;
+                // Fetch principal
+                var loading_scene = new LoadingScene();
+                push_scene(loading_scene, false);
+                principal_req = get_principal(user, password, function(collection, error1) {
+                    // scene_index === null means we were released.
+                    if (scene_index === null) {
+                        return;
+                    }
+                    principal_req = null;
+                    if (error1) {
+                        error = error1;
+                        pop_scene(scene_index);
+                    } else {
+                        // show collections
+                        var saved_user = user;
+                        user = "";
+                        // Remember credentials for auto-login on reload.
+                        if (typeof(sessionStorage) !== "undefined") {
+                            sessionStorage.setItem("radicale_user", saved_user);
+                            sessionStorage.setItem("radicale_password", password);
+                        }
+                        var collections_scene = new CollectionsScene(
+                            saved_user, password, collection, function(error1) {
+                                // Called before CollectionsScene pops
+                                // back to us; restore state for redisplay.
+                                error = error1;
+                                user = saved_user;
+                            });
+                        push_scene(collections_scene, true);
+                    }
+                });
+            } else {
+                error = "Username is empty";
+                fill_form();
+            }
+        } catch(err) {
+            console.error(err);
+        }
+        // Always cancel the browser's default form submission.
+        return false;
+    }
+
+    function onlogout() {
+        try {
+            if (scene_index === null) {
+                return false;
+            }
+            user = "";
+            pop_scene(scene_index);
+        } catch (err) {
+            console.error(err);
+        }
+        return false;
+    }
+
+    this.show = function() {
+        var saved_first_show = first_show;
+        first_show = false;
+        this.release();
+        fill_form();
+        form.onsubmit = onlogin;
+        html_scene.style.display = "block";
+        user_form.focus();
+        scene_index = scene_stack.length - 1;
+        if (typeof(sessionStorage) !== "undefined") {
+            if (saved_first_show && sessionStorage.getItem("radicale_user")) {
+                // Auto-login with stored credentials on first display.
+                user_form.value = sessionStorage.getItem("radicale_user");
+                password_form.value = sessionStorage.getItem("radicale_password");
+                onlogin();
+            } else {
+                sessionStorage.setItem("radicale_user", "");
+                sessionStorage.setItem("radicale_password", "");
+            }
+        }
+    };
+    this.hide = function() {
+        read_form();
+        html_scene.style.display = "none";
+        form.onsubmit = null;
+    };
+    this.release = function() {
+        scene_index = null;
+        // cancel pending requests
+        if (principal_req !== null) {
+            principal_req.abort();
+            principal_req = null;
+        }
+        // remove logout
+        logout_view.style.display = "none";
+        logout_btn.onclick = null;
+        logout_user_form.textContent = "";
+    };
+}
+
+/**
+ * @constructor
+ * @implements {Scene}
+ */
+function LoadingScene() {
+ var html_scene = document.getElementById("loadingscene");
+ this.show = function() {
+ html_scene.style.display = "block";
+ };
+ this.hide = function() {
+ html_scene.style.display = "none";
+ };
+ this.release = function() {};
+}
+
+/**
+ * Scene listing the user's calendars/addressbooks with create, edit and
+ * delete actions.
+ * @constructor
+ * @implements {Scene}
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection The principal collection.
+ * @param {function(string)} onerror Called when an error occurs, before the
+ *                                   scene is popped.
+ */
+function CollectionsScene(user, password, collection, onerror) {
+    var html_scene = document.getElementById("collectionsscene");
+    // Hidden template node that is cloned once per collection.
+    var template = html_scene.querySelector("[name=collectiontemplate]");
+    var new_btn = html_scene.querySelector("[name=new]");
+
+    /** @type {?number} */ var scene_index = null;
+    var saved_template_display = null;
+    /** @type {?XMLHttpRequest} */ var collections_req = null;
+    var timer = null;
+    // True when we were hidden by our own update()'s push/pop cycle.
+    var from_update = false;
+    /** @type {?Array} */ var collections = null;
+    /** @type {Array} */ var nodes = [];
+
+    function onnew() {
+        try {
+            var create_collection_scene = new CreateEditCollectionScene(user, password, collection);
+            push_scene(create_collection_scene, false);
+        } catch(err) {
+            console.error(err);
+        }
+        return false;
+    }
+
+    function onedit(collection) {
+        try {
+            var edit_collection_scene = new CreateEditCollectionScene(user, password, collection);
+            push_scene(edit_collection_scene, false);
+        } catch(err) {
+            console.error(err);
+        }
+        return false;
+    }
+
+    function ondelete(collection) {
+        try {
+            var delete_collection_scene = new DeleteCollectionScene(user, password, collection);
+            push_scene(delete_collection_scene, false);
+        } catch(err) {
+            console.error(err);
+        }
+        return false;
+    }
+
+    // Rebuild the DOM list from scratch for the given collections.
+    function show_collections(collections) {
+        nodes.forEach(function(node) {
+            template.parentNode.removeChild(node);
+        });
+        nodes = [];
+        collections.forEach(function (collection) {
+            var node = template.cloneNode(true);
+            var title_form = node.querySelector("[name=title]");
+            var description_form = node.querySelector("[name=description]");
+            var url_form = node.querySelector("[name=url]");
+            var color_form = node.querySelector("[name=color]");
+            var delete_btn = node.querySelector("[name=delete]");
+            var edit_btn = node.querySelector("[name=edit]");
+            if (collection.color) {
+                color_form.style.color = collection.color;
+            } else {
+                color_form.style.display = "none";
+            }
+            // Enumerate every valid type label in the template and hide
+            // all but the one matching this collection.
+            var possible_types = [CollectionType.ADDRESSBOOK];
+            [CollectionType.CALENDAR, ""].forEach(function(e) {
+                [CollectionType.union(e, CollectionType.JOURNAL), e].forEach(function(e) {
+                    [CollectionType.union(e, CollectionType.TASKS), e].forEach(function(e) {
+                        if (e) {
+                            possible_types.push(e);
+                        }
+                    });
+                });
+            });
+            possible_types.forEach(function(e) {
+                if (e !== collection.type) {
+                    node.querySelector("[name=" + e + "]").style.display = "none";
+                }
+            });
+            title_form.textContent = collection.displayname || collection.href;
+            description_form.textContent = collection.description;
+            var href = SERVER + collection.href;
+            url_form.href = href;
+            url_form.textContent = href;
+            delete_btn.onclick = function(ev) {return ondelete(collection);};
+            edit_btn.onclick = function(ev) {return onedit(collection);};
+            node.style.display = saved_template_display;
+            nodes.push(node);
+            template.parentNode.insertBefore(node, template);
+        });
+    }
+
+    // Fetch the collection list; shows a LoadingScene on first run and
+    // re-schedules itself when UPDATE_INTERVAL is set.
+    function update() {
+        if (collections === null) {
+            var loading_scene = new LoadingScene();
+            push_scene(loading_scene, false);
+        }
+        collections_req = get_collections(user, password, collection, function(collections1, error) {
+            if (scene_index === null) {
+                return;
+            }
+            collections_req = null;
+            if (error) {
+                onerror(error);
+                pop_scene(scene_index - 1);
+            } else {
+                var old_collections = collections;
+                collections = collections1;
+                if (UPDATE_INTERVAL !== null) {
+                    timer = window.setTimeout(update, UPDATE_INTERVAL);
+                }
+                from_update = true;
+                if (old_collections === null) {
+                    // Pop the LoadingScene pushed above.
+                    pop_scene(scene_index);
+                } else {
+                    show_collections(collections);
+                }
+            }
+        });
+    }
+
+    this.show = function() {
+        saved_template_display = template.style.display;
+        template.style.display = "none";
+        html_scene.style.display = "block";
+        new_btn.onclick = onnew;
+        if (scene_index === null) {
+            scene_index = scene_stack.length - 1;
+            if (collections === null && collections_req !== null) {
+                pop_scene(scene_index - 1);
+                return;
+            }
+            update();
+        } else if (collections === null) {
+            pop_scene(scene_index - 1);
+        } else {
+            if (from_update) {
+                show_collections(collections);
+            } else {
+                // Coming back from a sub-scene: refresh from the server.
+                collections = null;
+                update();
+            }
+        }
+    };
+    this.hide = function() {
+        html_scene.style.display = "none";
+        template.style.display = saved_template_display;
+        new_btn.onclick = null;
+        if (timer !== null) {
+            window.clearTimeout(timer);
+            timer = null;
+        }
+        from_update = false;
+        if (collections !== null && collections_req !== null) {
+            collections_req.abort();
+            collections_req = null;
+        }
+        show_collections([]);
+    };
+    this.release = function() {
+        scene_index = null;
+        if (collections_req !== null) {
+            collections_req.abort();
+            collections_req = null;
+        }
+    };
+}
+
+/**
+ * @constructor
+ * @implements {Scene}
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection
+ */
+function DeleteCollectionScene(user, password, collection) {
+ var html_scene = document.getElementById("deletecollectionscene");
+ var title_form = html_scene.querySelector("[name=title]");
+ var error_form = html_scene.querySelector("[name=error]");
+ var delete_btn = html_scene.querySelector("[name=delete]");
+ var cancel_btn = html_scene.querySelector("[name=cancel]");
+ var no_btn = html_scene.querySelector("[name=no]");
+
+ /** @type {?number} */ var scene_index = null;
+ /** @type {?XMLHttpRequest} */ var delete_req = null;
+ var error = "";
+
+ function ondelete() {
+ try {
+ var loading_scene = new LoadingScene();
+ push_scene(loading_scene);
+ delete_req = delete_collection(user, password, collection, function(error1) {
+ if (scene_index === null) {
+ return;
+ }
+ delete_req = null;
+ if (error1) {
+ error = error1;
+ pop_scene(scene_index);
+ } else {
+ pop_scene(scene_index - 1);
+ }
+ });
+ } catch(err) {
+ console.error(err);
+ }
+ return false;
+ }
+
+ function oncancel() {
+ try {
+ pop_scene(scene_index - 1);
+ } catch(err) {
+ console.error(err);
+ }
+ return false;
+ }
+
+ this.show = function() {
+ this.release();
+ scene_index = scene_stack.length - 1;
+ html_scene.style.display = "block";
+ title_form.textContent = collection.displayname || collection.href;
+ error_form.textContent = error ? "Error: " + error : "";
+ delete_btn.onclick = ondelete;
+ cancel_btn.onclick = oncancel;
+ };
+ this.hide = function() {
+ html_scene.style.display = "none";
+ cancel_btn.onclick = null;
+ delete_btn.onclick = null;
+ };
+ this.release = function() {
+ scene_index = null;
+ if (delete_req !== null) {
+ delete_req.abort();
+ delete_req = null;
+ }
+ };
+}
+
+/**
+ * Generate random hex number.
+ * @param {number} length
+ * @return {string}
+ */
+function randHex(length) {
+ var s = Math.floor(Math.random() * Math.pow(16, length)).toString(16);
+ while (s.length < length) {
+ s = "0" + s;
+ }
+ return s;
+}
+
+/**
+ * Scene with a form to create a new collection or edit an existing one.
+ * @constructor
+ * @implements {Scene}
+ * @param {string} user
+ * @param {string} password
+ * @param {Collection} collection if it's a principal collection, a new
+ *                                collection will be created inside of it.
+ *                                Otherwise the collection will be edited.
+ */
+function CreateEditCollectionScene(user, password, collection) {
+    var edit = collection.type !== CollectionType.PRINCIPAL;
+    var html_scene = document.getElementById(edit ? "editcollectionscene" : "createcollectionscene");
+    var title_form = edit ? html_scene.querySelector("[name=title]") : null;
+    var error_form = html_scene.querySelector("[name=error]");
+    var displayname_form = html_scene.querySelector("[name=displayname]");
+    var description_form = html_scene.querySelector("[name=description]");
+    var type_form = html_scene.querySelector("[name=type]");
+    var color_form = html_scene.querySelector("[name=color]");
+    var submit_btn = html_scene.querySelector("[name=submit]");
+    var cancel_btn = html_scene.querySelector("[name=cancel]");
+
+    /** @type {?number} */ var scene_index = null;
+    /** @type {?XMLHttpRequest} */ var create_edit_req = null;
+    var error = "";
+    /** @type {?Element} */ var saved_type_form = null;
+
+    // New collections get a random UUID-shaped href below the principal.
+    var href = edit ? collection.href : (
+        collection.href + randHex(8) + "-" + randHex(4) + "-" + randHex(4) +
+        "-" + randHex(4) + "-" + randHex(12) + "/");
+    var displayname = edit ? collection.displayname : "";
+    var description = edit ? collection.description : "";
+    var type = edit ? collection.type : CollectionType.CALENDAR_JOURNAL_TASKS;
+    var color = edit && collection.color ? collection.color : "#" + randHex(6);
+
+    // When editing, only supersets of the current type are offered so
+    // existing items stay valid.
+    function remove_invalid_types() {
+        if (!edit) {
+            return;
+        }
+        /** @type {HTMLOptionsCollection} */ var options = type_form.options;
+        // remove all options that are not supersets
+        var i;
+        for (i = options.length - 1; i >= 0; i--) {
+            if (!CollectionType.is_subset(type, options[i].value)) {
+                options.remove(i);
+            }
+        }
+    }
+
+    function read_form() {
+        displayname = displayname_form.value;
+        description = description_form.value;
+        type = type_form.value;
+        color = color_form.value;
+    }
+
+    function fill_form() {
+        displayname_form.value = displayname;
+        description_form.value = description;
+        type_form.value = type;
+        color_form.value = color;
+        error_form.textContent = error ? "Error: " + error : "";
+    }
+
+    function onsubmit() {
+        try {
+            read_form();
+            // Validate/normalize the color before sending anything.
+            var sane_color = color.trim();
+            if (sane_color) {
+                var color_match = COLOR_RE.exec(sane_color);
+                if (!color_match) {
+                    error = "Invalid color";
+                    fill_form();
+                    return false;
+                }
+                sane_color = color_match[1];
+            }
+            var loading_scene = new LoadingScene();
+            push_scene(loading_scene);
+            var collection = new Collection(href, type, displayname, description, sane_color);
+            var callback = function(error1) {
+                if (scene_index === null) {
+                    return;
+                }
+                create_edit_req = null;
+                if (error1) {
+                    error = error1;
+                    pop_scene(scene_index);
+                } else {
+                    pop_scene(scene_index - 1);
+                }
+            };
+            if (edit) {
+                create_edit_req = edit_collection(user, password, collection, callback);
+            } else {
+                create_edit_req = create_collection(user, password, collection, callback);
+            }
+        } catch(err) {
+            console.error(err);
+        }
+        return false;
+    }
+
+    function oncancel() {
+        try {
+            pop_scene(scene_index - 1);
+        } catch(err) {
+            console.error(err);
+        }
+        return false;
+    }
+
+    this.show = function() {
+        this.release();
+        scene_index = scene_stack.length - 1;
+        // Clone type_form because it's impossible to hide options without removing them
+        saved_type_form = type_form;
+        type_form = type_form.cloneNode(true);
+        saved_type_form.parentNode.replaceChild(type_form, saved_type_form);
+        remove_invalid_types();
+        html_scene.style.display = "block";
+        if (edit) {
+            title_form.textContent = collection.displayname || collection.href;
+        }
+        fill_form();
+        submit_btn.onclick = onsubmit;
+        cancel_btn.onclick = oncancel;
+    };
+    this.hide = function() {
+        read_form();
+        html_scene.style.display = "none";
+        // restore type_form
+        type_form.parentNode.replaceChild(saved_type_form, type_form);
+        type_form = saved_type_form;
+        saved_type_form = null;
+        submit_btn.onclick = null;
+        cancel_btn.onclick = null;
+    };
+    this.release = function() {
+        scene_index = null;
+        if (create_edit_req !== null) {
+            create_edit_req.abort();
+            create_edit_req = null;
+        }
+    };
+}
+
+/** Entry point: start the UI with the login scene once the page loaded. */
+function main() {
+    push_scene(new LoginScene(), false);
+}
+
+window.addEventListener("load", main);
diff --git a/radicale/web/index.html b/radicale/web/index.html
new file mode 100644
index 0000000..377e74c
--- /dev/null
+++ b/radicale/web/index.html
@@ -0,0 +1,105 @@
+
+
+
+
+
+
+
+ Web interface for Radicale
+
+
+
+
+
+
+
+ Loading
+ Please wait...
+
+
+ Collections
+ Create new addressbook or calendar
+
+ █ Title [addressbookcalendar, journal and taskscalendar and journalcalendar and tasksjournal and taskscalendarjournaltasks]
+ Description
+
+
+
+
+ Edit collection
+ Edit title:
+
+
+
+ Create new collection
+
+
+
+ Delete collection
+ Delete title?
+
+
+
+
+
diff --git a/radicale/xmlutils.py b/radicale/xmlutils.py
new file mode 100644
index 0000000..b096bcd
--- /dev/null
+++ b/radicale/xmlutils.py
@@ -0,0 +1,1324 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2008 Nicolas Kandel
+# Copyright © 2008 Pascal Halter
+# Copyright © 2008-2015 Guillaume Ayoub
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale. If not, see .
+
+"""
+XML and iCal requests manager.
+
+Note that all these functions need to receive unicode objects for full
+iCal requests (PUT) and string objects with charset correctly defined
+in them for XML requests (all but PUT).
+
+"""
+
+import copy
+import math
+import posixpath
+import re
+import xml.etree.ElementTree as ET
+from collections import OrderedDict
+from datetime import date, datetime, timedelta, timezone
+from http import client
+from itertools import chain
+from urllib.parse import quote, unquote, urlparse
+
+from . import storage
+
+MIMETYPES = {
+ "VADDRESSBOOK": "text/vcard",
+ "VCALENDAR": "text/calendar"}
+
+OBJECT_MIMETYPES = {
+ "VCARD": "text/vcard",
+ "VLIST": "text/x-vlist",
+ "VCALENDAR": "text/calendar"}
+
+NAMESPACES = {
+ "C": "urn:ietf:params:xml:ns:caldav",
+ "CR": "urn:ietf:params:xml:ns:carddav",
+ "D": "DAV:",
+ "CS": "http://calendarserver.org/ns/",
+ "ICAL": "http://apple.com/ns/ical/",
+ "ME": "http://me.com/_namespace/",
+ "RADICALE": "http://radicale.org/ns/"}
+
+NAMESPACES_REV = {}
+for short, url in NAMESPACES.items():
+ NAMESPACES_REV[url] = short
+ ET.register_namespace("" if short == "D" else short, url)
+
+CLARK_TAG_REGEX = re.compile(r"{(?P[^}]*)}(?P.*)", re.VERBOSE)
+HUMAN_REGEX = re.compile(r"(?P[^:{}]*):(?P.*)", re.VERBOSE)
+
+DAY = timedelta(days=1)
+SECOND = timedelta(seconds=1)
+DATETIME_MIN = datetime.min.replace(tzinfo=timezone.utc)
+DATETIME_MAX = datetime.max.replace(tzinfo=timezone.utc)
+TIMESTAMP_MIN = math.floor(DATETIME_MIN.timestamp())
+TIMESTAMP_MAX = math.ceil(DATETIME_MAX.timestamp())
+
+
+def pretty_xml(element, level=0):
+ """Indent an ElementTree ``element`` and its children."""
+ if not level:
+ element = copy.deepcopy(element)
+ i = "\n" + level * " "
+ if len(element):
+ if not element.text or not element.text.strip():
+ element.text = i + " "
+ if not element.tail or not element.tail.strip():
+ element.tail = i
+ for sub_element in element:
+ pretty_xml(sub_element, level + 1)
+ if not sub_element.tail or not sub_element.tail.strip():
+ sub_element.tail = i
+ else:
+ if level and (not element.tail or not element.tail.strip()):
+ element.tail = i
+ if not level:
+ return '\n%s' % ET.tostring(element, "unicode")
+
+
+def _tag(short_name, local):
+ """Get XML Clark notation {uri(``short_name``)}``local``."""
+ return "{%s}%s" % (NAMESPACES[short_name], local)
+
+
+def _tag_from_clark(name):
+ """Get a human-readable variant of the XML Clark notation tag ``name``.
+
+ For a given name using the XML Clark notation, return a human-readable
+ variant of the tag name for known namespaces. Otherwise, return the name as
+ is.
+
+ """
+ match = CLARK_TAG_REGEX.match(name)
+ if match and match.group("namespace") in NAMESPACES_REV:
+ args = {
+ "ns": NAMESPACES_REV[match.group("namespace")],
+ "tag": match.group("tag")}
+ return "%(ns)s:%(tag)s" % args
+ return name
+
+
+def _tag_from_human(name):
+ """Get an XML Clark notation tag from human-readable variant ``name``."""
+ match = HUMAN_REGEX.match(name)
+ if match and match.group("namespace") in NAMESPACES:
+ return _tag(match.group("namespace"), match.group("tag"))
+ return name
+
+
+def _response(code):
+ """Return full W3C names from HTTP status codes."""
+ return "HTTP/1.1 %i %s" % (code, client.responses[code])
+
+
+def _href(base_prefix, href):
+ """Return prefixed href."""
+ return quote("%s%s" % (base_prefix, href))
+
+
+def _webdav_error(namespace, name):
+ """Generate XML error message."""
+ root = ET.Element(_tag("D", "error"))
+ root.append(ET.Element(_tag(namespace, name)))
+ return root
+
+
+def _date_to_datetime(date_):
+ """Transform a date to a UTC datetime.
+
+ If date_ is a datetime without timezone, return as UTC datetime. If date_
+ is already a datetime with timezone, return as is.
+
+ """
+ if not isinstance(date_, datetime):
+ date_ = datetime.combine(date_, datetime.min.time())
+ if not date_.tzinfo:
+ date_ = date_.replace(tzinfo=timezone.utc)
+ return date_
+
+
+def _comp_match(item, filter_, level=0):
+ """Check whether the ``item`` matches the comp ``filter_``.
+
+ If ``level`` is ``0``, the filter is applied on the
+ item's collection. Otherwise, it's applied on the item.
+
+ See rfc4791-9.7.1.
+
+ """
+
+ # TODO: Filtering VALARM and VFREEBUSY is not implemented
+ # HACK: the filters are tested separately against all components
+
+ if level == 0:
+ tag = item.name
+ elif level == 1:
+ tag = item.component_name
+ else:
+ item.collection.logger.warning(
+ "Filters with three levels of comp-filter are not supported")
+ return True
+ if not tag:
+ return False
+ name = filter_.get("name").upper()
+ if len(filter_) == 0:
+ # Point #1 of rfc4791-9.7.1
+ return name == tag
+ if len(filter_) == 1:
+ if filter_[0].tag == _tag("C", "is-not-defined"):
+ # Point #2 of rfc4791-9.7.1
+ return name != tag
+ if name != tag:
+ return False
+ if (level == 0 and name != "VCALENDAR" or
+ level == 1 and name not in ("VTODO", "VEVENT", "VJOURNAL")):
+ item.collection.logger.warning("Filtering %s is not supported" % name)
+ return True
+ # Point #3 and #4 of rfc4791-9.7.1
+ components = ([item.item] if level == 0
+ else list(getattr(item, "%s_list" % tag.lower())))
+ for child in filter_:
+ if child.tag == _tag("C", "prop-filter"):
+ if not any(_prop_match(comp, child, "C")
+ for comp in components):
+ return False
+ elif child.tag == _tag("C", "time-range"):
+ if not _time_range_match(item.item, filter_[0], tag):
+ return False
+ elif child.tag == _tag("C", "comp-filter"):
+ if not _comp_match(item, child, level=level + 1):
+ return False
+ else:
+ raise ValueError("Unexpected %r in comp-filter" % child.tag)
+ return True
+
+
+def _prop_match(vobject_item, filter_, ns):
+ """Check whether the ``item`` matches the prop ``filter_``.
+
+ See rfc4791-9.7.2 and rfc6352-10.5.1.
+
+ """
+ name = filter_.get("name").lower()
+ if len(filter_) == 0:
+ # Point #1 of rfc4791-9.7.2
+ return name in vobject_item.contents
+ if len(filter_) == 1:
+ if filter_[0].tag == _tag("C", "is-not-defined"):
+ # Point #2 of rfc4791-9.7.2
+ return name not in vobject_item.contents
+ if name not in vobject_item.contents:
+ return False
+ # Point #3 and #4 of rfc4791-9.7.2
+ for child in filter_:
+ if ns == "C" and child.tag == _tag("C", "time-range"):
+ if not _time_range_match(vobject_item, child, name):
+ return False
+ elif child.tag == _tag(ns, "text-match"):
+ if not _text_match(vobject_item, child, name, ns):
+ return False
+ elif child.tag == _tag(ns, "param-filter"):
+ if not _param_filter_match(vobject_item, child, name, ns):
+ return False
+ else:
+ raise ValueError("Unexpected %r in prop-filter" % child.tag)
+ return True
+
+
+def _time_range_match(vobject_item, filter_, child_name):
+ """Check whether the component/property ``child_name`` of
+ ``vobject_item`` matches the time-range ``filter_``."""
+
+ start = filter_.get("start")
+ end = filter_.get("end")
+ if not start and not end:
+ return False
+ if start:
+ start = datetime.strptime(start, "%Y%m%dT%H%M%SZ")
+ else:
+ start = datetime.min
+ if end:
+ end = datetime.strptime(end, "%Y%m%dT%H%M%SZ")
+ else:
+ end = datetime.max
+ start = start.replace(tzinfo=timezone.utc)
+ end = end.replace(tzinfo=timezone.utc)
+
+ matched = False
+
+ def range_fn(range_start, range_end, is_recurrence):
+ nonlocal matched
+ if start < range_end and range_start < end:
+ matched = True
+ return True
+ if end < range_start and not is_recurrence:
+ return True
+ return False
+
+ def infinity_fn(start):
+ return False
+
+ _visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn)
+ return matched
+
+
+def _visit_time_ranges(vobject_item, child_name, range_fn, infinity_fn):
+ """Visit all time ranges in the component/property ``child_name`` of
+ ``vobject_item`` with visitors ``range_fn`` and ``infinity_fn``.
+
+ ``range_fn`` gets called for every time_range with ``start`` and ``end``
+ datetimes and ``is_recurrence`` as arguments. If the function returns True,
+ the operation is cancelled.
+
+ ``infinity_fn`` gets called when an infinite recurrence rule is detected
+ with ``start`` datetime as argument. If the function returns True, the
+ operation is cancelled.
+
+ See rfc4791-9.9.
+
+ """
+
+ # HACK: According to rfc5545-3.8.4.4 a recurrence that is rescheduled
+ # with Recurrence ID affects the recurrence itself and all following
+ # recurrences too. This is not respected and clients don't seem to bother
+ # either.
+
+ def getrruleset(child, ignore=()):
+ if (hasattr(child, "rrule") and
+ ";UNTIL=" not in child.rrule.value.upper() and
+ ";COUNT=" not in child.rrule.value.upper()):
+ for dtstart in child.getrruleset(addRDate=True):
+ if dtstart in ignore:
+ continue
+ if infinity_fn(_date_to_datetime(dtstart)):
+ return (), True
+ break
+ return filter(lambda dtstart: dtstart not in ignore,
+ child.getrruleset(addRDate=True)), False
+
+ def get_children(components):
+ main = None
+ recurrences = []
+ for comp in components:
+ if hasattr(comp, "recurrence_id") and comp.recurrence_id.value:
+ recurrences.append(comp.recurrence_id.value)
+ if comp.rruleset:
+ # Prevent possible infinite loop
+ raise ValueError("Overwritten recurrence with RRULESET")
+ yield comp, True, ()
+ else:
+ if main is not None:
+ raise ValueError("Multiple main components")
+ main = comp
+ if main is None:
+ raise ValueError("Main component missing")
+ yield main, False, recurrences
+
+ # Comments give the lines in the tables of the specification
+ if child_name == "VEVENT":
+ for child, is_recurrence, recurrences in get_children(
+ vobject_item.vevent_list):
+ # TODO: check if there's a timezone
+ dtstart = child.dtstart.value
+
+ if child.rruleset:
+ dtstarts, infinity = getrruleset(child, recurrences)
+ if infinity:
+ return
+ else:
+ dtstarts = (dtstart,)
+
+ dtend = getattr(child, "dtend", None)
+ if dtend is not None:
+ dtend = dtend.value
+ original_duration = (dtend - dtstart).total_seconds()
+ dtend = _date_to_datetime(dtend)
+
+ duration = getattr(child, "duration", None)
+ if duration is not None:
+ original_duration = duration = duration.value
+
+ for dtstart in dtstarts:
+ dtstart_is_datetime = isinstance(dtstart, datetime)
+ dtstart = _date_to_datetime(dtstart)
+
+ if dtend is not None:
+ # Line 1
+ dtend = dtstart + timedelta(seconds=original_duration)
+ if range_fn(dtstart, dtend, is_recurrence):
+ return
+ elif duration is not None:
+ if original_duration is None:
+ original_duration = duration.seconds
+ if duration.seconds > 0:
+ # Line 2
+ if range_fn(dtstart, dtstart + duration,
+ is_recurrence):
+ return
+ else:
+ # Line 3
+ if range_fn(dtstart, dtstart + SECOND, is_recurrence):
+ return
+ elif dtstart_is_datetime:
+ # Line 4
+ if range_fn(dtstart, dtstart + SECOND, is_recurrence):
+ return
+ else:
+ # Line 5
+ if range_fn(dtstart, dtstart + DAY, is_recurrence):
+ return
+
+ elif child_name == "VTODO":
+ for child, is_recurrence, recurrences in get_children(
+ vobject_item.vtodo_list):
+ dtstart = getattr(child, "dtstart", None)
+ duration = getattr(child, "duration", None)
+ due = getattr(child, "due", None)
+ completed = getattr(child, "completed", None)
+ created = getattr(child, "created", None)
+
+ if dtstart is not None:
+ dtstart = _date_to_datetime(dtstart.value)
+ if duration is not None:
+ duration = duration.value
+ if due is not None:
+ due = _date_to_datetime(due.value)
+ if dtstart is not None:
+ original_duration = (due - dtstart).total_seconds()
+ if completed is not None:
+ completed = _date_to_datetime(completed.value)
+ if created is not None:
+ created = _date_to_datetime(created.value)
+ original_duration = (completed - created).total_seconds()
+ elif created is not None:
+ created = _date_to_datetime(created.value)
+
+ if child.rruleset:
+ reference_dates, infinity = getrruleset(child, recurrences)
+ if infinity:
+ return
+ else:
+ if dtstart is not None:
+ reference_dates = (dtstart,)
+ elif due is not None:
+ reference_dates = (due,)
+ elif completed is not None:
+ reference_dates = (completed,)
+ elif created is not None:
+ reference_dates = (created,)
+ else:
+ # Line 8
+ if range_fn(DATETIME_MIN, DATETIME_MAX, is_recurrence):
+ return
+ reference_dates = ()
+
+ for reference_date in reference_dates:
+ reference_date = _date_to_datetime(reference_date)
+
+ if dtstart is not None and duration is not None:
+ # Line 1
+ if range_fn(reference_date,
+ reference_date + duration + SECOND,
+ is_recurrence):
+ return
+ if range_fn(reference_date + duration - SECOND,
+ reference_date + duration + SECOND,
+ is_recurrence):
+ return
+ elif dtstart is not None and due is not None:
+ # Line 2
+ due = reference_date + timedelta(seconds=original_duration)
+ if (range_fn(reference_date, due, is_recurrence) or
+ range_fn(reference_date,
+ reference_date + SECOND, is_recurrence) or
+ range_fn(due - SECOND, due, is_recurrence) or
+ range_fn(due - SECOND, reference_date + SECOND,
+ is_recurrence)):
+ return
+ elif dtstart is not None:
+ if range_fn(reference_date, reference_date + SECOND,
+ is_recurrence):
+ return
+ elif due is not None:
+ # Line 4
+ if range_fn(reference_date - SECOND, reference_date,
+ is_recurrence):
+ return
+ elif completed is not None and created is not None:
+ # Line 5
+ completed = reference_date + timedelta(
+ seconds=original_duration)
+ if (range_fn(reference_date - SECOND,
+ reference_date + SECOND,
+ is_recurrence) or
+ range_fn(completed - SECOND, completed + SECOND,
+ is_recurrence) or
+ range_fn(reference_date - SECOND,
+ reference_date + SECOND, is_recurrence) or
+ range_fn(completed - SECOND, completed + SECOND,
+ is_recurrence)):
+ return
+ elif completed is not None:
+ # Line 6
+ if range_fn(reference_date - SECOND,
+ reference_date + SECOND, is_recurrence):
+ return
+ elif created is not None:
+ # Line 7
+ if range_fn(reference_date, DATETIME_MAX, is_recurrence):
+ return
+
+ elif child_name == "VJOURNAL":
+ for child, is_recurrence, recurrences in get_children(
+ vobject_item.vjournal_list):
+ dtstart = getattr(child, "dtstart", None)
+
+ if dtstart is not None:
+ dtstart = dtstart.value
+ if child.rruleset:
+ dtstarts, infinity = getrruleset(child, recurrences)
+ if infinity:
+ return
+ else:
+ dtstarts = (dtstart,)
+
+ for dtstart in dtstarts:
+ dtstart_is_datetime = isinstance(dtstart, datetime)
+ dtstart = _date_to_datetime(dtstart)
+
+ if dtstart_is_datetime:
+ # Line 1
+ if range_fn(dtstart, dtstart + SECOND, is_recurrence):
+ return
+ else:
+ # Line 2
+ if range_fn(dtstart, dtstart + DAY, is_recurrence):
+ return
+
+ else:
+ # Match a property
+ child = getattr(vobject_item, child_name.lower())
+ if isinstance(child, date):
+ range_fn(child, child + DAY, False)
+ elif isinstance(child, datetime):
+ range_fn(child, child + SECOND, False)
+
+
+def _text_match(vobject_item, filter_, child_name, ns, attrib_name=None):
+ """Check whether the ``item`` matches the text-match ``filter_``.
+
+ See rfc4791-9.7.5.
+
+ """
+ # TODO: collations are not supported, but the default ones needed
+ # for DAV servers are actually pretty useless. Texts are lowered to
+ # be case-insensitive, almost as the "i;ascii-casemap" value.
+ text = next(filter_.itertext()).lower()
+ match_type = "contains"
+ if ns == "CR":
+ match_type = filter_.get("match-type", match_type)
+
+ def match(value):
+ value = value.lower()
+ if match_type == "equals":
+ return value == text
+ if match_type == "contains":
+ return text in value
+ if match_type == "starts-with":
+ return value.startswith(text)
+ if match_type == "ends-with":
+ return value.endswith(text)
+ raise ValueError("Unexpected text-match match-type: %r" % match_type)
+
+ children = getattr(vobject_item, "%s_list" % child_name, [])
+ if attrib_name:
+ condition = any(
+ match(attrib) for child in children
+ for attrib in child.params.get(attrib_name, []))
+ else:
+ condition = any(match(child.value) for child in children)
+ if filter_.get("negate-condition") == "yes":
+ return not condition
+ else:
+ return condition
+
+
+def _param_filter_match(vobject_item, filter_, parent_name, ns):
+ """Check whether the ``item`` matches the param-filter ``filter_``.
+
+ See rfc4791-9.7.3.
+
+ """
+ name = filter_.get("name").upper()
+ children = getattr(vobject_item, "%s_list" % parent_name, [])
+ condition = any(name in child.params for child in children)
+ if len(filter_):
+ if filter_[0].tag == _tag(ns, "text-match"):
+ return condition and _text_match(
+ vobject_item, filter_[0], parent_name, ns, name)
+ elif filter_[0].tag == _tag(ns, "is-not-defined"):
+ return not condition
+ else:
+ return condition
+
+
+def simplify_prefilters(filters, collection_tag="VCALENDAR"):
+ """Creates a simplified condition from ``filters``.
+
+ Returns a tuple (``tag``, ``start``, ``end``, ``simple``) where ``tag`` is
+ a string or None (match all) and ``start`` and ``end`` are POSIX
+ timestamps (as int). ``simple`` is a bool that indicates that ``filters``
+ and the simplified condition are identical.
+
+ """
+ flat_filters = tuple(chain.from_iterable(filters))
+ simple = len(flat_filters) <= 1
+ for col_filter in flat_filters:
+ if collection_tag != "VCALENDAR":
+ simple = False
+ break
+ if (col_filter.tag != _tag("C", "comp-filter") or
+ col_filter.get("name").upper() != "VCALENDAR"):
+ simple = False
+ continue
+ simple &= len(col_filter) <= 1
+ for comp_filter in col_filter:
+ if comp_filter.tag != _tag("C", "comp-filter"):
+ simple = False
+ continue
+ tag = comp_filter.get("name").upper()
+ if comp_filter.find(_tag("C", "is-not-defined")) is not None:
+ simple = False
+ continue
+ simple &= len(comp_filter) <= 1
+ for time_filter in comp_filter:
+ if tag not in ("VTODO", "VEVENT", "VJOURNAL"):
+ simple = False
+ break
+ if time_filter.tag != _tag("C", "time-range"):
+ simple = False
+ continue
+ start = time_filter.get("start")
+ end = time_filter.get("end")
+ if start:
+ start = math.floor(datetime.strptime(
+ start, "%Y%m%dT%H%M%SZ").replace(
+ tzinfo=timezone.utc).timestamp())
+ else:
+ start = TIMESTAMP_MIN
+ if end:
+ end = math.ceil(datetime.strptime(
+ end, "%Y%m%dT%H%M%SZ").replace(
+ tzinfo=timezone.utc).timestamp())
+ else:
+ end = TIMESTAMP_MAX
+ return tag, start, end, simple
+ return tag, TIMESTAMP_MIN, TIMESTAMP_MAX, simple
+ return None, TIMESTAMP_MIN, TIMESTAMP_MAX, simple
+
+
+def get_content_type(item):
+ """Get the content-type of an item with charset and component parameters.
+ """
+ mimetype = OBJECT_MIMETYPES[item.name]
+ encoding = item.collection.configuration.get("encoding", "request")
+ tag = item.component_name
+ content_type = "%s;charset=%s" % (mimetype, encoding)
+ if tag:
+ content_type += ";component=%s" % tag
+ return content_type
+
+
+def find_tag(vobject_item):
+ """Find component name from ``vobject_item``."""
+ if vobject_item.name == "VCALENDAR":
+ for component in vobject_item.components():
+ if component.name != "VTIMEZONE":
+ return component.name
+ return None
+
+
+def find_tag_and_time_range(vobject_item):
+ """Find component name and enclosing time range from ``vobject item``.
+
+ Returns a tuple (``tag``, ``start``, ``end``) where ``tag`` is a string
+ and ``start`` and ``end`` are POSIX timestamps (as int).
+
+ This is intended to be used for matching against simplified prefilters.
+
+ """
+ tag = find_tag(vobject_item)
+ if not tag:
+ return (None, TIMESTAMP_MIN, TIMESTAMP_MAX)
+ start = end = None
+
+ def range_fn(range_start, range_end, is_recurrence):
+ nonlocal start, end
+ if start is None or range_start < start:
+ start = range_start
+ if end is None or end < range_end:
+ end = range_end
+ return False
+
+ def infinity_fn(range_start):
+ nonlocal start, end
+ if start is None or range_start < start:
+ start = range_start
+ end = DATETIME_MAX
+ return True
+
+ _visit_time_ranges(vobject_item, tag, range_fn, infinity_fn)
+ if start is None:
+ start = DATETIME_MIN
+ if end is None:
+ end = DATETIME_MAX
+ return tag, math.floor(start.timestamp()), math.ceil(end.timestamp())
+
+
+def name_from_path(path, collection):
+ """Return Radicale item name from ``path``."""
+ path = path.strip("/") + "/"
+ start = collection.path + "/"
+ if not path.startswith(start):
+ raise ValueError("%r doesn't start with %r" % (path, start))
+ name = path[len(start):][:-1]
+ if name and not storage.is_safe_path_component(name):
+ raise ValueError("%r is not a component in collection %r" %
+ (name, collection.path))
+ return name
+
+
+def props_from_request(xml_request, actions=("set", "remove")):
+ """Return a list of properties as a dictionary."""
+ result = OrderedDict()
+ if xml_request is None:
+ return result
+
+ for action in actions:
+ action_element = xml_request.find(_tag("D", action))
+ if action_element is not None:
+ break
+ else:
+ action_element = xml_request
+
+ prop_element = action_element.find(_tag("D", "prop"))
+ if prop_element is not None:
+ for prop in prop_element:
+ if prop.tag == _tag("D", "resourcetype"):
+ for resource_type in prop:
+ if resource_type.tag == _tag("C", "calendar"):
+ result["tag"] = "VCALENDAR"
+ break
+ elif resource_type.tag == _tag("CR", "addressbook"):
+ result["tag"] = "VADDRESSBOOK"
+ break
+ elif prop.tag == _tag("C", "supported-calendar-component-set"):
+ result[_tag_from_clark(prop.tag)] = ",".join(
+ supported_comp.attrib["name"]
+ for supported_comp in prop
+ if supported_comp.tag == _tag("C", "comp"))
+ else:
+ result[_tag_from_clark(prop.tag)] = prop.text
+
+ return result
+
+
+def delete(base_prefix, path, collection, href=None):
+ """Read and answer DELETE requests.
+
+ Read rfc4918-9.6 for info.
+
+ """
+ collection.delete(href)
+
+ multistatus = ET.Element(_tag("D", "multistatus"))
+ response = ET.Element(_tag("D", "response"))
+ multistatus.append(response)
+
+ href = ET.Element(_tag("D", "href"))
+ href.text = _href(base_prefix, path)
+ response.append(href)
+
+ status = ET.Element(_tag("D", "status"))
+ status.text = _response(200)
+ response.append(status)
+
+ return multistatus
+
+
+def propfind(base_prefix, path, xml_request, read_collections,
+ write_collections, user):
+ """Read and answer PROPFIND requests.
+
+ Read rfc4918-9.1 for info.
+
+ The collections parameter is a list of collections that are to be included
+ in the output.
+
+ """
+ # A client may choose not to submit a request body. An empty PROPFIND
+ # request body MUST be treated as if it were an 'allprop' request.
+ top_tag = (xml_request[0] if xml_request is not None else
+ ET.Element(_tag("D", "allprop")))
+
+ props = ()
+ allprop = False
+ propname = False
+ if top_tag.tag == _tag("D", "allprop"):
+ allprop = True
+ elif top_tag.tag == _tag("D", "propname"):
+ propname = True
+ elif top_tag.tag == _tag("D", "prop"):
+ props = [prop.tag for prop in top_tag]
+
+ if _tag("D", "current-user-principal") in props and not user:
+ # Ask for authentication
+ # Returning the DAV:unauthenticated pseudo-principal as specified in
+ # RFC 5397 doesn't seem to work with DAVdroid.
+ return client.FORBIDDEN, None
+
+ # Writing answer
+ multistatus = ET.Element(_tag("D", "multistatus"))
+
+ collections = []
+ for collection in write_collections:
+ collections.append(collection)
+ response = _propfind_response(
+ base_prefix, path, collection, props, user, write=True,
+ allprop=allprop, propname=propname)
+ if response:
+ multistatus.append(response)
+ for collection in read_collections:
+ if collection in collections:
+ continue
+ response = _propfind_response(
+ base_prefix, path, collection, props, user, write=False,
+ allprop=allprop, propname=propname)
+ if response:
+ multistatus.append(response)
+
+ return client.MULTI_STATUS, multistatus
+
+
+def _propfind_response(base_prefix, path, item, props, user, write=False,
+ propname=False, allprop=False):
+ """Build and return a PROPFIND response."""
+ if propname and allprop or (props and (propname or allprop)):
+ raise ValueError("Only use one of props, propname and allprops")
+ is_collection = isinstance(item, storage.BaseCollection)
+ if is_collection:
+ is_leaf = item.get_meta("tag") in ("VADDRESSBOOK", "VCALENDAR")
+ collection = item
+ else:
+ collection = item.collection
+
+ response = ET.Element(_tag("D", "response"))
+
+ href = ET.Element(_tag("D", "href"))
+ if is_collection:
+ # Some clients expect collections to end with /
+ uri = "/%s/" % item.path if item.path else "/"
+ else:
+ uri = "/" + posixpath.join(collection.path, item.href)
+
+ href.text = _href(base_prefix, uri)
+ response.append(href)
+
+ propstat404 = ET.Element(_tag("D", "propstat"))
+ propstat200 = ET.Element(_tag("D", "propstat"))
+ response.append(propstat200)
+
+ prop200 = ET.Element(_tag("D", "prop"))
+ propstat200.append(prop200)
+
+ prop404 = ET.Element(_tag("D", "prop"))
+ propstat404.append(prop404)
+
+ if propname or allprop:
+ props = []
+ # Should list all properties that can be retrieved by the code below
+ props.append(_tag("D", "principal-collection-set"))
+ props.append(_tag("D", "current-user-principal"))
+ props.append(_tag("D", "current-user-privilege-set"))
+ props.append(_tag("D", "supported-report-set"))
+ props.append(_tag("D", "resourcetype"))
+ props.append(_tag("D", "owner"))
+
+ if is_collection and collection.is_principal:
+ props.append(_tag("C", "calendar-user-address-set"))
+ props.append(_tag("D", "principal-URL"))
+ props.append(_tag("CR", "addressbook-home-set"))
+ props.append(_tag("C", "calendar-home-set"))
+
+ if not is_collection or is_leaf:
+ props.append(_tag("D", "getetag"))
+ props.append(_tag("D", "getlastmodified"))
+ props.append(_tag("D", "getcontenttype"))
+ props.append(_tag("D", "getcontentlength"))
+
+ if is_collection:
+ if is_leaf:
+ props.append(_tag("D", "displayname"))
+ props.append(_tag("D", "sync-token"))
+ if collection.get_meta("tag") == "VCALENDAR":
+ props.append(_tag("CS", "getctag"))
+ props.append(_tag("C", "supported-calendar-component-set"))
+
+ meta = item.get_meta()
+ for tag in meta:
+ if tag == "tag":
+ continue
+ clark_tag = _tag_from_human(tag)
+ if clark_tag not in props:
+ props.append(clark_tag)
+
+ if propname:
+ for tag in props:
+ prop200.append(ET.Element(tag))
+ props = ()
+
+ for tag in props:
+ element = ET.Element(tag)
+ is404 = False
+ if tag == _tag("D", "getetag"):
+ if not is_collection or is_leaf:
+ element.text = item.etag
+ else:
+ is404 = True
+ elif tag == _tag("D", "getlastmodified"):
+ if not is_collection or is_leaf:
+ element.text = item.last_modified
+ else:
+ is404 = True
+ elif tag == _tag("D", "principal-collection-set"):
+ tag = ET.Element(_tag("D", "href"))
+ tag.text = _href(base_prefix, "/")
+ element.append(tag)
+ elif (tag in (_tag("C", "calendar-user-address-set"),
+ _tag("D", "principal-URL"),
+ _tag("CR", "addressbook-home-set"),
+ _tag("C", "calendar-home-set")) and
+ collection.is_principal and is_collection):
+ tag = ET.Element(_tag("D", "href"))
+ tag.text = _href(base_prefix, path)
+ element.append(tag)
+ elif tag == _tag("C", "supported-calendar-component-set"):
+ human_tag = _tag_from_clark(tag)
+ if is_collection and is_leaf:
+ meta = item.get_meta(human_tag)
+ if meta:
+ components = meta.split(",")
+ else:
+ components = ("VTODO", "VEVENT", "VJOURNAL")
+ for component in components:
+ comp = ET.Element(_tag("C", "comp"))
+ comp.set("name", component)
+ element.append(comp)
+ else:
+ is404 = True
+ elif tag == _tag("D", "current-user-principal"):
+ if user:
+ tag = ET.Element(_tag("D", "href"))
+ tag.text = _href(base_prefix, "/%s/" % user)
+ element.append(tag)
+ else:
+ element.append(ET.Element(_tag("D", "unauthenticated")))
+ elif tag == _tag("D", "current-user-privilege-set"):
+ privileges = [("D", "read")]
+ if write:
+ privileges.append(("D", "all"))
+ privileges.append(("D", "write"))
+ privileges.append(("D", "write-properties"))
+ privileges.append(("D", "write-content"))
+ for ns, privilege_name in privileges:
+ privilege = ET.Element(_tag("D", "privilege"))
+ privilege.append(ET.Element(_tag(ns, privilege_name)))
+ element.append(privilege)
+ elif tag == _tag("D", "supported-report-set"):
+ # These 3 reports are not implemented
+ reports = [
+ ("D", "expand-property"),
+ ("D", "principal-search-property-set"),
+ ("D", "principal-property-search")]
+ if is_collection and is_leaf:
+ reports.append(("D", "sync-collection"))
+ if item.get_meta("tag") == "VADDRESSBOOK":
+ reports.append(("CR", "addressbook-multiget"))
+ reports.append(("CR", "addressbook-query"))
+ elif item.get_meta("tag") == "VCALENDAR":
+ reports.append(("C", "calendar-multiget"))
+ reports.append(("C", "calendar-query"))
+ for ns, report_name in reports:
+ supported = ET.Element(_tag("D", "supported-report"))
+ report_tag = ET.Element(_tag("D", "report"))
+ supported_report_tag = ET.Element(_tag(ns, report_name))
+ report_tag.append(supported_report_tag)
+ supported.append(report_tag)
+ element.append(supported)
+ elif tag == _tag("D", "getcontentlength"):
+ if not is_collection or is_leaf:
+ encoding = collection.configuration.get("encoding", "request")
+ element.text = str(len(item.serialize().encode(encoding)))
+ else:
+ is404 = True
+ elif tag == _tag("D", "owner"):
+ # return empty element, if no owner available (rfc3744-5.1)
+ if collection.owner:
+ tag = ET.Element(_tag("D", "href"))
+ tag.text = _href(base_prefix, "/%s/" % collection.owner)
+ element.append(tag)
+ elif is_collection:
+ if tag == _tag("D", "getcontenttype"):
+ if is_leaf:
+ element.text = MIMETYPES[item.get_meta("tag")]
+ else:
+ is404 = True
+ elif tag == _tag("D", "resourcetype"):
+ if item.is_principal:
+ tag = ET.Element(_tag("D", "principal"))
+ element.append(tag)
+ if is_leaf:
+ if item.get_meta("tag") == "VADDRESSBOOK":
+ tag = ET.Element(_tag("CR", "addressbook"))
+ element.append(tag)
+ elif item.get_meta("tag") == "VCALENDAR":
+ tag = ET.Element(_tag("C", "calendar"))
+ element.append(tag)
+ tag = ET.Element(_tag("D", "collection"))
+ element.append(tag)
+ elif tag == _tag("RADICALE", "displayname"):
+ # Only for internal use by the web interface
+ displayname = item.get_meta("D:displayname")
+ if displayname is not None:
+ element.text = displayname
+ else:
+ is404 = True
+ elif tag == _tag("D", "displayname"):
+ displayname = item.get_meta("D:displayname")
+ if not displayname and is_leaf:
+ displayname = item.path
+ if displayname is not None:
+ element.text = displayname
+ else:
+ is404 = True
+ elif tag == _tag("CS", "getctag"):
+ if is_leaf:
+ element.text = item.etag
+ else:
+ is404 = True
+ elif tag == _tag("D", "sync-token"):
+ if is_leaf:
+ element.text, _ = item.sync()
+ else:
+ is404 = True
+ else:
+ human_tag = _tag_from_clark(tag)
+ meta = item.get_meta(human_tag)
+ if meta is not None:
+ element.text = meta
+ else:
+ is404 = True
+ # Not for collections
+ elif tag == _tag("D", "getcontenttype"):
+ element.text = get_content_type(item)
+ elif tag == _tag("D", "resourcetype"):
+ # resourcetype must be returned empty for non-collection elements
+ pass
+ else:
+ is404 = True
+
+ if is404:
+ prop404.append(element)
+ else:
+ prop200.append(element)
+
+ status200 = ET.Element(_tag("D", "status"))
+ status200.text = _response(200)
+ propstat200.append(status200)
+
+ status404 = ET.Element(_tag("D", "status"))
+ status404.text = _response(404)
+ propstat404.append(status404)
+ if len(prop404):
+ response.append(propstat404)
+
+ return response
+
+
def _add_propstat_to(element, tag, status_number):
    """Add a PROPSTAT response structure to an element.

    Appends the PROPSTAT answer structure defined in rfc4918-9.1 to the
    given ``element``, covering the property ``tag`` with the given
    ``status_number``.

    """
    propstat = ET.SubElement(element, _tag("D", "propstat"))
    prop = ET.SubElement(propstat, _tag("D", "prop"))

    # ``tag`` may already be in Clark notation ("{ns}name") or in the
    # short "PREFIX:name" form used for stored metadata keys.
    if "{" in tag:
        clark_tag = tag
    else:
        clark_tag = _tag(*tag.split(":", 1))
    ET.SubElement(prop, clark_tag)

    status = ET.SubElement(propstat, _tag("D", "status"))
    status.text = _response(status_number)
+
+
def proppatch(base_prefix, path, xml_request, collection):
    """Read and answer PROPPATCH requests.

    Read rfc4918-9.2 for info.

    """
    props_to_set = props_from_request(xml_request, actions=("set",))
    props_to_remove = props_from_request(xml_request, actions=("remove",))

    multistatus = ET.Element(_tag("D", "multistatus"))
    response = ET.SubElement(multistatus, _tag("D", "response"))

    href = ET.SubElement(response, _tag("D", "href"))
    href.text = _href(base_prefix, path)

    # Apply all changes to a copy of the collection's metadata and report
    # a 200 propstat for every requested property, then store the result.
    new_props = collection.get_meta()
    for short_name, value in props_to_set.items():
        new_props[short_name] = value
        _add_propstat_to(response, short_name, 200)
    for short_name in props_to_remove:
        # Removing a property that does not exist is not an error.
        new_props.pop(short_name, None)
        _add_propstat_to(response, short_name, 200)
    storage.check_and_sanitize_props(new_props)
    collection.set_meta_all(new_props)

    return multistatus
+
+
def report(base_prefix, path, xml_request, collection):
    """Read and answer REPORT requests.

    Read rfc3253-3.6 for info.

    ``base_prefix`` is the script prefix stripped from request paths,
    ``path`` the sanitized request path, ``xml_request`` the parsed request
    body (or ``None``) and ``collection`` the collection the request is
    issued against.  Returns a ``(status, xml_answer)`` tuple.

    """
    logger = collection.logger
    multistatus = ET.Element(_tag("D", "multistatus"))
    if xml_request is None:
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (
            _tag("D", "principal-search-property-set"),
            _tag("D", "principal-property-search"),
            _tag("D", "expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       _tag_from_clark(root.tag), path)
        return client.MULTI_STATUS, multistatus
    # Reject report types that don't match the collection's tag.
    if (root.tag == _tag("C", "calendar-multiget") and
            collection.get_meta("tag") != "VCALENDAR" or
            root.tag == _tag("CR", "addressbook-multiget") and
            collection.get_meta("tag") != "VADDRESSBOOK" or
            root.tag == _tag("D", "sync-collection") and
            collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       _tag_from_clark(root.tag), path)
        return (client.PRECONDITION_FAILED,
                _webdav_error("D", "supported-report"))
    prop_element = root.find(_tag("D", "prop"))
    props = (
        [prop.tag for prop in prop_element]
        if prop_element is not None else [])

    if root.tag in (
            _tag("C", "calendar-multiget"),
            _tag("CR", "addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(_tag("D", "href")):
            href_path = storage.sanitize_path(
                unquote(urlparse(href_element.text).path))
            if (href_path + "/").startswith(base_prefix + "/"):
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r", href_path, path)
    elif root.tag == _tag("D", "sync-collection"):
        old_sync_token_element = root.find(_tag("D", "sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            return (client.PRECONDITION_FAILED,
                    _webdav_error("D", "valid-sync-token"))
        hreferences = ("/" + posixpath.join(collection.path, n) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(_tag("D", "sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        hreferences = (path,)
    filters = (
        root.findall("./%s" % _tag("C", "filter")) +
        root.findall("./%s" % _tag("CR", "filter")))

    def retrieve_items(collection, hreferences, multistatus):
        """Retrieves all items that are referenced in ``hreferences`` from
        ``collection`` and adds 404 responses for missing and invalid items
        to ``multistatus``."""
        collection_requested = False

        def get_names():
            """Extracts all names from references in ``hreferences`` and adds
            404 responses for invalid references to ``multistatus``.
            If the whole collections is referenced ``collection_requested``
            gets set to ``True``."""
            nonlocal collection_requested
            for hreference in hreferences:
                try:
                    name = name_from_path(hreference, collection)
                except ValueError as e:
                    logger.warning("Skipping invalid path %r in REPORT request"
                                   " on %r: %s", hreference, path, e)
                    response = _item_response(base_prefix, hreference,
                                              found_item=False)
                    multistatus.append(response)
                    continue
                if name:
                    # Reference is an item
                    yield name
                else:
                    # Reference is a collection
                    collection_requested = True

        for name, item in collection.get_multi2(get_names()):
            if not item:
                uri = "/" + posixpath.join(collection.path, name)
                response = _item_response(base_prefix, uri,
                                          found_item=False)
                multistatus.append(response)
            else:
                # Items fetched by name still need filter matching below.
                yield item, False
        if collection_requested:
            yield from collection.get_all_filtered(filters)

    def match(item, filter_):
        """Check if ``item`` matches the CalDAV/CardDAV ``filter_``.

        Raises ``ValueError`` for malformed or unsupported filters.
        """
        tag = collection.get_meta("tag")
        # BUGFIX: the original compared ``filter_.tag`` against
        # ``_tag("C", filter_)`` — passing the filter element itself where
        # the literal name "filter" belongs — and used ``!=``, so the check
        # was effectively always true.  Compare against the proper Clark
        # name with ``==`` instead (same for the CR namespace below).
        if tag == "VCALENDAR" and filter_.tag == _tag("C", "filter"):
            if len(filter_) == 0:
                return True
            if len(filter_) > 1:
                raise ValueError("Filter with %d children" % len(filter_))
            if filter_[0].tag != _tag("C", "comp-filter"):
                raise ValueError("Unexpected %r in filter" % filter_[0].tag)
            return _comp_match(item, filter_[0])
        if tag == "VADDRESSBOOK" and filter_.tag == _tag("CR", "filter"):
            for child in filter_:
                if child.tag != _tag("CR", "prop-filter"):
                    raise ValueError("Unexpected %r in filter" % child.tag)
            # rfc6352-10.5.1: "test" defaults to "anyof"
            test = filter_.get("test", "anyof")
            if test == "anyof":
                return any(_prop_match(item.item, f, "CR") for f in filter_)
            if test == "allof":
                return all(_prop_match(item.item, f, "CR") for f in filter_)
            raise ValueError("Unsupported filter test: %r" % test)
        raise ValueError("unsupported filter %r for %r" % (filter_.tag, tag))

    for item, filters_matched in retrieve_items(collection, hreferences,
                                                multistatus):
        if filters and not filters_matched:
            try:
                if not all(match(item, filter_) for filter_ in filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e

        found_props = []
        not_found_props = []

        for tag in props:
            element = ET.Element(tag)
            if tag == _tag("D", "getetag"):
                element.text = item.etag
                found_props.append(element)
            elif tag == _tag("D", "getcontenttype"):
                element.text = get_content_type(item)
                found_props.append(element)
            elif tag in (
                    _tag("C", "calendar-data"),
                    _tag("CR", "address-data")):
                element.text = item.serialize()
                found_props.append(element)
            else:
                not_found_props.append(element)

        uri = "/" + posixpath.join(collection.path, item.href)
        multistatus.append(_item_response(
            base_prefix, uri, found_props=found_props,
            not_found_props=not_found_props, found_item=True))

    return client.MULTI_STATUS, multistatus
+
+
def _item_response(base_prefix, href, found_props=(), not_found_props=(),
                   found_item=True):
    """Build a ``D:response`` element for a single item.

    When ``found_item`` is true the properties in ``found_props`` and
    ``not_found_props`` are grouped into 200 and 404 ``propstat`` sections;
    otherwise a plain 404 status is attached to the response.

    """
    response = ET.Element(_tag("D", "response"))

    href_element = ET.SubElement(response, _tag("D", "href"))
    href_element.text = _href(base_prefix, href)

    if not found_item:
        # The referenced item does not exist at all.
        status = ET.SubElement(response, _tag("D", "status"))
        status.text = _response(404)
        return response

    for code, props in ((200, found_props), (404, not_found_props)):
        if not props:
            continue
        propstat = ET.Element(_tag("D", "propstat"))
        prop_element = ET.SubElement(propstat, _tag("D", "prop"))
        prop_element.extend(props)
        status = ET.SubElement(propstat, _tag("D", "status"))
        status.text = _response(code)
        response.append(propstat)

    return response