code (string) | repo_name (string) | path (string) | language (string) | license (string) | size (int64)
---|---|---|---|---|---
# This file is distributed under the same license as the Django package.
#
# Translators:
# Machaku <bmachaku@gmail.com>, 2013-2014
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-23 18:54+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Swahili (http://www.transifex.com/django/django/language/"
"sw/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: sw\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
msgid "Sites"
msgstr "Tovuti"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Jina la kikoa haliwezi kuwa na nafasi yeyote kati yake."
msgid "domain name"
msgstr "jina la kikoa"
msgid "display name"
msgstr "jina"
msgid "site"
msgstr "tovuti"
msgid "sites"
msgstr "tovuti"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/sw/LC_MESSAGES/django.po | po | mit | 901 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Jannis Leidel <jannis@leidel.info>, 2011
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Tamil (http://www.transifex.com/django/django/language/ta/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: ta\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
msgid "Sites"
msgstr ""
msgid "The domain name cannot contain any spaces or tabs."
msgstr ""
msgid "domain name"
msgstr "களப் பெயர்"
msgid "display name"
msgstr "காட்டும் பெயர்"
msgid "site"
msgstr "வலைத்தளம்"
msgid "sites"
msgstr "வலைத்தளங்கள்"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/ta/LC_MESSAGES/django.po | po | mit | 939 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Jannis Leidel <jannis@leidel.info>, 2011
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Telugu (http://www.transifex.com/django/django/language/te/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: te\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
msgid "Sites"
msgstr ""
msgid "The domain name cannot contain any spaces or tabs."
msgstr ""
msgid "domain name"
msgstr "డొమైన్ నామము"
msgid "display name"
msgstr "కనిపిచ్చే పేరు"
msgid "site"
msgstr "సైట్"
msgid "sites"
msgstr "సైట్లు"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/te/LC_MESSAGES/django.po | po | mit | 912 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Surush Sufiew <siriusproger@gmail.com>, 2020
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2020-05-15 00:35+0000\n"
"Last-Translator: Surush Sufiew <siriusproger@gmail.com>\n"
"Language-Team: Tajik (http://www.transifex.com/django/django/language/tg/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: tg\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
msgid "Sites"
msgstr "Сомонаҳо"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Номи домен наметавонад аз фосилаҳо ва табулятсия иборат бошад."
msgid "domain name"
msgstr "номи доменӣ"
msgid "display name"
msgstr "номи инъикосшуда"
msgid "site"
msgstr "сомона"
msgid "sites"
msgstr "сомонаҳо"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/tg/LC_MESSAGES/django.po | po | mit | 1,026 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Jannis Leidel <jannis@leidel.info>, 2011
# Kowit Charoenratchatabhan <kowit.s.c@gmail.com>, 2013,2018
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2018-05-06 08:37+0000\n"
"Last-Translator: Kowit Charoenratchatabhan <kowit.s.c@gmail.com>\n"
"Language-Team: Thai (http://www.transifex.com/django/django/language/th/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: th\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr "ไซต์"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "ชื่อโดนเมนต้องไม่ไม่ช่องว่างหรือแท็บ"
msgid "domain name"
msgstr "ชื่อโดเมน"
msgid "display name"
msgstr "ชื่อที่แสดง"
msgid "site"
msgstr "ไซต์"
msgid "sites"
msgstr "ไซต์"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/th/LC_MESSAGES/django.po | po | mit | 1,077 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Resulkary <resulsaparov@gmail.com>, 2020
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2020-07-06 11:50+0000\n"
"Last-Translator: Resulkary <resulsaparov@gmail.com>\n"
"Language-Team: Turkmen (http://www.transifex.com/django/django/language/"
"tk/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: tk\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
msgid "Sites"
msgstr "Sahypalar"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Domen adynda boşluklar ýa-da goýmalar bolup bilmez."
msgid "domain name"
msgstr "domen ady"
msgid "display name"
msgstr "görkezilýän ady"
msgid "site"
msgstr "sahypa"
msgid "sites"
msgstr "sahypalar"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/tk/LC_MESSAGES/django.po | po | mit | 917 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Ahmet Emre Aladağ <emre.aladag@isik.edu.tr>, 2014
# BouRock, 2014
# Caner Başaran <basaran.caner@protonmail.com>, 2013
# Jannis Leidel <jannis@leidel.info>, 2011
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-23 18:54+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Turkish (http://www.transifex.com/django/django/language/"
"tr/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: tr\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
msgid "Sites"
msgstr "Siteler"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Etki alanı adı, herhangi bir boşluk ya da sekme içeremez."
msgid "domain name"
msgstr "etki alanı adı"
msgid "display name"
msgstr "görünen isim"
msgid "site"
msgstr "site"
msgid "sites"
msgstr "siteler"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/tr/LC_MESSAGES/django.po | po | mit | 1,043 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Azat Khasanshin <lordofbazuks@gmail.com>, 2011
# v_ildar <v_ildar@bk.ru>, 2014
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Tatar (http://www.transifex.com/django/django/language/tt/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: tt\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr "Сайтлар"
msgid "The domain name cannot contain any spaces or tabs."
msgstr ""
msgid "domain name"
msgstr "домен исеме"
msgid "display name"
msgstr "чагылдырылган исем"
msgid "site"
msgstr "сайт"
msgid "sites"
msgstr "сайтлар"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/tt/LC_MESSAGES/django.po | po | mit | 930 |
# This file is distributed under the same license as the Django package.
#
# Translators:
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2014-10-05 20:13+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Udmurt (http://www.transifex.com/projects/p/django/language/"
"udm/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: udm\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr ""
msgid "The domain name cannot contain any spaces or tabs."
msgstr ""
msgid "domain name"
msgstr ""
msgid "display name"
msgstr ""
msgid "site"
msgstr ""
msgid "sites"
msgstr ""
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/udm/LC_MESSAGES/django.po | po | mit | 767 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Oleksandr Chernihov <o.chernihov@gmail.com>, 2014
# Jannis Leidel <jannis@leidel.info>, 2011
# Alex Bolotov <oleksandr.bolotov@gmail.com>, 2013
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-23 18:54+0000\n"
"Last-Translator: Mykola Zamkovoi <nickzam@gmail.com>\n"
"Language-Team: Ukrainian (http://www.transifex.com/django/django/language/"
"uk/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: uk\n"
"Plural-Forms: nplurals=4; plural=(n % 1 == 0 && n % 10 == 1 && n % 100 != "
"11 ? 0 : n % 1 == 0 && n % 10 >= 2 && n % 10 <= 4 && (n % 100 < 12 || n % "
"100 > 14) ? 1 : n % 1 == 0 && (n % 10 ==0 || (n % 10 >=5 && n % 10 <=9) || "
"(n % 100 >=11 && n % 100 <=14 )) ? 2: 3);\n"
msgid "Sites"
msgstr "Сайти"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Доменне ім'я не може містити пробіли або символи табуляції."
msgid "domain name"
msgstr "доменне ім'я"
msgid "display name"
msgstr "відображуване ім'я"
msgid "site"
msgstr "сайт"
msgid "sites"
msgstr "сайти"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/uk/LC_MESSAGES/django.po | po | mit | 1,341 |
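The Ukrainian record above carries the most involved Plural-Forms expression in this batch, selecting among four plural forms. As a minimal sketch, the same rule transcribed into Python (the function name is ours, purely for illustration):

```python
def uk_plural_index(n):
    # Transcription of the Plural-Forms expression from the header above.
    if n % 1 == 0 and n % 10 == 1 and n % 100 != 11:
        return 0  # 1, 21, 31, ... (but not 11)
    if n % 1 == 0 and 2 <= n % 10 <= 4 and not (12 <= n % 100 <= 14):
        return 1  # 2-4, 22-24, ... (but not 12-14)
    if n % 1 == 0 and (n % 10 == 0 or 5 <= n % 10 <= 9 or 11 <= n % 100 <= 14):
        return 2  # 0, 5-20, 25-30, ...
    return 3      # fractional numbers


assert [uk_plural_index(n) for n in (1, 2, 5, 11, 21, 1.5)] == [0, 1, 2, 2, 0, 3]
```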
# This file is distributed under the same license as the Django package.
#
# Translators:
# Mansoorulhaq Mansoor <mnsrknp@gmail.com>, 2011
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Urdu (http://www.transifex.com/django/django/language/ur/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: ur\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
msgid "Sites"
msgstr ""
msgid "The domain name cannot contain any spaces or tabs."
msgstr ""
msgid "domain name"
msgstr "ڈومین کا نام"
msgid "display name"
msgstr "ظاھر ھونے والا نام"
msgid "site"
msgstr "سائٹ"
msgid "sites"
msgstr "سائٹس"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/ur/LC_MESSAGES/django.po | po | mit | 885 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Nuruddin Iminokhunov <nuruddin.iminohunov@gmail.com>, 2016
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-23 01:18+0000\n"
"Last-Translator: Nuruddin Iminokhunov <nuruddin.iminohunov@gmail.com>\n"
"Language-Team: Uzbek (http://www.transifex.com/django/django/language/uz/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: uz\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr "Saytlar"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Domen ismi tab`lar va bo'shliqlarsiz bo'lishi kerak"
msgid "domain name"
msgstr "domen nomi"
msgid "display name"
msgstr "ko'rsatiladigan ismi"
msgid "site"
msgstr "sayt"
msgid "sites"
msgstr "saytlar"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/uz/LC_MESSAGES/django.po | po | mit | 935 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Jannis Leidel <jannis@leidel.info>, 2011
# Thanh Le Viet <lethanhx2k@gmail.com>, 2013
# Tran <hongdiepkien@gmail.com>, 2011
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-23 18:54+0000\n"
"Last-Translator: Tran Van <vantxm@yahoo.co.uk>\n"
"Language-Team: Vietnamese (http://www.transifex.com/django/django/language/"
"vi/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: vi\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr ""
msgid "The domain name cannot contain any spaces or tabs."
msgstr "Tên miền không gồm kí tự trống hoặc tab"
msgid "domain name"
msgstr "Tên miền"
msgid "display name"
msgstr "Tên hiển thị"
msgid "site"
msgstr "trang web"
msgid "sites"
msgstr "các trang web"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/vi/LC_MESSAGES/django.po | po | mit | 989 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Jannis Leidel <jannis@leidel.info>, 2011
# Ronald White <ouyanghongyu@gmail.com>, 2014
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
"Last-Translator: Jannis Leidel <jannis@leidel.info>\n"
"Language-Team: Chinese (China) (http://www.transifex.com/django/django/"
"language/zh_CN/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: zh_CN\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr "站点"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "域名不能包含任何空格或制表符。"
msgid "domain name"
msgstr "域名"
msgid "display name"
msgstr "显示名称"
msgid "site"
msgstr "站点"
msgid "sites"
msgstr "站点"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.po | po | mit | 946 |
# This file is distributed under the same license as the Django package.
#
# Translators:
# Chen Chun-Chia <ccc.larc@gmail.com>, 2015
# Jannis Leidel <jannis@leidel.info>, 2011
# mail6543210 <mail6543210@yahoo.com.tw>, 2013
# Tzu-ping Chung <uranusjr@gmail.com>, 2016
msgid ""
msgstr ""
"Project-Id-Version: django\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
"Last-Translator: Tzu-ping Chung <uranusjr@gmail.com>\n"
"Language-Team: Chinese (Taiwan) (http://www.transifex.com/django/django/"
"language/zh_TW/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: zh_TW\n"
"Plural-Forms: nplurals=1; plural=0;\n"
msgid "Sites"
msgstr "網站"
msgid "The domain name cannot contain any spaces or tabs."
msgstr "網域名稱不能包含空格或定位字元。"
msgid "domain name"
msgstr "網域名稱"
msgid "display name"
msgstr "顯示名稱"
msgid "site"
msgstr "網站"
msgid "sites"
msgstr "網站"
| castiel248/Convert | Lib/site-packages/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.po | po | mit | 1,046 |
"""
Creates the default Site object.
"""
from django.apps import apps as global_apps
from django.conf import settings
from django.core.management.color import no_style
from django.db import DEFAULT_DB_ALIAS, connections, router
def create_default_site(
app_config,
verbosity=2,
interactive=True,
using=DEFAULT_DB_ALIAS,
apps=global_apps,
**kwargs,
):
try:
Site = apps.get_model("sites", "Site")
except LookupError:
return
if not router.allow_migrate_model(using, Site):
return
if not Site.objects.using(using).exists():
# The default settings set SITE_ID = 1, and some tests in Django's test
# suite rely on this value. However, if database sequences are reused
# (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
# the next id will be 1, so we coerce it. See #15573 and #16353. This
# can also crop up outside of tests - see #15346.
if verbosity >= 2:
print("Creating example.com Site object")
Site(
pk=getattr(settings, "SITE_ID", 1), domain="example.com", name="example.com"
).save(using=using)
# We set an explicit pk instead of relying on auto-incrementation,
# so we need to reset the database sequence. See #17415.
sequence_sql = connections[using].ops.sequence_reset_sql(no_style(), [Site])
if sequence_sql:
if verbosity >= 2:
print("Resetting sequence")
with connections[using].cursor() as cursor:
for command in sequence_sql:
cursor.execute(command)
| castiel248/Convert | Lib/site-packages/django/contrib/sites/management.py | Python | mit | 1,646 |
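`create_default_site` above is written as a `post_migrate` receiver (hence the `app_config`/`using`/`apps` parameters). A minimal sketch of how such a receiver gets wired up in an `AppConfig`, modeled on what `django.contrib.sites.apps` does; the config class name here is hypothetical:

```python
from django.apps import AppConfig
from django.db.models.signals import post_migrate


class MySitesConfig(AppConfig):
    # Hypothetical stand-in for django.contrib.sites.apps.SitesConfig.
    name = "django.contrib.sites"

    def ready(self):
        from django.contrib.sites.management import create_default_site

        # Ensure the example.com Site row exists after migrations run.
        post_migrate.connect(create_default_site, sender=self)
```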
from django.conf import settings
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
class CurrentSiteManager(models.Manager):
"Use this to limit objects to those associated with the current site."
use_in_migrations = True
def __init__(self, field_name=None):
super().__init__()
self.__field_name = field_name
def check(self, **kwargs):
errors = super().check(**kwargs)
errors.extend(self._check_field_name())
return errors
def _check_field_name(self):
field_name = self._get_field_name()
try:
field = self.model._meta.get_field(field_name)
except FieldDoesNotExist:
return [
checks.Error(
"CurrentSiteManager could not find a field named '%s'."
% field_name,
obj=self,
id="sites.E001",
)
]
        if not field.many_to_many and not isinstance(field, models.ForeignKey):
return [
checks.Error(
"CurrentSiteManager cannot use '%s.%s' as it is not a foreign key "
"or a many-to-many field."
% (self.model._meta.object_name, field_name),
obj=self,
id="sites.E002",
)
]
return []
def _get_field_name(self):
"""Return self.__field_name or 'site' or 'sites'."""
if not self.__field_name:
try:
self.model._meta.get_field("site")
except FieldDoesNotExist:
self.__field_name = "sites"
else:
self.__field_name = "site"
return self.__field_name
def get_queryset(self):
return (
super()
.get_queryset()
.filter(**{self._get_field_name() + "__id": settings.SITE_ID})
)
| castiel248/Convert | Lib/site-packages/django/contrib/sites/managers.py | Python | mit | 1,994 |
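As its docstring says, `CurrentSiteManager` limits querysets to the current site by filtering on `settings.SITE_ID`. A minimal usage sketch; the `Article` model is hypothetical:

```python
from django.contrib.sites.managers import CurrentSiteManager
from django.contrib.sites.models import Site
from django.db import models


class Article(models.Model):
    title = models.CharField(max_length=100)
    site = models.ForeignKey(Site, on_delete=models.CASCADE)

    objects = models.Manager()      # unfiltered default manager
    on_site = CurrentSiteManager()  # auto-detects the "site" field

# Article.on_site.all() is equivalent to
# Article.objects.filter(site__id=settings.SITE_ID)
```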
from django.utils.deprecation import MiddlewareMixin
from .shortcuts import get_current_site
class CurrentSiteMiddleware(MiddlewareMixin):
"""
    Middleware that sets the `site` attribute on the request object.
"""
def process_request(self, request):
request.site = get_current_site(request)
| castiel248/Convert | Lib/site-packages/django/contrib/sites/middleware.py | Python | mit | 309 |
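Using the middleware is just a settings entry; afterwards every request carries a `site` attribute. A sketch under that assumption (the view is hypothetical):

```python
# settings.py
MIDDLEWARE = [
    # ...
    "django.contrib.sites.middleware.CurrentSiteMiddleware",
]

# views.py
from django.http import HttpResponse


def current_domain(request):
    # Set by CurrentSiteMiddleware.process_request; may be a Site
    # or a RequestSite depending on INSTALLED_APPS.
    return HttpResponse(request.site.domain)
```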
import django.contrib.sites.models
from django.contrib.sites.models import _simple_domain_name_validator
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name="Site",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"domain",
models.CharField(
max_length=100,
verbose_name="domain name",
validators=[_simple_domain_name_validator],
),
),
("name", models.CharField(max_length=50, verbose_name="display name")),
],
options={
"ordering": ["domain"],
"db_table": "django_site",
"verbose_name": "site",
"verbose_name_plural": "sites",
},
bases=(models.Model,),
managers=[
("objects", django.contrib.sites.models.SiteManager()),
],
),
]
| castiel248/Convert | Lib/site-packages/django/contrib/sites/migrations/0001_initial.py | Python | mit | 1,361 |
import django.contrib.sites.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("sites", "0001_initial"),
]
operations = [
migrations.AlterField(
model_name="site",
name="domain",
field=models.CharField(
max_length=100,
unique=True,
validators=[django.contrib.sites.models._simple_domain_name_validator],
verbose_name="domain name",
),
),
]
| castiel248/Convert | Lib/site-packages/django/contrib/sites/migrations/0002_alter_domain_unique.py | Python | mit | 549 |
| castiel248/Convert | Lib/site-packages/django/contrib/sites/migrations/__init__.py | Python | mit | 0 |
import string
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.http.request import split_domain_port
from django.utils.translation import gettext_lazy as _
SITE_CACHE = {}
def _simple_domain_name_validator(value):
"""
    Validate that the given value contains no whitespace to prevent common
typos.
"""
checks = ((s in value) for s in string.whitespace)
if any(checks):
raise ValidationError(
_("The domain name cannot contain any spaces or tabs."),
code="invalid",
)
class SiteManager(models.Manager):
use_in_migrations = True
def _get_site_by_id(self, site_id):
if site_id not in SITE_CACHE:
site = self.get(pk=site_id)
SITE_CACHE[site_id] = site
return SITE_CACHE[site_id]
def _get_site_by_request(self, request):
host = request.get_host()
try:
# First attempt to look up the site by host with or without port.
if host not in SITE_CACHE:
SITE_CACHE[host] = self.get(domain__iexact=host)
return SITE_CACHE[host]
except Site.DoesNotExist:
# Fallback to looking up site after stripping port from the host.
domain, port = split_domain_port(host)
if domain not in SITE_CACHE:
SITE_CACHE[domain] = self.get(domain__iexact=domain)
return SITE_CACHE[domain]
def get_current(self, request=None):
"""
Return the current Site based on the SITE_ID in the project's settings.
If SITE_ID isn't defined, return the site with domain matching
request.get_host(). The ``Site`` object is cached the first time it's
retrieved from the database.
"""
from django.conf import settings
if getattr(settings, "SITE_ID", ""):
site_id = settings.SITE_ID
return self._get_site_by_id(site_id)
elif request:
return self._get_site_by_request(request)
raise ImproperlyConfigured(
'You\'re using the Django "sites framework" without having '
"set the SITE_ID setting. Create a site in your database and "
"set the SITE_ID setting or pass a request to "
"Site.objects.get_current() to fix this error."
)
def clear_cache(self):
"""Clear the ``Site`` object cache."""
global SITE_CACHE
SITE_CACHE = {}
def get_by_natural_key(self, domain):
return self.get(domain=domain)
class Site(models.Model):
domain = models.CharField(
_("domain name"),
max_length=100,
validators=[_simple_domain_name_validator],
unique=True,
)
name = models.CharField(_("display name"), max_length=50)
objects = SiteManager()
class Meta:
db_table = "django_site"
verbose_name = _("site")
verbose_name_plural = _("sites")
ordering = ["domain"]
def __str__(self):
return self.domain
def natural_key(self):
return (self.domain,)
def clear_site_cache(sender, **kwargs):
"""
Clear the cache (if primed) each time a site is saved or deleted.
"""
instance = kwargs["instance"]
using = kwargs["using"]
try:
del SITE_CACHE[instance.pk]
except KeyError:
pass
try:
del SITE_CACHE[Site.objects.using(using).get(pk=instance.pk).domain]
except (KeyError, Site.DoesNotExist):
pass
pre_save.connect(clear_site_cache, sender=Site)
pre_delete.connect(clear_site_cache, sender=Site)
| castiel248/Convert | Lib/site-packages/django/contrib/sites/models.py | Python | mit | 3,695 |
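A short sketch of the caching contract implemented above: `get_current()` hits the database once and then serves from the module-level `SITE_CACHE`; saving or deleting a `Site` invalidates the cache via the `pre_save`/`pre_delete` receivers:

```python
from django.contrib.sites.models import Site

# Assumes settings.SITE_ID is set (or pass a request instead).
first = Site.objects.get_current()   # one query; primes SITE_CACHE
second = Site.objects.get_current()  # served from SITE_CACHE, no query
assert first is second

Site.objects.clear_cache()           # manual reset of the module-level cache
```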
class RequestSite:
"""
A class that shares the primary interface of Site (i.e., it has ``domain``
and ``name`` attributes) but gets its data from an HttpRequest object
rather than from a database.
The save() and delete() methods raise NotImplementedError.
"""
def __init__(self, request):
self.domain = self.name = request.get_host()
def __str__(self):
return self.domain
def save(self, force_insert=False, force_update=False):
raise NotImplementedError("RequestSite cannot be saved.")
def delete(self):
raise NotImplementedError("RequestSite cannot be deleted.")
| castiel248/Convert | Lib/site-packages/django/contrib/sites/requests.py | Python | mit | 641 |
from django.apps import apps
from .requests import RequestSite
def get_current_site(request):
"""
Check if contrib.sites is installed and return either the current
``Site`` object or a ``RequestSite`` object based on the request.
"""
# Import is inside the function because its point is to avoid importing the
    # Site model when django.contrib.sites isn't installed.
if apps.is_installed("django.contrib.sites"):
from .models import Site
return Site.objects.get_current(request)
else:
return RequestSite(request)
| castiel248/Convert | Lib/site-packages/django/contrib/sites/shortcuts.py | Python | mit | 573 |
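`get_current_site()` is the usual entry point in application code because it works whether or not `django.contrib.sites` is installed. A minimal sketch; the view name is hypothetical:

```python
from django.contrib.sites.shortcuts import get_current_site
from django.http import HttpResponse


def site_banner(request):
    current = get_current_site(request)  # Site, or RequestSite as fallback
    return HttpResponse("%s (%s)" % (current.name, current.domain))
```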
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/__init__.py | Python | mit | 0 |
from django.apps import AppConfig
from django.contrib.staticfiles.checks import check_finders
from django.core import checks
from django.utils.translation import gettext_lazy as _
class StaticFilesConfig(AppConfig):
name = "django.contrib.staticfiles"
verbose_name = _("Static Files")
ignore_patterns = ["CVS", ".*", "*~"]
def ready(self):
checks.register(check_finders, checks.Tags.staticfiles)
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/apps.py | Python | mit | 423 |
from django.contrib.staticfiles.finders import get_finders
def check_finders(app_configs=None, **kwargs):
"""Check all registered staticfiles finders."""
errors = []
for finder in get_finders():
try:
finder_errors = finder.check()
except NotImplementedError:
pass
else:
errors.extend(finder_errors)
return errors
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/checks.py | Python | mit | 391 |
import functools
import os
from django.apps import apps
from django.conf import settings
from django.contrib.staticfiles import utils
from django.core.checks import Error, Warning
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage, Storage, default_storage
from django.utils._os import safe_join
from django.utils.functional import LazyObject, empty
from django.utils.module_loading import import_string
# Keep track of which directories the finders have searched for static files.
searched_locations = []
class BaseFinder:
"""
A base file finder to be used for custom staticfiles finder classes.
"""
def check(self, **kwargs):
raise NotImplementedError(
"subclasses may provide a check() method to verify the finder is "
"configured correctly."
)
def find(self, path, all=False):
"""
Given a relative file path, find an absolute file path.
        If the ``all`` parameter is False (default), return only the first found
        file path; if True, return a list of all found file paths.
"""
raise NotImplementedError(
"subclasses of BaseFinder must provide a find() method"
)
def list(self, ignore_patterns):
"""
        Given an optional list of paths to ignore, return a two-item iterable
        consisting of the relative path and storage instance.
"""
raise NotImplementedError(
"subclasses of BaseFinder must provide a list() method"
)
class FileSystemFinder(BaseFinder):
"""
A static files finder that uses the ``STATICFILES_DIRS`` setting
to locate files.
"""
def __init__(self, app_names=None, *args, **kwargs):
# List of locations with static files
self.locations = []
# Maps dir paths to an appropriate storage instance
self.storages = {}
for root in settings.STATICFILES_DIRS:
if isinstance(root, (list, tuple)):
prefix, root = root
else:
prefix = ""
if (prefix, root) not in self.locations:
self.locations.append((prefix, root))
for prefix, root in self.locations:
filesystem_storage = FileSystemStorage(location=root)
filesystem_storage.prefix = prefix
self.storages[root] = filesystem_storage
super().__init__(*args, **kwargs)
def check(self, **kwargs):
errors = []
if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
errors.append(
Error(
"The STATICFILES_DIRS setting is not a tuple or list.",
hint="Perhaps you forgot a trailing comma?",
id="staticfiles.E001",
)
)
return errors
for root in settings.STATICFILES_DIRS:
if isinstance(root, (list, tuple)):
prefix, root = root
if prefix.endswith("/"):
errors.append(
Error(
"The prefix %r in the STATICFILES_DIRS setting must "
"not end with a slash." % prefix,
id="staticfiles.E003",
)
)
if settings.STATIC_ROOT and os.path.abspath(
settings.STATIC_ROOT
) == os.path.abspath(root):
errors.append(
Error(
"The STATICFILES_DIRS setting should not contain the "
"STATIC_ROOT setting.",
id="staticfiles.E002",
)
)
if not os.path.isdir(root):
errors.append(
Warning(
f"The directory '{root}' in the STATICFILES_DIRS setting "
f"does not exist.",
id="staticfiles.W004",
)
)
return errors
def find(self, path, all=False):
"""
Look for files in the extra locations as defined in STATICFILES_DIRS.
"""
matches = []
for prefix, root in self.locations:
if root not in searched_locations:
searched_locations.append(root)
matched_path = self.find_location(root, path, prefix)
if matched_path:
if not all:
return matched_path
matches.append(matched_path)
return matches
def find_location(self, root, path, prefix=None):
"""
Find a requested static file in a location and return the found
absolute path (or ``None`` if no match).
"""
if prefix:
prefix = "%s%s" % (prefix, os.sep)
if not path.startswith(prefix):
return None
path = path[len(prefix) :]
path = safe_join(root, path)
if os.path.exists(path):
return path
def list(self, ignore_patterns):
"""
List all files in all locations.
"""
for prefix, root in self.locations:
# Skip nonexistent directories.
if os.path.isdir(root):
storage = self.storages[root]
for path in utils.get_files(storage, ignore_patterns):
yield path, storage
class AppDirectoriesFinder(BaseFinder):
"""
A static files finder that looks in the directory of each app as
specified in the source_dir attribute.
"""
storage_class = FileSystemStorage
source_dir = "static"
def __init__(self, app_names=None, *args, **kwargs):
# The list of apps that are handled
self.apps = []
# Mapping of app names to storage instances
self.storages = {}
app_configs = apps.get_app_configs()
if app_names:
app_names = set(app_names)
app_configs = [ac for ac in app_configs if ac.name in app_names]
for app_config in app_configs:
app_storage = self.storage_class(
os.path.join(app_config.path, self.source_dir)
)
if os.path.isdir(app_storage.location):
self.storages[app_config.name] = app_storage
if app_config.name not in self.apps:
self.apps.append(app_config.name)
super().__init__(*args, **kwargs)
def list(self, ignore_patterns):
"""
List all files in all app storages.
"""
for storage in self.storages.values():
if storage.exists(""): # check if storage location exists
for path in utils.get_files(storage, ignore_patterns):
yield path, storage
def find(self, path, all=False):
"""
Look for files in the app directories.
"""
matches = []
for app in self.apps:
app_location = self.storages[app].location
if app_location not in searched_locations:
searched_locations.append(app_location)
match = self.find_in_app(app, path)
if match:
if not all:
return match
matches.append(match)
return matches
def find_in_app(self, app, path):
"""
Find a requested static file in an app's static locations.
"""
storage = self.storages.get(app)
# Only try to find a file if the source dir actually exists.
if storage and storage.exists(path):
matched_path = storage.path(path)
if matched_path:
return matched_path
class BaseStorageFinder(BaseFinder):
"""
    A base static files finder meant to be extended
    with a custom storage class.
"""
storage = None
def __init__(self, storage=None, *args, **kwargs):
if storage is not None:
self.storage = storage
if self.storage is None:
raise ImproperlyConfigured(
"The staticfiles storage finder %r "
"doesn't have a storage class "
"assigned." % self.__class__
)
# Make sure we have a storage instance here.
if not isinstance(self.storage, (Storage, LazyObject)):
self.storage = self.storage()
super().__init__(*args, **kwargs)
def find(self, path, all=False):
"""
Look for files in the default file storage, if it's local.
"""
try:
self.storage.path("")
except NotImplementedError:
pass
else:
if self.storage.location not in searched_locations:
searched_locations.append(self.storage.location)
if self.storage.exists(path):
match = self.storage.path(path)
if all:
match = [match]
return match
return []
def list(self, ignore_patterns):
"""
List all files of the storage.
"""
for path in utils.get_files(self.storage, ignore_patterns):
yield path, self.storage
class DefaultStorageFinder(BaseStorageFinder):
"""
A static files finder that uses the default storage backend.
"""
storage = default_storage
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
base_location = getattr(self.storage, "base_location", empty)
if not base_location:
raise ImproperlyConfigured(
"The storage backend of the "
"staticfiles finder %r doesn't have "
"a valid location." % self.__class__
)
def find(path, all=False):
"""
Find a static file with the given path using all enabled finders.
If ``all`` is ``False`` (default), return the first matching
absolute path (or ``None`` if no match). Otherwise return a list.
"""
searched_locations[:] = []
matches = []
for finder in get_finders():
result = finder.find(path, all=all)
if not all and result:
return result
if not isinstance(result, (list, tuple)):
result = [result]
matches.extend(result)
if matches:
return matches
# No match.
return [] if all else None
def get_finders():
for finder_path in settings.STATICFILES_FINDERS:
yield get_finder(finder_path)
@functools.lru_cache(maxsize=None)
def get_finder(import_path):
"""
Import the staticfiles finder class described by import_path, where
import_path is the full Python path to the class.
"""
Finder = import_string(import_path)
if not issubclass(Finder, BaseFinder):
raise ImproperlyConfigured(
'Finder "%s" is not a subclass of "%s"' % (Finder, BaseFinder)
)
return Finder()
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/finders.py | Python | mit | 11,008 |
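The module-level `find()` and `get_finders()` above are the public API of this module. A short sketch of both; the asset path is hypothetical:

```python
from django.contrib.staticfiles import finders

# First matching absolute path, or None if nothing matched.
match = finders.find("css/base.css")

# All matches across every configured finder, as a list.
matches = finders.find("css/base.css", all=True)

# Enumerate every (relative path, storage) pair the finders know about,
# which is essentially what collectstatic iterates over.
for finder in finders.get_finders():
    for path, storage in finder.list(ignore_patterns=[]):
        print(path, storage)
```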
from urllib.parse import urlparse
from urllib.request import url2pathname
from asgiref.sync import sync_to_async
from django.conf import settings
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.views import serve
from django.core.handlers.asgi import ASGIHandler
from django.core.handlers.exception import response_for_exception
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.http import Http404
class StaticFilesHandlerMixin:
"""
Common methods used by WSGI and ASGI handlers.
"""
# May be used to differentiate between handler types (e.g. in a
# request_finished signal)
handles_files = True
def load_middleware(self):
# Middleware are already loaded for self.application; no need to reload
# them for self.
pass
def get_base_url(self):
utils.check_settings()
return settings.STATIC_URL
def _should_handle(self, path):
"""
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""
Return the relative path to the media file on disk for the given URL.
"""
relative_url = url[len(self.base_url[2]) :]
return url2pathname(relative_url)
def serve(self, request):
"""Serve the request path."""
return serve(request, self.file_path(request.path), insecure=True)
def get_response(self, request):
try:
return self.serve(request)
except Http404 as e:
return response_for_exception(request, e)
async def get_response_async(self, request):
try:
return await sync_to_async(self.serve, thread_sensitive=False)(request)
except Http404 as e:
return await sync_to_async(response_for_exception, thread_sensitive=False)(
request, e
)
class StaticFilesHandler(StaticFilesHandlerMixin, WSGIHandler):
"""
WSGI middleware that intercepts calls to the static files directory, as
defined by the STATIC_URL setting, and serves those files.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super().__init__()
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super().__call__(environ, start_response)
class ASGIStaticFilesHandler(StaticFilesHandlerMixin, ASGIHandler):
"""
ASGI application which wraps another and intercepts requests for static
files, passing them off to Django's static file serving.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
async def __call__(self, scope, receive, send):
# Only even look at HTTP requests
if scope["type"] == "http" and self._should_handle(scope["path"]):
# Serve static content
# (the one thing super() doesn't do is __call__, apparently)
return await super().__call__(scope, receive, send)
# Hand off to the main app
return await self.application(scope, receive, send)
async def get_response_async(self, request):
response = await super().get_response_async(request)
response._resource_closers.append(request.close)
# FileResponse is not async compatible.
if response.streaming and not response.is_async:
_iterator = response.streaming_content
async def awrapper():
for part in await sync_to_async(list)(_iterator):
yield part
response.streaming_content = awrapper()
return response
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/handlers.py | Python | mit | 4,029 |
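A sketch of wrapping a project's ASGI application with `ASGIStaticFilesHandler` for development serving; the settings module name is hypothetical:

```python
# asgi.py
import os

from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")

# Requests under STATIC_URL are intercepted; everything else is passed
# through to the wrapped Django application.
application = ASGIStaticFilesHandler(get_asgi_application())
```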
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/management/__init__.py | Python | mit | 0 |
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/management/commands/__init__.py | Python | mit | 0 |
import os
from django.apps import apps
from django.contrib.staticfiles.finders import get_finders
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.checks import Tags
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.utils.functional import cached_property
class Command(BaseCommand):
"""
Copies or symlinks static files from different locations to the
settings.STATIC_ROOT.
"""
help = "Collect static files in a single location."
requires_system_checks = [Tags.staticfiles]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.copied_files = []
self.symlinked_files = []
self.unmodified_files = []
self.post_processed_files = []
self.storage = staticfiles_storage
self.style = no_style()
@cached_property
def local(self):
try:
self.storage.path("")
except NotImplementedError:
return False
return True
def add_arguments(self, parser):
parser.add_argument(
"--noinput",
"--no-input",
action="store_false",
dest="interactive",
help="Do NOT prompt the user for input of any kind.",
)
parser.add_argument(
"--no-post-process",
action="store_false",
dest="post_process",
help="Do NOT post process collected files.",
)
parser.add_argument(
"-i",
"--ignore",
action="append",
default=[],
dest="ignore_patterns",
metavar="PATTERN",
help="Ignore files or directories matching this glob-style "
"pattern. Use multiple times to ignore more.",
)
parser.add_argument(
"-n",
"--dry-run",
action="store_true",
help="Do everything except modify the filesystem.",
)
parser.add_argument(
"-c",
"--clear",
action="store_true",
help="Clear the existing files using the storage "
"before trying to copy or link the original file.",
)
parser.add_argument(
"-l",
"--link",
action="store_true",
help="Create a symbolic link to each file instead of copying.",
)
parser.add_argument(
"--no-default-ignore",
action="store_false",
dest="use_default_ignore_patterns",
help=(
"Don't ignore the common private glob-style patterns (defaults to "
"'CVS', '.*' and '*~')."
),
)
def set_options(self, **options):
"""
Set instance variables based on an options dict
"""
self.interactive = options["interactive"]
self.verbosity = options["verbosity"]
self.symlink = options["link"]
self.clear = options["clear"]
self.dry_run = options["dry_run"]
ignore_patterns = options["ignore_patterns"]
if options["use_default_ignore_patterns"]:
ignore_patterns += apps.get_app_config("staticfiles").ignore_patterns
self.ignore_patterns = list({os.path.normpath(p) for p in ignore_patterns})
self.post_process = options["post_process"]
def collect(self):
"""
Perform the bulk of the work of collectstatic.
Split off from handle() to facilitate testing.
"""
if self.symlink and not self.local:
raise CommandError("Can't symlink to a remote destination.")
if self.clear:
self.clear_dir("")
if self.symlink:
handler = self.link_file
else:
handler = self.copy_file
found_files = {}
for finder in get_finders():
for path, storage in finder.list(self.ignore_patterns):
# Prefix the relative path if the source storage contains it
if getattr(storage, "prefix", None):
prefixed_path = os.path.join(storage.prefix, path)
else:
prefixed_path = path
if prefixed_path not in found_files:
found_files[prefixed_path] = (storage, path)
handler(path, prefixed_path, storage)
else:
self.log(
"Found another file with the destination path '%s'. It "
"will be ignored since only the first encountered file "
"is collected. If this is not what you want, make sure "
"every static file has a unique path." % prefixed_path,
level=1,
)
# Storage backends may define a post_process() method.
if self.post_process and hasattr(self.storage, "post_process"):
processor = self.storage.post_process(found_files, dry_run=self.dry_run)
for original_path, processed_path, processed in processor:
if isinstance(processed, Exception):
self.stderr.write("Post-processing '%s' failed!" % original_path)
# Add a blank line before the traceback, otherwise it's
# too easy to miss the relevant part of the error message.
self.stderr.write()
raise processed
if processed:
self.log(
"Post-processed '%s' as '%s'" % (original_path, processed_path),
level=2,
)
self.post_processed_files.append(original_path)
else:
self.log("Skipped post-processing '%s'" % original_path)
return {
"modified": self.copied_files + self.symlinked_files,
"unmodified": self.unmodified_files,
"post_processed": self.post_processed_files,
}
def handle(self, **options):
self.set_options(**options)
message = ["\n"]
if self.dry_run:
message.append(
"You have activated the --dry-run option so no files will be "
"modified.\n\n"
)
message.append(
"You have requested to collect static files at the destination\n"
"location as specified in your settings"
)
if self.is_local_storage() and self.storage.location:
destination_path = self.storage.location
message.append(":\n\n %s\n\n" % destination_path)
should_warn_user = self.storage.exists(destination_path) and any(
self.storage.listdir(destination_path)
)
else:
destination_path = None
message.append(".\n\n")
# Destination files existence not checked; play it safe and warn.
should_warn_user = True
if self.interactive and should_warn_user:
if self.clear:
message.append("This will DELETE ALL FILES in this location!\n")
else:
message.append("This will overwrite existing files!\n")
message.append(
"Are you sure you want to do this?\n\n"
"Type 'yes' to continue, or 'no' to cancel: "
)
if input("".join(message)) != "yes":
raise CommandError("Collecting static files cancelled.")
collected = self.collect()
if self.verbosity >= 1:
modified_count = len(collected["modified"])
unmodified_count = len(collected["unmodified"])
post_processed_count = len(collected["post_processed"])
return (
"\n%(modified_count)s %(identifier)s %(action)s"
"%(destination)s%(unmodified)s%(post_processed)s."
) % {
"modified_count": modified_count,
"identifier": "static file" + ("" if modified_count == 1 else "s"),
"action": "symlinked" if self.symlink else "copied",
"destination": (
" to '%s'" % destination_path if destination_path else ""
),
"unmodified": (
", %s unmodified" % unmodified_count
if collected["unmodified"]
else ""
),
"post_processed": (
collected["post_processed"]
and ", %s post-processed" % post_processed_count
or ""
),
}
def log(self, msg, level=2):
"""
Small log helper
"""
if self.verbosity >= level:
self.stdout.write(msg)
def is_local_storage(self):
return isinstance(self.storage, FileSystemStorage)
def clear_dir(self, path):
"""
Delete the given relative path using the destination storage backend.
"""
if not self.storage.exists(path):
return
dirs, files = self.storage.listdir(path)
for f in files:
fpath = os.path.join(path, f)
if self.dry_run:
self.log("Pretending to delete '%s'" % fpath, level=1)
else:
self.log("Deleting '%s'" % fpath, level=1)
try:
full_path = self.storage.path(fpath)
except NotImplementedError:
self.storage.delete(fpath)
else:
if not os.path.exists(full_path) and os.path.lexists(full_path):
# Delete broken symlinks
os.unlink(full_path)
else:
self.storage.delete(fpath)
for d in dirs:
self.clear_dir(os.path.join(path, d))
def delete_file(self, path, prefixed_path, source_storage):
"""
Check if the target file should be deleted if it already exists.
"""
if self.storage.exists(prefixed_path):
try:
# When was the target file modified last time?
target_last_modified = self.storage.get_modified_time(prefixed_path)
except (OSError, NotImplementedError, AttributeError):
# The storage doesn't support get_modified_time() or failed
pass
else:
try:
# When was the source file modified last time?
source_last_modified = source_storage.get_modified_time(path)
except (OSError, NotImplementedError, AttributeError):
pass
else:
# The full path of the target file
if self.local:
full_path = self.storage.path(prefixed_path)
# If it's --link mode and the path isn't a link (i.e.
# the previous collectstatic wasn't with --link) or if
# it's non-link mode and the path is a link (i.e. the
# previous collectstatic was with --link), the old
# links/files must be deleted so it's not safe to skip
# unmodified files.
can_skip_unmodified_files = not (
self.symlink ^ os.path.islink(full_path)
)
else:
# In remote storages, skipping is only based on the
# modified times since symlinks aren't relevant.
can_skip_unmodified_files = True
# Avoid sub-second precision (see #14665, #19540)
file_is_unmodified = target_last_modified.replace(
microsecond=0
) >= source_last_modified.replace(microsecond=0)
if file_is_unmodified and can_skip_unmodified_files:
if prefixed_path not in self.unmodified_files:
self.unmodified_files.append(prefixed_path)
self.log("Skipping '%s' (not modified)" % path)
return False
# Then delete the existing file if really needed
if self.dry_run:
self.log("Pretending to delete '%s'" % path)
else:
self.log("Deleting '%s'" % path)
self.storage.delete(prefixed_path)
return True
def link_file(self, path, prefixed_path, source_storage):
"""
Attempt to link ``path``
"""
# Skip this file if it was already copied earlier
if prefixed_path in self.symlinked_files:
return self.log("Skipping '%s' (already linked earlier)" % path)
# Delete the target file if needed or break
if not self.delete_file(path, prefixed_path, source_storage):
return
# The full path of the source file
source_path = source_storage.path(path)
# Finally link the file
if self.dry_run:
self.log("Pretending to link '%s'" % source_path, level=1)
else:
self.log("Linking '%s'" % source_path, level=2)
full_path = self.storage.path(prefixed_path)
os.makedirs(os.path.dirname(full_path), exist_ok=True)
try:
if os.path.lexists(full_path):
os.unlink(full_path)
os.symlink(source_path, full_path)
except NotImplementedError:
import platform
raise CommandError(
"Symlinking is not supported in this "
"platform (%s)." % platform.platform()
)
except OSError as e:
raise CommandError(e)
if prefixed_path not in self.symlinked_files:
self.symlinked_files.append(prefixed_path)
def copy_file(self, path, prefixed_path, source_storage):
"""
Attempt to copy ``path`` with storage
"""
# Skip this file if it was already copied earlier
if prefixed_path in self.copied_files:
return self.log("Skipping '%s' (already copied earlier)" % path)
# Delete the target file if needed or break
if not self.delete_file(path, prefixed_path, source_storage):
return
# The full path of the source file
source_path = source_storage.path(path)
# Finally start copying
if self.dry_run:
self.log("Pretending to copy '%s'" % source_path, level=1)
else:
self.log("Copying '%s'" % source_path, level=2)
with source_storage.open(path) as source_file:
self.storage.save(prefixed_path, source_file)
self.copied_files.append(prefixed_path)
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/management/commands/collectstatic.py | Python | mit | 15,137 |
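The command can also be invoked programmatically; keyword names map to the `dest` values declared in `add_arguments()` above. A minimal sketch:

```python
from django.core.management import call_command

# Equivalent to: python manage.py collectstatic --noinput --clear
call_command("collectstatic", interactive=False, clear=True, verbosity=1)
```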
import os
from django.contrib.staticfiles import finders
from django.core.management.base import LabelCommand
class Command(LabelCommand):
help = "Finds the absolute paths for the given static file(s)."
label = "staticfile"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--first",
action="store_false",
dest="all",
help="Only return the first match for each static file.",
)
def handle_label(self, path, **options):
verbosity = options["verbosity"]
result = finders.find(path, all=options["all"])
if verbosity >= 2:
searched_locations = (
"\nLooking in the following locations:\n %s"
% "\n ".join([str(loc) for loc in finders.searched_locations])
)
else:
searched_locations = ""
if result:
if not isinstance(result, (list, tuple)):
result = [result]
result = (os.path.realpath(path) for path in result)
if verbosity >= 1:
file_list = "\n ".join(result)
return "Found '%s' here:\n %s%s" % (
path,
file_list,
searched_locations,
)
else:
return "\n".join(result)
else:
message = ["No matching file found for '%s'." % path]
if verbosity >= 2:
message.append(searched_locations)
if verbosity >= 1:
self.stderr.write("\n".join(message))
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/management/commands/findstatic.py | Python | mit | 1,638 |
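`findstatic` is a `LabelCommand`, so each positional label is resolved independently. A sketch of driving it from code; the asset path is hypothetical:

```python
from django.core.management import call_command

# Equivalent to: python manage.py findstatic css/base.css --first -v 2
call_command("findstatic", "css/base.css", all=False, verbosity=2)
```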
from django.conf import settings
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.core.management.commands.runserver import Command as RunserverCommand
class Command(RunserverCommand):
help = (
"Starts a lightweight web server for development and also serves static files."
)
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--nostatic",
action="store_false",
dest="use_static_handler",
help="Tells Django to NOT automatically serve static files at STATIC_URL.",
)
parser.add_argument(
"--insecure",
action="store_true",
dest="insecure_serving",
help="Allows serving static files even if DEBUG is False.",
)
def get_handler(self, *args, **options):
"""
Return the static files serving handler wrapping the default handler,
if static files should be served. Otherwise return the default handler.
"""
handler = super().get_handler(*args, **options)
use_static_handler = options["use_static_handler"]
insecure_serving = options["insecure_serving"]
if use_static_handler and (settings.DEBUG or insecure_serving):
return StaticFilesHandler(handler)
return handler
| castiel248/Convert | Lib/site-packages/django/contrib/staticfiles/management/commands/runserver.py | Python | mit | 1,373 |
import json
import os
import posixpath
import re
from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit
from django.conf import STATICFILES_STORAGE_ALIAS, settings
from django.contrib.staticfiles.utils import check_settings, matches_patterns
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, storages
from django.utils.crypto import md5
from django.utils.functional import LazyObject
class StaticFilesStorage(FileSystemStorage):
"""
Standard file system storage for static files.
The defaults for ``location`` and ``base_url`` are
``STATIC_ROOT`` and ``STATIC_URL``.
"""
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.STATIC_ROOT
if base_url is None:
base_url = settings.STATIC_URL
check_settings(base_url)
super().__init__(location, base_url, *args, **kwargs)
# FileSystemStorage fallbacks to MEDIA_ROOT when location
# is empty, so we restore the empty value.
if not location:
self.base_location = None
self.location = None
def path(self, name):
if not self.location:
raise ImproperlyConfigured(
"You're using the staticfiles app "
"without having set the STATIC_ROOT "
"setting to a filesystem path."
)
return super().path(name)
class HashedFilesMixin:
default_template = """url("%(url)s")"""
max_post_process_passes = 5
support_js_module_import_aggregation = False
_js_module_import_aggregation_patterns = (
"*.js",
(
(
(
r"""(?P<matched>import(?s:(?P<import>[\s\{].*?))"""
r"""\s*from\s*['"](?P<url>[\.\/].*?)["']\s*;)"""
),
"""import%(import)s from "%(url)s";""",
),
(
(
r"""(?P<matched>export(?s:(?P<exports>[\s\{].*?))"""
r"""\s*from\s*["'](?P<url>[\.\/].*?)["']\s*;)"""
),
"""export%(exports)s from "%(url)s";""",
),
(
r"""(?P<matched>import\s*['"](?P<url>[\.\/].*?)["']\s*;)""",
"""import"%(url)s";""",
),
(
r"""(?P<matched>import\(["'](?P<url>.*?)["']\))""",
"""import("%(url)s")""",
),
),
)
patterns = (
(
"*.css",
(
r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""",
(
r"""(?P<matched>@import\s*["']\s*(?P<url>.*?)["'])""",
"""@import url("%(url)s")""",
),
(
(
r"(?m)(?P<matched>)^(/\*#[ \t]"
r"(?-i:sourceMappingURL)=(?P<url>.*)[ \t]*\*/)$"
),
"/*# sourceMappingURL=%(url)s */",
),
),
),
(
"*.js",
(
(
r"(?m)(?P<matched>)^(//# (?-i:sourceMappingURL)=(?P<url>.*))$",
"//# sourceMappingURL=%(url)s",
),
),
),
)
keep_intermediate_files = True
def __init__(self, *args, **kwargs):
if self.support_js_module_import_aggregation:
self.patterns += (self._js_module_import_aggregation_patterns,)
super().__init__(*args, **kwargs)
self._patterns = {}
self.hashed_files = {}
for extension, patterns in self.patterns:
for pattern in patterns:
if isinstance(pattern, (tuple, list)):
pattern, template = pattern
else:
template = self.default_template
compiled = re.compile(pattern, re.IGNORECASE)
self._patterns.setdefault(extension, []).append((compiled, template))
def file_hash(self, name, content=None):
"""
Return a hash of the file with the given name and optional content.
"""
if content is None:
return None
hasher = md5(usedforsecurity=False)
for chunk in content.chunks():
hasher.update(chunk)
return hasher.hexdigest()[:12]
def hashed_name(self, name, content=None, filename=None):
        # `filename` is the name of the file to hash if `content` isn't given.
# `name` is the base name to construct the new hashed filename from.
parsed_name = urlsplit(unquote(name))
clean_name = parsed_name.path.strip()
filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
opened = content is None
if opened:
if not self.exists(filename):
raise ValueError(
"The file '%s' could not be found with %r." % (filename, self)
)
try:
content = self.open(filename)
except OSError:
# Handle directory paths and fragments
return name
try:
file_hash = self.file_hash(clean_name, content)
finally:
if opened:
content.close()
path, filename = os.path.split(clean_name)
root, ext = os.path.splitext(filename)
file_hash = (".%s" % file_hash) if file_hash else ""
hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext))
unparsed_name = list(parsed_name)
unparsed_name[2] = hashed_name
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
if "?#" in name and not unparsed_name[3]:
unparsed_name[2] += "?"
return urlunsplit(unparsed_name)
def _url(self, hashed_name_func, name, force=False, hashed_files=None):
"""
Return the non-hashed URL in DEBUG mode.
"""
if settings.DEBUG and not force:
hashed_name, fragment = name, ""
else:
clean_name, fragment = urldefrag(name)
if urlsplit(clean_name).path.endswith("/"): # don't hash paths
hashed_name = name
else:
args = (clean_name,)
if hashed_files is not None:
args += (hashed_files,)
hashed_name = hashed_name_func(*args)
final_url = super().url(hashed_name)
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
query_fragment = "?#" in name # [sic!]
if fragment or query_fragment:
urlparts = list(urlsplit(final_url))
if fragment and not urlparts[4]:
urlparts[4] = fragment
if query_fragment and not urlparts[3]:
urlparts[2] += "?"
final_url = urlunsplit(urlparts)
return unquote(final_url)
def url(self, name, force=False):
"""
Return the non-hashed URL in DEBUG mode.
"""
return self._url(self.stored_name, name, force)
def url_converter(self, name, hashed_files, template=None):
"""
Return the custom URL converter for the given file name.
"""
if template is None:
template = self.default_template
def converter(matchobj):
"""
Convert the matched URL to a normalized and hashed URL.
This requires figuring out which files the matched URL resolves
to and calling the url() method of the storage.
"""
matches = matchobj.groupdict()
matched = matches["matched"]
url = matches["url"]
# Ignore absolute/protocol-relative and data-uri URLs.
if re.match(r"^[a-z]+:", url):
return matched
# Ignore absolute URLs that don't point to a static file (dynamic
# CSS / JS?). Note that STATIC_URL cannot be empty.
if url.startswith("/") and not url.startswith(settings.STATIC_URL):
return matched
# Strip off the fragment so a path-like fragment won't interfere.
url_path, fragment = urldefrag(url)
# Ignore URLs without a path
if not url_path:
return matched
if url_path.startswith("/"):
# Otherwise the condition above would have returned prematurely.
assert url_path.startswith(settings.STATIC_URL)
target_name = url_path[len(settings.STATIC_URL) :]
else:
# We're using the posixpath module to mix paths and URLs conveniently.
source_name = name if os.sep == "/" else name.replace(os.sep, "/")
target_name = posixpath.join(posixpath.dirname(source_name), url_path)
# Determine the hashed name of the target file with the storage backend.
hashed_url = self._url(
self._stored_name,
unquote(target_name),
force=True,
hashed_files=hashed_files,
)
transformed_url = "/".join(
url_path.split("/")[:-1] + hashed_url.split("/")[-1:]
)
# Restore the fragment that was stripped off earlier.
if fragment:
transformed_url += ("?#" if "?#" in url else "#") + fragment
# Return the hashed version to the file
matches["url"] = unquote(transformed_url)
return template % matches
return converter
def post_process(self, paths, dry_run=False, **options):
"""
Post process the given dictionary of files (called from collectstatic).
Processing is actually two separate operations:
1. renaming files to include a hash of their content for cache-busting,
and copying those files to the target storage.
2. adjusting files which contain references to other files so they
refer to the cache-busting filenames.
        If either of these is performed on a file, then that file is
        considered post-processed.
"""
# don't even dare to process the files if we're in dry run mode
if dry_run:
return
# where to store the new paths
hashed_files = {}
# build a list of adjustable files
adjustable_paths = [
path for path in paths if matches_patterns(path, self._patterns)
]
# Adjustable files to yield at end, keyed by the original path.
processed_adjustable_paths = {}
# Do a single pass first. Post-process all files once, yielding not
# adjustable files and exceptions, and collecting adjustable files.
for name, hashed_name, processed, _ in self._post_process(
paths, adjustable_paths, hashed_files
):
if name not in adjustable_paths or isinstance(processed, Exception):
yield name, hashed_name, processed
else:
processed_adjustable_paths[name] = (name, hashed_name, processed)
paths = {path: paths[path] for path in adjustable_paths}
substitutions = False
for i in range(self.max_post_process_passes):
substitutions = False
for name, hashed_name, processed, subst in self._post_process(
paths, adjustable_paths, hashed_files
):
# Overwrite since hashed_name may be newer.
processed_adjustable_paths[name] = (name, hashed_name, processed)
substitutions = substitutions or subst
if not substitutions:
break
if substitutions:
yield "All", None, RuntimeError("Max post-process passes exceeded.")
# Store the processed paths
self.hashed_files.update(hashed_files)
# Yield adjustable files with final, hashed name.
yield from processed_adjustable_paths.values()
def _post_process(self, paths, adjustable_paths, hashed_files):
# Sort the files by directory level
def path_level(name):
return len(name.split(os.sep))
for name in sorted(paths, key=path_level, reverse=True):
substitutions = True
# use the original, local file, not the copied-but-unprocessed
# file, which might be somewhere far away, like S3
storage, path = paths[name]
with storage.open(path) as original_file:
cleaned_name = self.clean_name(name)
hash_key = self.hash_key(cleaned_name)
# generate the hash with the original content, even for
# adjustable files.
if hash_key not in hashed_files:
hashed_name = self.hashed_name(name, original_file)
else:
hashed_name = hashed_files[hash_key]
# then get the original's file content..
if hasattr(original_file, "seek"):
original_file.seek(0)
hashed_file_exists = self.exists(hashed_name)
processed = False
# ..to apply each replacement pattern to the content
if name in adjustable_paths:
old_hashed_name = hashed_name
content = original_file.read().decode("utf-8")
for extension, patterns in self._patterns.items():
if matches_patterns(path, (extension,)):
for pattern, template in patterns:
converter = self.url_converter(
name, hashed_files, template
)
try:
content = pattern.sub(converter, content)
except ValueError as exc:
yield name, None, exc, False
if hashed_file_exists:
self.delete(hashed_name)
# then save the processed result
content_file = ContentFile(content.encode())
if self.keep_intermediate_files:
# Save intermediate file for reference
self._save(hashed_name, content_file)
hashed_name = self.hashed_name(name, content_file)
if self.exists(hashed_name):
self.delete(hashed_name)
saved_name = self._save(hashed_name, content_file)
hashed_name = self.clean_name(saved_name)
# If the file hash stayed the same, this file didn't change
if old_hashed_name == hashed_name:
substitutions = False
processed = True
if not processed:
# or handle the case in which neither processing nor
# a change to the original file happened
if not hashed_file_exists:
processed = True
saved_name = self._save(hashed_name, original_file)
hashed_name = self.clean_name(saved_name)
# and then set the cache accordingly
hashed_files[hash_key] = hashed_name
yield name, hashed_name, processed, substitutions
def clean_name(self, name):
return name.replace("\\", "/")
def hash_key(self, name):
return name
def _stored_name(self, name, hashed_files):
# Normalize the path to avoid multiple names for the same file like
# ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
# path.
name = posixpath.normpath(name)
cleaned_name = self.clean_name(name)
hash_key = self.hash_key(cleaned_name)
cache_name = hashed_files.get(hash_key)
if cache_name is None:
cache_name = self.clean_name(self.hashed_name(name))
return cache_name
def stored_name(self, name):
cleaned_name = self.clean_name(name)
hash_key = self.hash_key(cleaned_name)
cache_name = self.hashed_files.get(hash_key)
if cache_name:
return cache_name
# No cached name found, recalculate it from the files.
intermediate_name = name
for i in range(self.max_post_process_passes + 1):
cache_name = self.clean_name(
self.hashed_name(name, content=None, filename=intermediate_name)
)
if intermediate_name == cache_name:
# Store the hashed name if there was a miss.
self.hashed_files[hash_key] = cache_name
return cache_name
else:
# Move on to the next intermediate file.
intermediate_name = cache_name
# If the cache name can't be determined after the max number of passes,
# the intermediate files on disk may be corrupt; avoid an infinite loop.
raise ValueError("The name '%s' could not be hashed with %r." % (name, self))
class ManifestFilesMixin(HashedFilesMixin):
manifest_version = "1.1" # the manifest format standard
manifest_name = "staticfiles.json"
manifest_strict = True
keep_intermediate_files = False
def __init__(self, *args, manifest_storage=None, **kwargs):
super().__init__(*args, **kwargs)
if manifest_storage is None:
manifest_storage = self
self.manifest_storage = manifest_storage
self.hashed_files, self.manifest_hash = self.load_manifest()
def read_manifest(self):
try:
with self.manifest_storage.open(self.manifest_name) as manifest:
return manifest.read().decode()
except FileNotFoundError:
return None
def load_manifest(self):
content = self.read_manifest()
if content is None:
return {}, ""
try:
stored = json.loads(content)
except json.JSONDecodeError:
pass
else:
version = stored.get("version")
if version in ("1.0", "1.1"):
return stored.get("paths", {}), stored.get("hash", "")
raise ValueError(
"Couldn't load manifest '%s' (version %s)"
% (self.manifest_name, self.manifest_version)
)
def post_process(self, *args, **kwargs):
self.hashed_files = {}
yield from super().post_process(*args, **kwargs)
if not kwargs.get("dry_run"):
self.save_manifest()
def save_manifest(self):
self.manifest_hash = self.file_hash(
None, ContentFile(json.dumps(sorted(self.hashed_files.items())).encode())
)
payload = {
"paths": self.hashed_files,
"version": self.manifest_version,
"hash": self.manifest_hash,
}
if self.manifest_storage.exists(self.manifest_name):
self.manifest_storage.delete(self.manifest_name)
contents = json.dumps(payload).encode()
self.manifest_storage._save(self.manifest_name, ContentFile(contents))
def stored_name(self, name):
parsed_name = urlsplit(unquote(name))
clean_name = parsed_name.path.strip()
hash_key = self.hash_key(clean_name)
cache_name = self.hashed_files.get(hash_key)
if cache_name is None:
if self.manifest_strict:
raise ValueError(
"Missing staticfiles manifest entry for '%s'" % clean_name
)
cache_name = self.clean_name(self.hashed_name(name))
unparsed_name = list(parsed_name)
unparsed_name[2] = cache_name
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
if "?#" in name and not unparsed_name[3]:
unparsed_name[2] += "?"
return urlunsplit(unparsed_name)
class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
"""
A static file system storage backend which also saves
hashed copies of the files it saves.
"""
pass
class ConfiguredStorage(LazyObject):
def _setup(self):
self._wrapped = storages[STATICFILES_STORAGE_ALIAS]
staticfiles_storage = ConfiguredStorage()
|
castiel248/Convert
|
Lib/site-packages/django/contrib/staticfiles/storage.py
|
Python
|
mit
| 20,892 |
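`HashedFilesMixin.hashed_name()` above derives the cache-busting filename from an MD5 digest of the file contents, truncated to 12 hex digits and spliced in before the extension. A minimal, self-contained sketch of that derivation; the helper name `toy_hashed_name` is illustrative and not part of Django's API:

```python
import os
from hashlib import md5


def toy_hashed_name(name, content):
    # First 12 hex digits of the content's MD5, as in file_hash() above
    # (Django's md5 wrapper additionally passes usedforsecurity=False).
    digest = md5(content).hexdigest()[:12]
    path, filename = os.path.split(name)
    root, ext = os.path.splitext(filename)
    # Splice the digest in before the extension, as in hashed_name().
    return os.path.join(path, "%s.%s%s" % (root, digest, ext))


print(toy_hashed_name("css/base.css", b"body { color: red; }"))
# e.g. css/base.<12 hex digits>.css
```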
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.test import LiveServerTestCase
class StaticLiveServerTestCase(LiveServerTestCase):
"""
    Extend django.test.LiveServerTestCase to transparently overlay, at test
    execution time, the assets provided by the staticfiles app finders. This
    means you don't need to run collectstatic before, or as part of, your
    test setup.
"""
static_handler = StaticFilesHandler
|
castiel248/Convert
|
Lib/site-packages/django/contrib/staticfiles/testing.py
|
Python
|
mit
| 463 |
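A sketch of how `StaticLiveServerTestCase` is typically used inside a project's test suite. The test name, the `/static/` prefix, and the asset path are illustrative; they assume a project whose finders can resolve `css/base.css`:

```python
import urllib.request

from django.contrib.staticfiles.testing import StaticLiveServerTestCase


class StaticAssetTests(StaticLiveServerTestCase):
    def test_css_is_served(self):
        # No prior collectstatic needed; the finders serve the file.
        url = self.live_server_url + "/static/css/base.css"  # hypothetical asset
        with urllib.request.urlopen(url) as response:
            self.assertEqual(response.status, 200)
```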
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.views import serve
urlpatterns = []
def staticfiles_urlpatterns(prefix=None):
"""
Helper function to return a URL pattern for serving static files.
"""
if prefix is None:
prefix = settings.STATIC_URL
return static(prefix, view=serve)
# Only append if urlpatterns are empty
if settings.DEBUG and not urlpatterns:
urlpatterns += staticfiles_urlpatterns()
|
castiel248/Convert
|
Lib/site-packages/django/contrib/staticfiles/urls.py
|
Python
|
mit
| 498 |
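A typical development-only hookup for `staticfiles_urlpatterns()` in a project URLconf (sketch; `myapp` and its view are hypothetical):

```python
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import path

from myapp import views  # hypothetical

urlpatterns = [
    path("", views.index),
]
# The underlying serve() view raises Http404 when DEBUG is False, so this
# is effectively a development-only addition.
urlpatterns += staticfiles_urlpatterns()
```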
import fnmatch
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def matches_patterns(path, patterns):
"""
    Return True if ``path`` matches any pattern in ``patterns``, and False
    otherwise.
"""
return any(fnmatch.fnmatchcase(path, pattern) for pattern in patterns)
def get_files(storage, ignore_patterns=None, location=""):
"""
Recursively walk the storage directories yielding the paths
of all files that should be copied.
"""
if ignore_patterns is None:
ignore_patterns = []
directories, files = storage.listdir(location)
for fn in files:
# Match only the basename.
if matches_patterns(fn, ignore_patterns):
continue
if location:
fn = os.path.join(location, fn)
# Match the full file path.
if matches_patterns(fn, ignore_patterns):
continue
yield fn
for dir in directories:
if matches_patterns(dir, ignore_patterns):
continue
if location:
dir = os.path.join(location, dir)
yield from get_files(storage, ignore_patterns, dir)
def check_settings(base_url=None):
"""
Check if the staticfiles settings have sane values.
"""
if base_url is None:
base_url = settings.STATIC_URL
if not base_url:
raise ImproperlyConfigured(
"You're using the staticfiles app "
"without having set the required STATIC_URL setting."
)
if settings.MEDIA_URL == base_url:
raise ImproperlyConfigured(
"The MEDIA_URL and STATIC_URL settings must have different values"
)
if (
settings.DEBUG
and settings.MEDIA_URL
and settings.STATIC_URL
and settings.MEDIA_URL.startswith(settings.STATIC_URL)
):
raise ImproperlyConfigured(
"runserver can't serve media if MEDIA_URL is within STATIC_URL."
)
if (settings.MEDIA_ROOT and settings.STATIC_ROOT) and (
settings.MEDIA_ROOT == settings.STATIC_ROOT
):
raise ImproperlyConfigured(
"The MEDIA_ROOT and STATIC_ROOT settings must have different values"
)
|
castiel248/Convert
|
Lib/site-packages/django/contrib/staticfiles/utils.py
|
Python
|
mit
| 2,279 |
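`matches_patterns()` is a thin wrapper around `fnmatch.fnmatchcase()`; a quick standalone check of its semantics (assumes Django is installed; this function needs no settings):

```python
from django.contrib.staticfiles.utils import matches_patterns

print(matches_patterns("css/base.css", ["*.css"]))        # True
print(matches_patterns("js/app.js", ["*.css", "*.txt"]))  # False
print(matches_patterns("README", ["readme*"]))            # False (case-sensitive)
```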
"""
Views and functions for serving static files. These are only to be used during
development, and SHOULD NOT be used in a production setting.
"""
import os
import posixpath
from django.conf import settings
from django.contrib.staticfiles import finders
from django.http import Http404
from django.views import static
def serve(request, path, insecure=False, **kwargs):
"""
Serve static files below a given point in the directory structure or
from locations inferred from the staticfiles finders.
To use, put a URL pattern such as::
from django.contrib.staticfiles import views
path('<path:path>', views.serve)
in your URLconf.
It uses the django.views.static.serve() view to serve the found files.
"""
if not settings.DEBUG and not insecure:
raise Http404
normalized_path = posixpath.normpath(path).lstrip("/")
absolute_path = finders.find(normalized_path)
if not absolute_path:
if path.endswith("/") or path == "":
raise Http404("Directory indexes are not allowed here.")
raise Http404("'%s' could not be found" % path)
document_root, path = os.path.split(absolute_path)
return static.serve(request, path, document_root=document_root, **kwargs)
|
castiel248/Convert
|
Lib/site-packages/django/contrib/staticfiles/views.py
|
Python
|
mit
| 1,261 |
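Beyond the URLconf shown in the docstring, the `insecure` flag lets the finder-backed `serve()` view run with DEBUG off. A sketch for a throwaway demo setup, not for production use:

```python
from django.contrib.staticfiles.views import serve
from django.urls import re_path

urlpatterns = [
    # Serve finder-resolved files even when DEBUG is False.
    re_path(r"^static/(?P<path>.*)$", serve, {"insecure": True}),
]
```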
castiel248/Convert
|
Lib/site-packages/django/contrib/syndication/__init__.py
|
Python
|
mit
| 0 |
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class SyndicationConfig(AppConfig):
name = "django.contrib.syndication"
verbose_name = _("Syndication")
|
castiel248/Convert
|
Lib/site-packages/django/contrib/syndication/apps.py
|
Python
|
mit
| 203 |
from inspect import getattr_static, unwrap
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.template import TemplateDoesNotExist, loader
from django.utils import feedgenerator
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import http_date
from django.utils.timezone import get_default_timezone, is_naive, make_aware
from django.utils.translation import get_language
def add_domain(domain, url, secure=False):
protocol = "https" if secure else "http"
if url.startswith("//"):
# Support network-path reference (see #16753) - RSS requires a protocol
url = "%s:%s" % (protocol, url)
elif not url.startswith(("http://", "https://", "mailto:")):
url = iri_to_uri("%s://%s%s" % (protocol, domain, url))
return url
class FeedDoesNotExist(ObjectDoesNotExist):
pass
class Feed:
feed_type = feedgenerator.DefaultFeed
title_template = None
description_template = None
language = None
def __call__(self, request, *args, **kwargs):
try:
obj = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
raise Http404("Feed object does not exist.")
feedgen = self.get_feed(obj, request)
response = HttpResponse(content_type=feedgen.content_type)
if hasattr(self, "item_pubdate") or hasattr(self, "item_updateddate"):
            # If item_pubdate or item_updateddate is defined for the feed, set
            # the header so that ConditionalGetMiddleware can send 304 NOT MODIFIED.
response.headers["Last-Modified"] = http_date(
feedgen.latest_post_date().timestamp()
)
feedgen.write(response, "utf-8")
return response
def item_title(self, item):
# Titles should be double escaped by default (see #6533)
return escape(str(item))
def item_description(self, item):
return str(item)
def item_link(self, item):
try:
return item.get_absolute_url()
except AttributeError:
raise ImproperlyConfigured(
"Give your %s class a get_absolute_url() method, or define an "
"item_link() method in your Feed class." % item.__class__.__name__
)
def item_enclosures(self, item):
enc_url = self._get_dynamic_attr("item_enclosure_url", item)
if enc_url:
enc = feedgenerator.Enclosure(
url=str(enc_url),
length=str(self._get_dynamic_attr("item_enclosure_length", item)),
mime_type=str(self._get_dynamic_attr("item_enclosure_mime_type", item)),
)
return [enc]
return []
def _get_dynamic_attr(self, attname, obj, default=None):
try:
attr = getattr(self, attname)
except AttributeError:
return default
if callable(attr):
# Check co_argcount rather than try/excepting the function and
# catching the TypeError, because something inside the function
# may raise the TypeError. This technique is more accurate.
func = unwrap(attr)
try:
code = func.__code__
except AttributeError:
func = unwrap(attr.__call__)
code = func.__code__
            # If the function doesn't take arguments and is not a static
            # method, it was decorated without using @functools.wraps.
if not code.co_argcount and not isinstance(
getattr_static(self, func.__name__, None), staticmethod
):
raise ImproperlyConfigured(
f"Feed method {attname!r} decorated by {func.__name__!r} needs to "
f"use @functools.wraps."
)
if code.co_argcount == 2: # one argument is 'self'
return attr(obj)
else:
return attr()
return attr
def feed_extra_kwargs(self, obj):
"""
Return an extra keyword arguments dictionary that is used when
initializing the feed generator.
"""
return {}
def item_extra_kwargs(self, item):
"""
Return an extra keyword arguments dictionary that is used with
the `add_item` call of the feed generator.
"""
return {}
def get_object(self, request, *args, **kwargs):
return None
def get_context_data(self, **kwargs):
"""
        Return a dictionary to use as extra context if either
        ``self.description_template`` or ``self.title_template`` is used.
Default implementation preserves the old behavior
of using {'obj': item, 'site': current_site} as the context.
"""
return {"obj": kwargs.get("item"), "site": kwargs.get("site")}
def get_feed(self, obj, request):
"""
Return a feedgenerator.DefaultFeed object, fully populated, for
this feed. Raise FeedDoesNotExist for invalid parameters.
"""
current_site = get_current_site(request)
link = self._get_dynamic_attr("link", obj)
link = add_domain(current_site.domain, link, request.is_secure())
feed = self.feed_type(
title=self._get_dynamic_attr("title", obj),
subtitle=self._get_dynamic_attr("subtitle", obj),
link=link,
description=self._get_dynamic_attr("description", obj),
language=self.language or get_language(),
feed_url=add_domain(
current_site.domain,
self._get_dynamic_attr("feed_url", obj) or request.path,
request.is_secure(),
),
author_name=self._get_dynamic_attr("author_name", obj),
author_link=self._get_dynamic_attr("author_link", obj),
author_email=self._get_dynamic_attr("author_email", obj),
categories=self._get_dynamic_attr("categories", obj),
feed_copyright=self._get_dynamic_attr("feed_copyright", obj),
feed_guid=self._get_dynamic_attr("feed_guid", obj),
ttl=self._get_dynamic_attr("ttl", obj),
**self.feed_extra_kwargs(obj),
)
title_tmp = None
if self.title_template is not None:
try:
title_tmp = loader.get_template(self.title_template)
except TemplateDoesNotExist:
pass
description_tmp = None
if self.description_template is not None:
try:
description_tmp = loader.get_template(self.description_template)
except TemplateDoesNotExist:
pass
for item in self._get_dynamic_attr("items", obj):
context = self.get_context_data(
item=item, site=current_site, obj=obj, request=request
)
if title_tmp is not None:
title = title_tmp.render(context, request)
else:
title = self._get_dynamic_attr("item_title", item)
if description_tmp is not None:
description = description_tmp.render(context, request)
else:
description = self._get_dynamic_attr("item_description", item)
link = add_domain(
current_site.domain,
self._get_dynamic_attr("item_link", item),
request.is_secure(),
)
enclosures = self._get_dynamic_attr("item_enclosures", item)
author_name = self._get_dynamic_attr("item_author_name", item)
if author_name is not None:
author_email = self._get_dynamic_attr("item_author_email", item)
author_link = self._get_dynamic_attr("item_author_link", item)
else:
author_email = author_link = None
tz = get_default_timezone()
pubdate = self._get_dynamic_attr("item_pubdate", item)
if pubdate and is_naive(pubdate):
pubdate = make_aware(pubdate, tz)
updateddate = self._get_dynamic_attr("item_updateddate", item)
if updateddate and is_naive(updateddate):
updateddate = make_aware(updateddate, tz)
feed.add_item(
title=title,
link=link,
description=description,
unique_id=self._get_dynamic_attr("item_guid", item, link),
unique_id_is_permalink=self._get_dynamic_attr(
"item_guid_is_permalink", item
),
enclosures=enclosures,
pubdate=pubdate,
updateddate=updateddate,
author_name=author_name,
author_email=author_email,
author_link=author_link,
comments=self._get_dynamic_attr("item_comments", item),
categories=self._get_dynamic_attr("item_categories", item),
item_copyright=self._get_dynamic_attr("item_copyright", item),
**self.item_extra_kwargs(item),
)
return feed
|
castiel248/Convert
|
Lib/site-packages/django/contrib/syndication/views.py
|
Python
|
mit
| 9,309 |
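A minimal `Feed` subclass wiring up the hooks that `get_feed()` resolves through `_get_dynamic_attr()`. This is a sketch; the `Article` model and its fields are hypothetical:

```python
from django.contrib.syndication.views import Feed

from myapp.models import Article  # hypothetical


class LatestArticlesFeed(Feed):
    title = "Latest articles"
    link = "/articles/"
    description = "The ten most recently published articles."

    def items(self):
        return Article.objects.order_by("-published")[:10]

    def item_title(self, item):
        return item.headline

    def item_description(self, item):
        return item.summary

    def item_pubdate(self, item):
        # Defining this also enables the Last-Modified header logic above.
        return item.published
```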
castiel248/Convert
|
Lib/site-packages/django/core/__init__.py
|
Python
|
mit
| 0 |
|
import django
from django.core.handlers.asgi import ASGIHandler
def get_asgi_application():
"""
The public interface to Django's ASGI support. Return an ASGI 3 callable.
Avoids making django.core.handlers.ASGIHandler a public API, in case the
internal implementation changes or moves in the future.
"""
django.setup(set_prefix=False)
return ASGIHandler()
|
castiel248/Convert
|
Lib/site-packages/django/core/asgi.py
|
Python
|
mit
| 386 |
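The conventional project-level `asgi.py` built on `get_asgi_application()` (sketch; `myproject` is hypothetical):

```python
import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")

application = get_asgi_application()
```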
"""
Caching framework.
This package defines a set of cache backends that all conform to a simple API.
In a nutshell, a cache is a set of values -- which can be any object that
may be pickled -- identified by string keys. For the complete API, see
the abstract BaseCache class in django.core.cache.backends.base.
Client code should use the `cache` variable defined here to access the default
cache backend and look up non-default cache backends in the `caches` dict-like
object.
See docs/topics/cache.txt for information on the public API.
"""
from django.core import signals
from django.core.cache.backends.base import (
BaseCache,
CacheKeyWarning,
InvalidCacheBackendError,
InvalidCacheKey,
)
from django.utils.connection import BaseConnectionHandler, ConnectionProxy
from django.utils.module_loading import import_string
__all__ = [
"cache",
"caches",
"DEFAULT_CACHE_ALIAS",
"InvalidCacheBackendError",
"CacheKeyWarning",
"BaseCache",
"InvalidCacheKey",
]
DEFAULT_CACHE_ALIAS = "default"
class CacheHandler(BaseConnectionHandler):
settings_name = "CACHES"
exception_class = InvalidCacheBackendError
def create_connection(self, alias):
params = self.settings[alias].copy()
backend = params.pop("BACKEND")
location = params.pop("LOCATION", "")
try:
backend_cls = import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError(
"Could not find backend '%s': %s" % (backend, e)
) from e
return backend_cls(location, params)
caches = CacheHandler()
cache = ConnectionProxy(caches, DEFAULT_CACHE_ALIAS)
def close_caches(**kwargs):
# Some caches need to do a cleanup at the end of a request cycle. If not
# implemented in a particular backend cache.close() is a no-op.
caches.close_all()
signals.request_finished.connect(close_caches)
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/__init__.py
|
Python
|
mit
| 1,928 |
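A standalone sketch of the public entry points defined above, `cache` and `caches`, configured with the local-memory backend so the snippet runs without a project:

```python
from django.conf import settings

# Configure before the first cache access; imports themselves are lazy.
settings.configure(
    CACHES={"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
)

from django.core.cache import cache, caches

cache.set("greeting", "hello", timeout=30)
print(cache.get("greeting"))              # hello
print(caches["default"].get("greeting"))  # hello: same default backend
```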
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/__init__.py
|
Python
|
mit
| 0 |
|
"Base Cache class."
import time
import warnings
from asgiref.sync import sync_to_async
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
class InvalidCacheBackendError(ImproperlyConfigured):
pass
class CacheKeyWarning(RuntimeWarning):
pass
class InvalidCacheKey(ValueError):
pass
# Sentinel object to ensure that omitting the `timeout` argument results in
# the default timeout.
DEFAULT_TIMEOUT = object()
# Memcached does not accept keys longer than this.
MEMCACHE_MAX_KEY_LENGTH = 250
def default_key_func(key, key_prefix, version):
"""
Default function to generate keys.
Construct the key used by all other methods. By default, prepend
the `key_prefix`. KEY_FUNCTION can be used to specify an alternate
function with custom key making behavior.
"""
return "%s:%s:%s" % (key_prefix, version, key)
def get_key_func(key_func):
"""
Function to decide which key function to use.
Default to ``default_key_func``.
"""
if key_func is not None:
if callable(key_func):
return key_func
else:
return import_string(key_func)
return default_key_func
class BaseCache:
_missing_key = object()
def __init__(self, params):
timeout = params.get("timeout", params.get("TIMEOUT", 300))
if timeout is not None:
try:
timeout = int(timeout)
except (ValueError, TypeError):
timeout = 300
self.default_timeout = timeout
options = params.get("OPTIONS", {})
max_entries = params.get("max_entries", options.get("MAX_ENTRIES", 300))
try:
self._max_entries = int(max_entries)
except (ValueError, TypeError):
self._max_entries = 300
cull_frequency = params.get("cull_frequency", options.get("CULL_FREQUENCY", 3))
try:
self._cull_frequency = int(cull_frequency)
except (ValueError, TypeError):
self._cull_frequency = 3
self.key_prefix = params.get("KEY_PREFIX", "")
self.version = params.get("VERSION", 1)
self.key_func = get_key_func(params.get("KEY_FUNCTION"))
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
"""
Return the timeout value usable by this backend based upon the provided
timeout.
"""
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
elif timeout == 0:
# ticket 21147 - avoid time.time() related precision issues
timeout = -1
return None if timeout is None else time.time() + timeout
def make_key(self, key, version=None):
"""
Construct the key used by all other methods. By default, use the
        key_func to generate a key (which, by default, prepends the
        ``key_prefix`` and ``version``). A different key function can be provided
at the time of cache construction; alternatively, you can subclass the
cache backend to provide custom key making behavior.
"""
if version is None:
version = self.version
return self.key_func(key, self.key_prefix, version)
def validate_key(self, key):
"""
Warn about keys that would not be portable to the memcached
backend. This encourages (but does not force) writing backend-portable
cache code.
"""
for warning in memcache_key_warnings(key):
warnings.warn(warning, CacheKeyWarning)
def make_and_validate_key(self, key, version=None):
"""Helper to make and validate keys."""
key = self.make_key(key, version=version)
self.validate_key(key)
return key
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a value in the cache if the key does not already exist. If
timeout is given, use that timeout for the key; otherwise use the
default cache timeout.
Return True if the value was stored, False otherwise.
"""
raise NotImplementedError(
"subclasses of BaseCache must provide an add() method"
)
async def aadd(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
return await sync_to_async(self.add, thread_sensitive=True)(
key, value, timeout, version
)
def get(self, key, default=None, version=None):
"""
Fetch a given key from the cache. If the key does not exist, return
default, which itself defaults to None.
"""
raise NotImplementedError("subclasses of BaseCache must provide a get() method")
async def aget(self, key, default=None, version=None):
return await sync_to_async(self.get, thread_sensitive=True)(
key, default, version
)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a value in the cache. If timeout is given, use that timeout for the
key; otherwise use the default cache timeout.
"""
raise NotImplementedError("subclasses of BaseCache must provide a set() method")
async def aset(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
return await sync_to_async(self.set, thread_sensitive=True)(
key, value, timeout, version
)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
"""
Update the key's expiry time using timeout. Return True if successful
or False if the key does not exist.
"""
raise NotImplementedError(
"subclasses of BaseCache must provide a touch() method"
)
async def atouch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
return await sync_to_async(self.touch, thread_sensitive=True)(
key, timeout, version
)
def delete(self, key, version=None):
"""
Delete a key from the cache and return whether it succeeded, failing
silently.
"""
raise NotImplementedError(
"subclasses of BaseCache must provide a delete() method"
)
async def adelete(self, key, version=None):
return await sync_to_async(self.delete, thread_sensitive=True)(key, version)
def get_many(self, keys, version=None):
"""
Fetch a bunch of keys from the cache. For certain backends (memcached,
pgsql) this can be *much* faster when fetching multiple values.
Return a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the response dict.
"""
d = {}
for k in keys:
val = self.get(k, self._missing_key, version=version)
if val is not self._missing_key:
d[k] = val
return d
async def aget_many(self, keys, version=None):
"""See get_many()."""
d = {}
for k in keys:
val = await self.aget(k, self._missing_key, version=version)
if val is not self._missing_key:
d[k] = val
return d
def get_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None):
"""
Fetch a given key from the cache. If the key does not exist,
add the key and set it to the default value. The default value can
also be any callable. If timeout is given, use that timeout for the
key; otherwise use the default cache timeout.
Return the value of the key stored or retrieved.
"""
val = self.get(key, self._missing_key, version=version)
if val is self._missing_key:
if callable(default):
default = default()
self.add(key, default, timeout=timeout, version=version)
# Fetch the value again to avoid a race condition if another caller
# added a value between the first get() and the add() above.
return self.get(key, default, version=version)
return val
async def aget_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None):
"""See get_or_set()."""
val = await self.aget(key, self._missing_key, version=version)
if val is self._missing_key:
if callable(default):
default = default()
await self.aadd(key, default, timeout=timeout, version=version)
# Fetch the value again to avoid a race condition if another caller
# added a value between the first aget() and the aadd() above.
return await self.aget(key, default, version=version)
return val
def has_key(self, key, version=None):
"""
Return True if the key is in the cache and has not expired.
"""
return (
self.get(key, self._missing_key, version=version) is not self._missing_key
)
async def ahas_key(self, key, version=None):
return (
await self.aget(key, self._missing_key, version=version)
is not self._missing_key
)
def incr(self, key, delta=1, version=None):
"""
Add delta to value in the cache. If the key does not exist, raise a
ValueError exception.
"""
value = self.get(key, self._missing_key, version=version)
if value is self._missing_key:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
self.set(key, new_value, version=version)
return new_value
async def aincr(self, key, delta=1, version=None):
"""See incr()."""
value = await self.aget(key, self._missing_key, version=version)
if value is self._missing_key:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
await self.aset(key, new_value, version=version)
return new_value
def decr(self, key, delta=1, version=None):
"""
Subtract delta from value in the cache. If the key does not exist, raise
a ValueError exception.
"""
return self.incr(key, -delta, version=version)
async def adecr(self, key, delta=1, version=None):
return await self.aincr(key, -delta, version=version)
def __contains__(self, key):
"""
Return True if the key is in the cache and has not expired.
"""
# This is a separate method, rather than just a copy of has_key(),
# so that it always has the same functionality as has_key(), even
# if a subclass overrides it.
return self.has_key(key)
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
"""
Set a bunch of values in the cache at once from a dict of key/value
pairs. For certain backends (memcached), this is much more efficient
than calling set() multiple times.
If timeout is given, use that timeout for the key; otherwise use the
default cache timeout.
On backends that support it, return a list of keys that failed
insertion, or an empty list if all keys were inserted successfully.
"""
for key, value in data.items():
self.set(key, value, timeout=timeout, version=version)
return []
async def aset_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
for key, value in data.items():
await self.aset(key, value, timeout=timeout, version=version)
return []
def delete_many(self, keys, version=None):
"""
Delete a bunch of values in the cache at once. For certain backends
(memcached), this is much more efficient than calling delete() multiple
times.
"""
for key in keys:
self.delete(key, version=version)
async def adelete_many(self, keys, version=None):
for key in keys:
await self.adelete(key, version=version)
def clear(self):
"""Remove *all* values from the cache at once."""
raise NotImplementedError(
"subclasses of BaseCache must provide a clear() method"
)
async def aclear(self):
return await sync_to_async(self.clear, thread_sensitive=True)()
def incr_version(self, key, delta=1, version=None):
"""
Add delta to the cache version for the supplied key. Return the new
version.
"""
if version is None:
version = self.version
value = self.get(key, self._missing_key, version=version)
if value is self._missing_key:
raise ValueError("Key '%s' not found" % key)
self.set(key, value, version=version + delta)
self.delete(key, version=version)
return version + delta
async def aincr_version(self, key, delta=1, version=None):
"""See incr_version()."""
if version is None:
version = self.version
value = await self.aget(key, self._missing_key, version=version)
if value is self._missing_key:
raise ValueError("Key '%s' not found" % key)
await self.aset(key, value, version=version + delta)
await self.adelete(key, version=version)
return version + delta
def decr_version(self, key, delta=1, version=None):
"""
Subtract delta from the cache version for the supplied key. Return the
new version.
"""
return self.incr_version(key, -delta, version)
async def adecr_version(self, key, delta=1, version=None):
return await self.aincr_version(key, -delta, version)
def close(self, **kwargs):
"""Close the cache connection"""
pass
async def aclose(self, **kwargs):
pass
def memcache_key_warnings(key):
if len(key) > MEMCACHE_MAX_KEY_LENGTH:
yield (
"Cache key will cause errors if used with memcached: %r "
"(longer than %s)" % (key, MEMCACHE_MAX_KEY_LENGTH)
)
for char in key:
if ord(char) < 33 or ord(char) == 127:
yield (
"Cache key contains characters that will cause errors if "
"used with memcached: %r" % key
)
break
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/base.py
|
Python
|
mit
| 14,227 |
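The key layout produced by `default_key_func()` above is `prefix:version:key`; `incr_version()` works by copying a value from one such name to the next and deleting the old one. A standalone check (assumes Django is installed; no settings needed):

```python
from django.core.cache.backends.base import default_key_func

# Signature is (key, key_prefix, version).
print(default_key_func("user:42", "site1", 1))  # site1:1:user:42
print(default_key_func("user:42", "site1", 2))  # site1:2:user:42
```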
"Database cache backend."
import base64
import pickle
from datetime import datetime, timezone
from django.conf import settings
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.db import DatabaseError, connections, models, router, transaction
from django.utils.timezone import now as tz_now
class Options:
"""A class that will quack like a Django model _meta class.
This allows cache operations to be controlled by the router
"""
def __init__(self, table):
self.db_table = table
self.app_label = "django_cache"
self.model_name = "cacheentry"
self.verbose_name = "cache entry"
self.verbose_name_plural = "cache entries"
self.object_name = "CacheEntry"
self.abstract = False
self.managed = True
self.proxy = False
self.swapped = False
class BaseDatabaseCache(BaseCache):
def __init__(self, table, params):
super().__init__(params)
self._table = table
class CacheEntry:
_meta = Options(table)
self.cache_model_class = CacheEntry
class DatabaseCache(BaseDatabaseCache):
# This class uses cursors provided by the database connection. This means
# it reads expiration values as aware or naive datetimes, depending on the
# value of USE_TZ and whether the database supports time zones. The ORM's
# conversion and adaptation infrastructure is then used to avoid comparing
# aware and naive datetimes accidentally.
pickle_protocol = pickle.HIGHEST_PROTOCOL
def get(self, key, default=None, version=None):
return self.get_many([key], version).get(key, default)
def get_many(self, keys, version=None):
if not keys:
return {}
key_map = {
self.make_and_validate_key(key, version=version): key for key in keys
}
db = router.db_for_read(self.cache_model_class)
connection = connections[db]
quote_name = connection.ops.quote_name
table = quote_name(self._table)
with connection.cursor() as cursor:
cursor.execute(
"SELECT %s, %s, %s FROM %s WHERE %s IN (%s)"
% (
quote_name("cache_key"),
quote_name("value"),
quote_name("expires"),
table,
quote_name("cache_key"),
", ".join(["%s"] * len(key_map)),
),
list(key_map),
)
rows = cursor.fetchall()
result = {}
expired_keys = []
expression = models.Expression(output_field=models.DateTimeField())
converters = connection.ops.get_db_converters(
expression
) + expression.get_db_converters(connection)
for key, value, expires in rows:
for converter in converters:
expires = converter(expires, expression, connection)
if expires < tz_now():
expired_keys.append(key)
else:
value = connection.ops.process_clob(value)
value = pickle.loads(base64.b64decode(value.encode()))
result[key_map.get(key)] = value
self._base_delete_many(expired_keys)
return result
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
self._base_set("set", key, value, timeout)
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._base_set("add", key, value, timeout)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._base_set("touch", key, None, timeout)
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
timeout = self.get_backend_timeout(timeout)
db = router.db_for_write(self.cache_model_class)
connection = connections[db]
quote_name = connection.ops.quote_name
table = quote_name(self._table)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM %s" % table)
num = cursor.fetchone()[0]
now = tz_now()
now = now.replace(microsecond=0)
if timeout is None:
exp = datetime.max
else:
tz = timezone.utc if settings.USE_TZ else None
exp = datetime.fromtimestamp(timeout, tz=tz)
exp = exp.replace(microsecond=0)
if num > self._max_entries:
self._cull(db, cursor, now, num)
pickled = pickle.dumps(value, self.pickle_protocol)
# The DB column is expecting a string, so make sure the value is a
# string, not bytes. Refs #19274.
b64encoded = base64.b64encode(pickled).decode("latin1")
try:
# Note: typecasting for datetimes is needed by some 3rd party
# database backends. All core backends work without typecasting,
                # so be careful about changes here - the test suite will NOT
                # pick up regressions.
with transaction.atomic(using=db):
cursor.execute(
"SELECT %s, %s FROM %s WHERE %s = %%s"
% (
quote_name("cache_key"),
quote_name("expires"),
table,
quote_name("cache_key"),
),
[key],
)
result = cursor.fetchone()
if result:
current_expires = result[1]
expression = models.Expression(
output_field=models.DateTimeField()
)
for converter in connection.ops.get_db_converters(
expression
) + expression.get_db_converters(connection):
current_expires = converter(
current_expires, expression, connection
)
exp = connection.ops.adapt_datetimefield_value(exp)
if result and mode == "touch":
cursor.execute(
"UPDATE %s SET %s = %%s WHERE %s = %%s"
% (table, quote_name("expires"), quote_name("cache_key")),
[exp, key],
)
elif result and (
mode == "set" or (mode == "add" and current_expires < now)
):
cursor.execute(
"UPDATE %s SET %s = %%s, %s = %%s WHERE %s = %%s"
% (
table,
quote_name("value"),
quote_name("expires"),
quote_name("cache_key"),
),
[b64encoded, exp, key],
)
elif mode != "touch":
cursor.execute(
"INSERT INTO %s (%s, %s, %s) VALUES (%%s, %%s, %%s)"
% (
table,
quote_name("cache_key"),
quote_name("value"),
quote_name("expires"),
),
[key, b64encoded, exp],
)
else:
return False # touch failed.
except DatabaseError:
# To be threadsafe, updates/inserts are allowed to fail silently
return False
else:
return True
def delete(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
return self._base_delete_many([key])
def delete_many(self, keys, version=None):
keys = [self.make_and_validate_key(key, version=version) for key in keys]
self._base_delete_many(keys)
def _base_delete_many(self, keys):
if not keys:
return False
db = router.db_for_write(self.cache_model_class)
connection = connections[db]
quote_name = connection.ops.quote_name
table = quote_name(self._table)
with connection.cursor() as cursor:
cursor.execute(
"DELETE FROM %s WHERE %s IN (%s)"
% (
table,
quote_name("cache_key"),
", ".join(["%s"] * len(keys)),
),
keys,
)
return bool(cursor.rowcount)
def has_key(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
db = router.db_for_read(self.cache_model_class)
connection = connections[db]
quote_name = connection.ops.quote_name
now = tz_now().replace(microsecond=0, tzinfo=None)
with connection.cursor() as cursor:
cursor.execute(
"SELECT %s FROM %s WHERE %s = %%s and %s > %%s"
% (
quote_name("cache_key"),
quote_name(self._table),
quote_name("cache_key"),
quote_name("expires"),
),
[key, connection.ops.adapt_datetimefield_value(now)],
)
return cursor.fetchone() is not None
def _cull(self, db, cursor, now, num):
if self._cull_frequency == 0:
self.clear()
else:
connection = connections[db]
table = connection.ops.quote_name(self._table)
cursor.execute(
"DELETE FROM %s WHERE %s < %%s"
% (
table,
connection.ops.quote_name("expires"),
),
[connection.ops.adapt_datetimefield_value(now)],
)
deleted_count = cursor.rowcount
remaining_num = num - deleted_count
if remaining_num > self._max_entries:
cull_num = remaining_num // self._cull_frequency
cursor.execute(
connection.ops.cache_key_culling_sql() % table, [cull_num]
)
last_cache_key = cursor.fetchone()
if last_cache_key:
cursor.execute(
"DELETE FROM %s WHERE %s < %%s"
% (
table,
connection.ops.quote_name("cache_key"),
),
[last_cache_key[0]],
)
def clear(self):
db = router.db_for_write(self.cache_model_class)
connection = connections[db]
table = connection.ops.quote_name(self._table)
with connection.cursor() as cursor:
cursor.execute("DELETE FROM %s" % table)
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/db.py
|
Python
|
mit
| 11,372 |
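Configuring `DatabaseCache` (sketch; the table name is arbitrary and must be created first with `python manage.py createcachetable`). `MAX_ENTRIES` and `CULL_FREQUENCY` feed the culling logic in `_cull()` above:

```python
# In settings.py (values are hypothetical):
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.db.DatabaseCache",
        "LOCATION": "my_cache_table",  # hypothetical table name
        "TIMEOUT": 300,
        "OPTIONS": {
            "MAX_ENTRIES": 1000,  # cull once the row count exceeds this
            "CULL_FREQUENCY": 3,  # delete ~1/3 of remaining rows when culling
        },
    }
}
```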
"Dummy cache backend"
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
class DummyCache(BaseCache):
def __init__(self, host, *args, **kwargs):
super().__init__(*args, **kwargs)
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
self.make_and_validate_key(key, version=version)
return True
def get(self, key, default=None, version=None):
self.make_and_validate_key(key, version=version)
return default
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
self.make_and_validate_key(key, version=version)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
self.make_and_validate_key(key, version=version)
return False
def delete(self, key, version=None):
self.make_and_validate_key(key, version=version)
return False
def has_key(self, key, version=None):
self.make_and_validate_key(key, version=version)
return False
def clear(self):
pass
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/dummy.py
|
Python
|
mit
| 1,043 |
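`DummyCache` implements the full interface but never stores anything, which is handy in development. A standalone check:

```python
from django.conf import settings

settings.configure(
    CACHES={"default": {"BACKEND": "django.core.cache.backends.dummy.DummyCache"}}
)

from django.core.cache import cache

cache.set("k", "v")
print(cache.get("k"))              # None: nothing was stored
print(cache.get("k", "fallback"))  # fallback
```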
"File-based cache backend"
import glob
import os
import pickle
import random
import tempfile
import time
import zlib
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.core.files import locks
from django.core.files.move import file_move_safe
from django.utils.crypto import md5
class FileBasedCache(BaseCache):
cache_suffix = ".djcache"
pickle_protocol = pickle.HIGHEST_PROTOCOL
def __init__(self, dir, params):
super().__init__(params)
self._dir = os.path.abspath(dir)
self._createdir()
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
if self.has_key(key, version):
return False
self.set(key, value, timeout, version)
return True
def get(self, key, default=None, version=None):
fname = self._key_to_file(key, version)
try:
with open(fname, "rb") as f:
if not self._is_expired(f):
return pickle.loads(zlib.decompress(f.read()))
except FileNotFoundError:
pass
return default
def _write_content(self, file, timeout, value):
expiry = self.get_backend_timeout(timeout)
file.write(pickle.dumps(expiry, self.pickle_protocol))
file.write(zlib.compress(pickle.dumps(value, self.pickle_protocol)))
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
self._createdir() # Cache dir can be deleted at any time.
fname = self._key_to_file(key, version)
self._cull() # make some room if necessary
fd, tmp_path = tempfile.mkstemp(dir=self._dir)
renamed = False
try:
with open(fd, "wb") as f:
self._write_content(f, timeout, value)
file_move_safe(tmp_path, fname, allow_overwrite=True)
renamed = True
finally:
if not renamed:
os.remove(tmp_path)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
try:
with open(self._key_to_file(key, version), "r+b") as f:
try:
locks.lock(f, locks.LOCK_EX)
if self._is_expired(f):
return False
else:
previous_value = pickle.loads(zlib.decompress(f.read()))
f.seek(0)
self._write_content(f, timeout, previous_value)
return True
finally:
locks.unlock(f)
except FileNotFoundError:
return False
def delete(self, key, version=None):
return self._delete(self._key_to_file(key, version))
def _delete(self, fname):
if not fname.startswith(self._dir) or not os.path.exists(fname):
return False
try:
os.remove(fname)
except FileNotFoundError:
# The file may have been removed by another process.
return False
return True
def has_key(self, key, version=None):
fname = self._key_to_file(key, version)
try:
with open(fname, "rb") as f:
return not self._is_expired(f)
except FileNotFoundError:
return False
def _cull(self):
"""
        If max_entries has been reached, remove a random selection of
        num_entries / cull_frequency cache entries. A value of 0 for
        CULL_FREQUENCY means that the entire cache will be purged.
"""
filelist = self._list_cache_files()
num_entries = len(filelist)
if num_entries < self._max_entries:
return # return early if no culling is required
if self._cull_frequency == 0:
return self.clear() # Clear the cache when CULL_FREQUENCY = 0
# Delete a random selection of entries
filelist = random.sample(filelist, int(num_entries / self._cull_frequency))
for fname in filelist:
self._delete(fname)
def _createdir(self):
# Set the umask because os.makedirs() doesn't apply the "mode" argument
# to intermediate-level directories.
old_umask = os.umask(0o077)
try:
os.makedirs(self._dir, 0o700, exist_ok=True)
finally:
os.umask(old_umask)
def _key_to_file(self, key, version=None):
"""
Convert a key into a cache file path. Basically this is the
root cache path joined with the md5sum of the key and a suffix.
"""
key = self.make_and_validate_key(key, version=version)
return os.path.join(
self._dir,
"".join(
[
md5(key.encode(), usedforsecurity=False).hexdigest(),
self.cache_suffix,
]
),
)
def clear(self):
"""
Remove all the cache files.
"""
for fname in self._list_cache_files():
self._delete(fname)
def _is_expired(self, f):
"""
Take an open cache file `f` and delete it if it's expired.
"""
try:
exp = pickle.load(f)
except EOFError:
exp = 0 # An empty file is considered expired.
if exp is not None and exp < time.time():
f.close() # On Windows a file has to be closed before deleting
self._delete(f.name)
return True
return False
def _list_cache_files(self):
"""
Get a list of paths to all the cache files. These are all the files
in the root cache dir that end on the cache_suffix.
"""
return [
os.path.join(self._dir, fname)
for fname in glob.glob1(self._dir, "*%s" % self.cache_suffix)
]
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/filebased.py
|
Python
|
mit
| 5,800 |
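A standalone sketch of `FileBasedCache`: each key becomes `<dir>/<md5 of key>.djcache`, holding the pickled expiry followed by the zlib-compressed pickled value:

```python
import tempfile

from django.conf import settings

settings.configure(
    CACHES={
        "default": {
            "BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
            "LOCATION": tempfile.mkdtemp(),  # throwaway cache directory
        }
    }
)

from django.core.cache import cache

cache.set("answer", 42, timeout=60)
print(cache.get("answer"))  # 42
```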
"Thread-safe in-memory cache backend."
import pickle
import time
from collections import OrderedDict
from threading import Lock
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
# Global in-memory store of cache data. Keyed by name, to provide
# multiple named local memory caches.
_caches = {}
_expire_info = {}
_locks = {}
class LocMemCache(BaseCache):
pickle_protocol = pickle.HIGHEST_PROTOCOL
def __init__(self, name, params):
super().__init__(params)
self._cache = _caches.setdefault(name, OrderedDict())
self._expire_info = _expire_info.setdefault(name, {})
self._lock = _locks.setdefault(name, Lock())
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
pickled = pickle.dumps(value, self.pickle_protocol)
with self._lock:
if self._has_expired(key):
self._set(key, pickled, timeout)
return True
return False
def get(self, key, default=None, version=None):
key = self.make_and_validate_key(key, version=version)
with self._lock:
if self._has_expired(key):
self._delete(key)
return default
pickled = self._cache[key]
self._cache.move_to_end(key, last=False)
return pickle.loads(pickled)
def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
if len(self._cache) >= self._max_entries:
self._cull()
self._cache[key] = value
self._cache.move_to_end(key, last=False)
self._expire_info[key] = self.get_backend_timeout(timeout)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
pickled = pickle.dumps(value, self.pickle_protocol)
with self._lock:
self._set(key, pickled, timeout)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
with self._lock:
if self._has_expired(key):
return False
self._expire_info[key] = self.get_backend_timeout(timeout)
return True
def incr(self, key, delta=1, version=None):
key = self.make_and_validate_key(key, version=version)
with self._lock:
if self._has_expired(key):
self._delete(key)
raise ValueError("Key '%s' not found" % key)
pickled = self._cache[key]
value = pickle.loads(pickled)
new_value = value + delta
pickled = pickle.dumps(new_value, self.pickle_protocol)
self._cache[key] = pickled
self._cache.move_to_end(key, last=False)
return new_value
def has_key(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
with self._lock:
if self._has_expired(key):
self._delete(key)
return False
return True
def _has_expired(self, key):
exp = self._expire_info.get(key, -1)
return exp is not None and exp <= time.time()
def _cull(self):
if self._cull_frequency == 0:
self._cache.clear()
self._expire_info.clear()
else:
count = len(self._cache) // self._cull_frequency
for i in range(count):
key, _ = self._cache.popitem()
del self._expire_info[key]
def _delete(self, key):
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
return False
return True
def delete(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
with self._lock:
return self._delete(key)
def clear(self):
with self._lock:
self._cache.clear()
self._expire_info.clear()
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/locmem.py
|
Python
|
mit
| 4,035 |
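`LocMemCache` keeps one global store per `LOCATION` name, so two aliases with different names are fully isolated. A standalone sketch:

```python
from django.conf import settings

settings.configure(
    CACHES={
        "default": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
            "LOCATION": "store-a",
        },
        "other": {
            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
            "LOCATION": "store-b",
        },
    }
)

from django.core.cache import caches

caches["default"].set("k", 1)
print(caches["default"].get("k"))  # 1
print(caches["other"].get("k"))    # None: a separate named store
```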
"Memcached cache backend"
import re
import time
from django.core.cache.backends.base import (
DEFAULT_TIMEOUT,
BaseCache,
InvalidCacheKey,
memcache_key_warnings,
)
from django.utils.functional import cached_property
class BaseMemcachedCache(BaseCache):
def __init__(self, server, params, library, value_not_found_exception):
super().__init__(params)
if isinstance(server, str):
self._servers = re.split("[;,]", server)
else:
self._servers = server
# Exception type raised by the underlying client library for a
# nonexistent key.
self.LibraryValueNotFoundException = value_not_found_exception
self._lib = library
self._class = library.Client
self._options = params.get("OPTIONS") or {}
@property
def client_servers(self):
return self._servers
@cached_property
def _cache(self):
"""
Implement transparent thread-safe access to a memcached client.
"""
return self._class(self.client_servers, **self._options)
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
"""
Memcached deals with long (> 30 days) timeouts in a special
way. Call this function to obtain a safe value for your timeout.
"""
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
if timeout is None:
# Using 0 in memcache sets a non-expiring timeout.
return 0
elif int(timeout) == 0:
# Other cache backends treat 0 as set-and-expire. To achieve this
# in memcache backends, a negative timeout must be passed.
timeout = -1
if timeout > 2592000: # 60*60*24*30, 30 days
# See https://github.com/memcached/memcached/wiki/Programming#expiration
# "Expiration times can be set from 0, meaning "never expire", to
# 30 days. Any time higher than 30 days is interpreted as a Unix
# timestamp date. If you want to expire an object on January 1st of
# next year, this is how you do that."
#
# This means that we have to switch to absolute timestamps.
timeout += int(time.time())
return int(timeout)
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.add(key, value, self.get_backend_timeout(timeout))
def get(self, key, default=None, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.get(key, default)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
if not self._cache.set(key, value, self.get_backend_timeout(timeout)):
# Make sure the key doesn't keep its old value in case of failure
# to set (memcached's 1MB limit).
self._cache.delete(key)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return bool(self._cache.touch(key, self.get_backend_timeout(timeout)))
def delete(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
return bool(self._cache.delete(key))
def get_many(self, keys, version=None):
key_map = {
self.make_and_validate_key(key, version=version): key for key in keys
}
ret = self._cache.get_multi(key_map.keys())
return {key_map[k]: v for k, v in ret.items()}
def close(self, **kwargs):
# Many clients don't clean up connections properly.
self._cache.disconnect_all()
def incr(self, key, delta=1, version=None):
key = self.make_and_validate_key(key, version=version)
try:
# Memcached doesn't support negative delta.
if delta < 0:
val = self._cache.decr(key, -delta)
else:
val = self._cache.incr(key, delta)
# Normalize an exception raised by the underlying client library to
# ValueError in the event of a nonexistent key when calling
# incr()/decr().
except self.LibraryValueNotFoundException:
val = None
if val is None:
raise ValueError("Key '%s' not found" % key)
return val
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
safe_data = {}
original_keys = {}
for key, value in data.items():
safe_key = self.make_and_validate_key(key, version=version)
safe_data[safe_key] = value
original_keys[safe_key] = key
failed_keys = self._cache.set_multi(
safe_data, self.get_backend_timeout(timeout)
)
return [original_keys[k] for k in failed_keys]
def delete_many(self, keys, version=None):
keys = [self.make_and_validate_key(key, version=version) for key in keys]
self._cache.delete_multi(keys)
def clear(self):
self._cache.flush_all()
def validate_key(self, key):
for warning in memcache_key_warnings(key):
raise InvalidCacheKey(warning)
class PyLibMCCache(BaseMemcachedCache):
"An implementation of a cache binding using pylibmc"
def __init__(self, server, params):
import pylibmc
super().__init__(
server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound
)
@property
def client_servers(self):
output = []
for server in self._servers:
output.append(server[5:] if server.startswith("unix:") else server)
return output
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
if timeout == 0:
return self._cache.delete(key)
return self._cache.touch(key, self.get_backend_timeout(timeout))
def close(self, **kwargs):
# libmemcached manages its own connections. Don't call disconnect_all()
# as it resets the failover state and creates unnecessary reconnects.
pass
class PyMemcacheCache(BaseMemcachedCache):
"""An implementation of a cache binding using pymemcache."""
def __init__(self, server, params):
import pymemcache.serde
super().__init__(
server, params, library=pymemcache, value_not_found_exception=KeyError
)
self._class = self._lib.HashClient
self._options = {
"allow_unicode_keys": True,
"default_noreply": False,
"serde": pymemcache.serde.pickle_serde,
**self._options,
}
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/memcached.py
|
Python
|
mit
| 6,800 |
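For orientation, a hedged configuration sketch for the backends above (the backend dotted path comes from this module; the server address is a placeholder assumption), followed by the timeout mapping that get_backend_timeout() performs:

# settings.py sketch. PyLibMCCache also accepts "unix:/path" addresses,
# which its client_servers property strips to "/path".
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
        "LOCATION": ["127.0.0.1:11211"],  # placeholder address
    },
}

# Timeout mapping performed by get_backend_timeout():
#   None     -> 0                       (never expires)
#   0        -> -1                      (expire immediately)
#   3600     -> 3600                    (relative seconds, <= 30 days)
#   2592001  -> int(time.time()) + 2592001  (absolute Unix timestamp, > 30 days)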
"""Redis cache backend."""
import pickle
import random
import re
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class RedisSerializer:
def __init__(self, protocol=None):
self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol
def dumps(self, obj):
        # Only skip pickling for integers; int subclasses such as bool should
        # still be pickled.
if type(obj) is int:
return obj
return pickle.dumps(obj, self.protocol)
def loads(self, data):
try:
return int(data)
except ValueError:
return pickle.loads(data)
class RedisCacheClient:
def __init__(
self,
servers,
serializer=None,
pool_class=None,
parser_class=None,
**options,
):
import redis
self._lib = redis
self._servers = servers
self._pools = {}
self._client = self._lib.Redis
if isinstance(pool_class, str):
pool_class = import_string(pool_class)
self._pool_class = pool_class or self._lib.ConnectionPool
if isinstance(serializer, str):
serializer = import_string(serializer)
if callable(serializer):
serializer = serializer()
self._serializer = serializer or RedisSerializer()
if isinstance(parser_class, str):
parser_class = import_string(parser_class)
parser_class = parser_class or self._lib.connection.DefaultParser
self._pool_options = {"parser_class": parser_class, **options}
def _get_connection_pool_index(self, write):
# Write to the first server. Read from other servers if there are more,
# otherwise read from the first server.
if write or len(self._servers) == 1:
return 0
return random.randint(1, len(self._servers) - 1)
def _get_connection_pool(self, write):
index = self._get_connection_pool_index(write)
if index not in self._pools:
self._pools[index] = self._pool_class.from_url(
self._servers[index],
**self._pool_options,
)
return self._pools[index]
def get_client(self, key=None, *, write=False):
        # The key argument is accepted so that the method signature stays the
        # same; a custom cache client may use it to select the server, e.g.
        # for sharding.
pool = self._get_connection_pool(write)
return self._client(connection_pool=pool)
def add(self, key, value, timeout):
client = self.get_client(key, write=True)
value = self._serializer.dumps(value)
if timeout == 0:
if ret := bool(client.set(key, value, nx=True)):
client.delete(key)
return ret
else:
return bool(client.set(key, value, ex=timeout, nx=True))
def get(self, key, default):
client = self.get_client(key)
value = client.get(key)
return default if value is None else self._serializer.loads(value)
def set(self, key, value, timeout):
client = self.get_client(key, write=True)
value = self._serializer.dumps(value)
if timeout == 0:
client.delete(key)
else:
client.set(key, value, ex=timeout)
def touch(self, key, timeout):
client = self.get_client(key, write=True)
if timeout is None:
return bool(client.persist(key))
else:
return bool(client.expire(key, timeout))
def delete(self, key):
client = self.get_client(key, write=True)
return bool(client.delete(key))
def get_many(self, keys):
client = self.get_client(None)
ret = client.mget(keys)
return {
k: self._serializer.loads(v) for k, v in zip(keys, ret) if v is not None
}
def has_key(self, key):
client = self.get_client(key)
return bool(client.exists(key))
def incr(self, key, delta):
client = self.get_client(key, write=True)
if not client.exists(key):
raise ValueError("Key '%s' not found." % key)
return client.incr(key, delta)
def set_many(self, data, timeout):
client = self.get_client(None, write=True)
pipeline = client.pipeline()
pipeline.mset({k: self._serializer.dumps(v) for k, v in data.items()})
if timeout is not None:
            # Set the timeout for each key individually, as Redis does not
            # support a timeout with mset().
for key in data:
pipeline.expire(key, timeout)
pipeline.execute()
def delete_many(self, keys):
client = self.get_client(None, write=True)
client.delete(*keys)
def clear(self):
client = self.get_client(None, write=True)
return bool(client.flushdb())
class RedisCache(BaseCache):
def __init__(self, server, params):
super().__init__(params)
if isinstance(server, str):
self._servers = re.split("[;,]", server)
else:
self._servers = server
self._class = RedisCacheClient
self._options = params.get("OPTIONS", {})
@cached_property
def _cache(self):
return self._class(self._servers, **self._options)
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
        # The key will be made persistent if None is used as a timeout.
# Non-positive values will cause the key to be deleted.
return None if timeout is None else max(0, int(timeout))
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.add(key, value, self.get_backend_timeout(timeout))
def get(self, key, default=None, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.get(key, default)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
self._cache.set(key, value, self.get_backend_timeout(timeout))
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.touch(key, self.get_backend_timeout(timeout))
def delete(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.delete(key)
def get_many(self, keys, version=None):
key_map = {
self.make_and_validate_key(key, version=version): key for key in keys
}
ret = self._cache.get_many(key_map.keys())
return {key_map[k]: v for k, v in ret.items()}
def has_key(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.has_key(key)
def incr(self, key, delta=1, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.incr(key, delta)
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
if not data:
return []
safe_data = {}
for key, value in data.items():
key = self.make_and_validate_key(key, version=version)
safe_data[key] = value
self._cache.set_many(safe_data, self.get_backend_timeout(timeout))
return []
def delete_many(self, keys, version=None):
if not keys:
return
safe_keys = [self.make_and_validate_key(key, version=version) for key in keys]
self._cache.delete_many(safe_keys)
def clear(self):
return self._cache.clear()
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/backends/redis.py
|
Python
|
mit
| 7,924 |
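A hedged usage sketch for this backend (addresses and OPTIONS values are assumptions): the first LOCATION entry receives all writes, reads are spread across the remaining entries, and unrecognized OPTIONS keys are forwarded to the connection pool.

CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.redis.RedisCache",
        "LOCATION": [
            "redis://127.0.0.1:6379",  # index 0: writer
            "redis://127.0.0.1:6380",  # reader
        ],
        "OPTIONS": {"db": "10"},  # forwarded to ConnectionPool.from_url()
    },
}

# Note the serializer's integer fast path: plain ints are stored raw so that
# incr() works server-side, while bool (an int subclass) is still pickled.
# RedisSerializer().dumps(3) -> 3; RedisSerializer().dumps(True) -> pickled bytes.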
from django.utils.crypto import md5
TEMPLATE_FRAGMENT_KEY_TEMPLATE = "template.cache.%s.%s"
def make_template_fragment_key(fragment_name, vary_on=None):
hasher = md5(usedforsecurity=False)
if vary_on is not None:
for arg in vary_on:
hasher.update(str(arg).encode())
hasher.update(b":")
return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, hasher.hexdigest())
|
castiel248/Convert
|
Lib/site-packages/django/core/cache/utils.py
|
Python
|
mit
| 409 |
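A brief usage sketch: this helper derives the cache key used by the {% cache %} template tag, so a fragment can be invalidated by hand. The fragment name and vary-on value below are illustrative.

from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key

# Matches a template fragment like {% cache 500 sidebar username %}.
key = make_template_fragment_key("sidebar", ["alice"])
cache.delete(key)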
from .messages import (
CRITICAL,
DEBUG,
ERROR,
INFO,
WARNING,
CheckMessage,
Critical,
Debug,
Error,
Info,
Warning,
)
from .registry import Tags, register, run_checks, tag_exists
# Import these to force registration of checks
import django.core.checks.async_checks # NOQA isort:skip
import django.core.checks.caches # NOQA isort:skip
import django.core.checks.compatibility.django_4_0 # NOQA isort:skip
import django.core.checks.database # NOQA isort:skip
import django.core.checks.files # NOQA isort:skip
import django.core.checks.model_checks # NOQA isort:skip
import django.core.checks.security.base # NOQA isort:skip
import django.core.checks.security.csrf # NOQA isort:skip
import django.core.checks.security.sessions # NOQA isort:skip
import django.core.checks.templates # NOQA isort:skip
import django.core.checks.translation # NOQA isort:skip
import django.core.checks.urls # NOQA isort:skip
__all__ = [
"CheckMessage",
"Debug",
"Info",
"Warning",
"Error",
"Critical",
"DEBUG",
"INFO",
"WARNING",
"ERROR",
"CRITICAL",
"register",
"run_checks",
"tag_exists",
"Tags",
]
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/__init__.py
|
Python
|
mit
| 1,195 |
import os
from . import Error, Tags, register
E001 = Error(
"You should not set the DJANGO_ALLOW_ASYNC_UNSAFE environment variable in "
"deployment. This disables async safety protection.",
id="async.E001",
)
@register(Tags.async_support, deploy=True)
def check_async_unsafe(app_configs, **kwargs):
if os.environ.get("DJANGO_ALLOW_ASYNC_UNSAFE"):
return [E001]
return []
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/async_checks.py
|
Python
|
mit
| 403 |
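A minimal sketch of how this check behaves; it reads only the environment, so it can be exercised directly:

import os
from django.core.checks.async_checks import E001, check_async_unsafe

os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "1"
assert check_async_unsafe(None) == [E001]

del os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"]
assert check_async_unsafe(None) == []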
import pathlib
from django.conf import settings
from django.core.cache import DEFAULT_CACHE_ALIAS, caches
from django.core.cache.backends.filebased import FileBasedCache
from . import Error, Tags, Warning, register
E001 = Error(
"You must define a '%s' cache in your CACHES setting." % DEFAULT_CACHE_ALIAS,
id="caches.E001",
)
@register(Tags.caches)
def check_default_cache_is_configured(app_configs, **kwargs):
if DEFAULT_CACHE_ALIAS not in settings.CACHES:
return [E001]
return []
@register(Tags.caches, deploy=True)
def check_cache_location_not_exposed(app_configs, **kwargs):
errors = []
for name in ("MEDIA_ROOT", "STATIC_ROOT", "STATICFILES_DIRS"):
setting = getattr(settings, name, None)
if not setting:
continue
if name == "STATICFILES_DIRS":
paths = set()
for staticfiles_dir in setting:
if isinstance(staticfiles_dir, (list, tuple)):
_, staticfiles_dir = staticfiles_dir
paths.add(pathlib.Path(staticfiles_dir).resolve())
else:
paths = {pathlib.Path(setting).resolve()}
for alias in settings.CACHES:
cache = caches[alias]
if not isinstance(cache, FileBasedCache):
continue
cache_path = pathlib.Path(cache._dir).resolve()
if any(path == cache_path for path in paths):
relation = "matches"
elif any(path in cache_path.parents for path in paths):
relation = "is inside"
elif any(cache_path in path.parents for path in paths):
relation = "contains"
else:
continue
errors.append(
Warning(
f"Your '{alias}' cache configuration might expose your cache "
f"or lead to corruption of your data because its LOCATION "
f"{relation} {name}.",
id="caches.W002",
)
)
return errors
@register(Tags.caches)
def check_file_based_cache_is_absolute(app_configs, **kwargs):
errors = []
for alias, config in settings.CACHES.items():
cache = caches[alias]
if not isinstance(cache, FileBasedCache):
continue
if not pathlib.Path(config["LOCATION"]).is_absolute():
errors.append(
Warning(
f"Your '{alias}' cache LOCATION path is relative. Use an "
f"absolute path instead.",
id="caches.W003",
)
)
return errors
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/caches.py
|
Python
|
mit
| 2,643 |
castiel248/Convert
|
Lib/site-packages/django/core/checks/compatibility/__init__.py
|
Python
|
mit
| 0 |
|
from django.conf import settings
from .. import Error, Tags, register
@register(Tags.compatibility)
def check_csrf_trusted_origins(app_configs, **kwargs):
errors = []
for origin in settings.CSRF_TRUSTED_ORIGINS:
if "://" not in origin:
errors.append(
Error(
"As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
"setting must start with a scheme (usually http:// or "
"https://) but found %s. See the release notes for details."
% origin,
id="4_0.E001",
)
)
return errors
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/compatibility/django_4_0.py
|
Python
|
mit
| 657 |
from django.db import connections
from . import Tags, register
@register(Tags.database)
def check_database_backends(databases=None, **kwargs):
if databases is None:
return []
issues = []
for alias in databases:
conn = connections[alias]
issues.extend(conn.validation.check(**kwargs))
return issues
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/database.py
|
Python
|
mit
| 341 |
from pathlib import Path
from django.conf import settings
from . import Error, Tags, register
@register(Tags.files)
def check_setting_file_upload_temp_dir(app_configs, **kwargs):
setting = getattr(settings, "FILE_UPLOAD_TEMP_DIR", None)
if setting and not Path(setting).is_dir():
return [
Error(
f"The FILE_UPLOAD_TEMP_DIR setting refers to the nonexistent "
f"directory '{setting}'.",
id="files.E001",
),
]
return []
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/files.py
|
Python
|
mit
| 522 |
# Levels
DEBUG = 10
INFO = 20
WARNING = 30
ERROR = 40
CRITICAL = 50
class CheckMessage:
def __init__(self, level, msg, hint=None, obj=None, id=None):
if not isinstance(level, int):
raise TypeError("The first argument should be level.")
self.level = level
self.msg = msg
self.hint = hint
self.obj = obj
self.id = id
def __eq__(self, other):
return isinstance(other, self.__class__) and all(
getattr(self, attr) == getattr(other, attr)
for attr in ["level", "msg", "hint", "obj", "id"]
)
def __str__(self):
from django.db import models
if self.obj is None:
obj = "?"
elif isinstance(self.obj, models.base.ModelBase):
            # We need to hardcode the ModelBase and Field cases because their
            # __str__ methods don't return "applabel.modellabel" and cannot be
            # changed.
obj = self.obj._meta.label
else:
obj = str(self.obj)
id = "(%s) " % self.id if self.id else ""
hint = "\n\tHINT: %s" % self.hint if self.hint else ""
return "%s: %s%s%s" % (obj, id, self.msg, hint)
def __repr__(self):
return "<%s: level=%r, msg=%r, hint=%r, obj=%r, id=%r>" % (
self.__class__.__name__,
self.level,
self.msg,
self.hint,
self.obj,
self.id,
)
def is_serious(self, level=ERROR):
return self.level >= level
def is_silenced(self):
from django.conf import settings
return self.id in settings.SILENCED_SYSTEM_CHECKS
class Debug(CheckMessage):
def __init__(self, *args, **kwargs):
super().__init__(DEBUG, *args, **kwargs)
class Info(CheckMessage):
def __init__(self, *args, **kwargs):
super().__init__(INFO, *args, **kwargs)
class Warning(CheckMessage):
def __init__(self, *args, **kwargs):
super().__init__(WARNING, *args, **kwargs)
class Error(CheckMessage):
def __init__(self, *args, **kwargs):
super().__init__(ERROR, *args, **kwargs)
class Critical(CheckMessage):
def __init__(self, *args, **kwargs):
super().__init__(CRITICAL, *args, **kwargs)
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/messages.py
|
Python
|
mit
| 2,241 |
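A short sketch of how a CheckMessage renders; the id and texts below are illustrative:

from django.core.checks import Error

e = Error("Example problem.", hint="Example hint.", id="myapp.E001")
# With no obj attached, __str__ falls back to "?":
#   ?: (myapp.E001) Example problem.
#       HINT: Example hint.
print(str(e))
assert e.is_serious()  # ERROR (40) >= ERROR (40)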
import inspect
import types
from collections import defaultdict
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.core.checks import Error, Tags, Warning, register
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
db_table_models = defaultdict(list)
indexes = defaultdict(list)
constraints = defaultdict(list)
errors = []
if app_configs is None:
models = apps.get_models()
else:
models = chain.from_iterable(
app_config.get_models() for app_config in app_configs
)
for model in models:
if model._meta.managed and not model._meta.proxy:
db_table_models[model._meta.db_table].append(model._meta.label)
if not inspect.ismethod(model.check):
errors.append(
Error(
"The '%s.check()' class method is currently overridden by %r."
% (model.__name__, model.check),
obj=model,
id="models.E020",
)
)
else:
errors.extend(model.check(**kwargs))
for model_index in model._meta.indexes:
indexes[model_index.name].append(model._meta.label)
for model_constraint in model._meta.constraints:
constraints[model_constraint.name].append(model._meta.label)
if settings.DATABASE_ROUTERS:
error_class, error_id = Warning, "models.W035"
error_hint = (
"You have configured settings.DATABASE_ROUTERS. Verify that %s "
"are correctly routed to separate databases."
)
else:
error_class, error_id = Error, "models.E028"
error_hint = None
for db_table, model_labels in db_table_models.items():
if len(model_labels) != 1:
model_labels_str = ", ".join(model_labels)
errors.append(
error_class(
"db_table '%s' is used by multiple models: %s."
% (db_table, model_labels_str),
obj=db_table,
hint=(error_hint % model_labels_str) if error_hint else None,
id=error_id,
)
)
for index_name, model_labels in indexes.items():
if len(model_labels) > 1:
model_labels = set(model_labels)
errors.append(
Error(
"index name '%s' is not unique %s %s."
% (
index_name,
"for model" if len(model_labels) == 1 else "among models:",
", ".join(sorted(model_labels)),
),
id="models.E029" if len(model_labels) == 1 else "models.E030",
),
)
for constraint_name, model_labels in constraints.items():
if len(model_labels) > 1:
model_labels = set(model_labels)
errors.append(
Error(
"constraint name '%s' is not unique %s %s."
% (
constraint_name,
"for model" if len(model_labels) == 1 else "among models:",
", ".join(sorted(model_labels)),
),
id="models.E031" if len(model_labels) == 1 else "models.E032",
),
)
return errors
def _check_lazy_references(apps, ignore=None):
"""
Ensure all lazy (i.e. string) model references have been resolved.
Lazy references are used in various places throughout Django, primarily in
related fields and model signals. Identify those common cases and provide
more helpful error messages for them.
The ignore parameter is used by StateApps to exclude swappable models from
this check.
"""
pending_models = set(apps._pending_operations) - (ignore or set())
# Short circuit if there aren't any errors.
if not pending_models:
return []
from django.db.models import signals
model_signals = {
signal: name
for name, signal in vars(signals).items()
if isinstance(signal, signals.ModelSignal)
}
def extract_operation(obj):
"""
Take a callable found in Apps._pending_operations and identify the
original callable passed to Apps.lazy_model_operation(). If that
callable was a partial, return the inner, non-partial function and
any arguments and keyword arguments that were supplied with it.
obj is a callback defined locally in Apps.lazy_model_operation() and
annotated there with a `func` attribute so as to imitate a partial.
"""
operation, args, keywords = obj, [], {}
while hasattr(operation, "func"):
args.extend(getattr(operation, "args", []))
keywords.update(getattr(operation, "keywords", {}))
operation = operation.func
return operation, args, keywords
def app_model_error(model_key):
try:
apps.get_app_config(model_key[0])
model_error = "app '%s' doesn't provide model '%s'" % model_key
except LookupError:
model_error = "app '%s' isn't installed" % model_key[0]
return model_error
# Here are several functions which return CheckMessage instances for the
# most common usages of lazy operations throughout Django. These functions
# take the model that was being waited on as an (app_label, modelname)
# pair, the original lazy function, and its positional and keyword args as
# determined by extract_operation().
def field_error(model_key, func, args, keywords):
error_msg = (
"The field %(field)s was declared with a lazy reference "
"to '%(model)s', but %(model_error)s."
)
params = {
"model": ".".join(model_key),
"field": keywords["field"],
"model_error": app_model_error(model_key),
}
return Error(error_msg % params, obj=keywords["field"], id="fields.E307")
def signal_connect_error(model_key, func, args, keywords):
error_msg = (
"%(receiver)s was connected to the '%(signal)s' signal with a "
"lazy reference to the sender '%(model)s', but %(model_error)s."
)
receiver = args[0]
        # The receiver is either a function or an instance of a class
        # defining a `__call__` method.
if isinstance(receiver, types.FunctionType):
description = "The function '%s'" % receiver.__name__
elif isinstance(receiver, types.MethodType):
description = "Bound method '%s.%s'" % (
receiver.__self__.__class__.__name__,
receiver.__name__,
)
else:
description = "An instance of class '%s'" % receiver.__class__.__name__
signal_name = model_signals.get(func.__self__, "unknown")
params = {
"model": ".".join(model_key),
"receiver": description,
"signal": signal_name,
"model_error": app_model_error(model_key),
}
return Error(error_msg % params, obj=receiver.__module__, id="signals.E001")
def default_error(model_key, func, args, keywords):
error_msg = (
"%(op)s contains a lazy reference to %(model)s, but %(model_error)s."
)
params = {
"op": func,
"model": ".".join(model_key),
"model_error": app_model_error(model_key),
}
return Error(error_msg % params, obj=func, id="models.E022")
# Maps common uses of lazy operations to corresponding error functions
# defined above. If a key maps to None, no error will be produced.
# default_error() will be used for usages that don't appear in this dict.
known_lazy = {
("django.db.models.fields.related", "resolve_related_class"): field_error,
("django.db.models.fields.related", "set_managed"): None,
("django.dispatch.dispatcher", "connect"): signal_connect_error,
}
def build_error(model_key, func, args, keywords):
key = (func.__module__, func.__name__)
error_fn = known_lazy.get(key, default_error)
return error_fn(model_key, func, args, keywords) if error_fn else None
return sorted(
filter(
None,
(
build_error(model_key, *extract_operation(func))
for model_key in pending_models
for func in apps._pending_operations[model_key]
),
),
key=lambda error: error.msg,
)
@register(Tags.models)
def check_lazy_references(app_configs=None, **kwargs):
return _check_lazy_references(apps)
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/model_checks.py
|
Python
|
mit
| 8,830 |
from itertools import chain
from django.utils.inspect import func_accepts_kwargs
from django.utils.itercompat import is_iterable
class Tags:
"""
Built-in tags for internal checks.
"""
admin = "admin"
async_support = "async_support"
caches = "caches"
compatibility = "compatibility"
database = "database"
files = "files"
models = "models"
security = "security"
signals = "signals"
sites = "sites"
staticfiles = "staticfiles"
templates = "templates"
translation = "translation"
urls = "urls"
class CheckRegistry:
def __init__(self):
self.registered_checks = set()
self.deployment_checks = set()
def register(self, check=None, *tags, **kwargs):
"""
        Can be used as a function or a decorator. Register the given check
        function, labeled with the given `tags`. The function should accept
        **kwargs and return a list of Errors and Warnings.
Example::
registry = CheckRegistry()
@registry.register('mytag', 'anothertag')
def my_check(app_configs, **kwargs):
# ... perform checks and collect `errors` ...
return errors
# or
registry.register(my_check, 'mytag', 'anothertag')
"""
def inner(check):
if not func_accepts_kwargs(check):
raise TypeError(
"Check functions must accept keyword arguments (**kwargs)."
)
check.tags = tags
checks = (
self.deployment_checks
if kwargs.get("deploy")
else self.registered_checks
)
checks.add(check)
return check
if callable(check):
return inner(check)
else:
if check:
tags += (check,)
return inner
def run_checks(
self,
app_configs=None,
tags=None,
include_deployment_checks=False,
databases=None,
):
"""
        Run all registered checks and return a list of Errors and Warnings.
"""
errors = []
checks = self.get_checks(include_deployment_checks)
if tags is not None:
checks = [check for check in checks if not set(check.tags).isdisjoint(tags)]
for check in checks:
new_errors = check(app_configs=app_configs, databases=databases)
if not is_iterable(new_errors):
raise TypeError(
"The function %r did not return a list. All functions "
"registered with the checks registry must return a list." % check,
)
errors.extend(new_errors)
return errors
def tag_exists(self, tag, include_deployment_checks=False):
return tag in self.tags_available(include_deployment_checks)
def tags_available(self, deployment_checks=False):
return set(
chain.from_iterable(
check.tags for check in self.get_checks(deployment_checks)
)
)
def get_checks(self, include_deployment_checks=False):
checks = list(self.registered_checks)
if include_deployment_checks:
checks.extend(self.deployment_checks)
return checks
registry = CheckRegistry()
register = registry.register
run_checks = registry.run_checks
tag_exists = registry.tag_exists
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/registry.py
|
Python
|
mit
| 3,458 |
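A hedged sketch of registering a project-level check through the module-level register alias defined above; the tag and message are illustrative:

from django.core.checks import Warning, register, run_checks

@register("myapp")  # custom tag; built-in Tags.* values also work
def check_example(app_configs, **kwargs):
    # Return an empty list when everything is fine.
    return [Warning("Example warning.", id="myapp.W001")]

# run_checks(tags=["myapp"]) would now include the warning above.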
castiel248/Convert
|
Lib/site-packages/django/core/checks/security/__init__.py
|
Python
|
mit
| 0 |
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from .. import Error, Tags, Warning, register
CROSS_ORIGIN_OPENER_POLICY_VALUES = {
"same-origin",
"same-origin-allow-popups",
"unsafe-none",
}
REFERRER_POLICY_VALUES = {
"no-referrer",
"no-referrer-when-downgrade",
"origin",
"origin-when-cross-origin",
"same-origin",
"strict-origin",
"strict-origin-when-cross-origin",
"unsafe-url",
}
SECRET_KEY_INSECURE_PREFIX = "django-insecure-"
SECRET_KEY_MIN_LENGTH = 50
SECRET_KEY_MIN_UNIQUE_CHARACTERS = 5
SECRET_KEY_WARNING_MSG = (
f"Your %s has less than {SECRET_KEY_MIN_LENGTH} characters, less than "
f"{SECRET_KEY_MIN_UNIQUE_CHARACTERS} unique characters, or it's prefixed "
f"with '{SECRET_KEY_INSECURE_PREFIX}' indicating that it was generated "
f"automatically by Django. Please generate a long and random value, "
f"otherwise many of Django's security-critical features will be "
f"vulnerable to attack."
)
W001 = Warning(
"You do not have 'django.middleware.security.SecurityMiddleware' "
"in your MIDDLEWARE so the SECURE_HSTS_SECONDS, "
"SECURE_CONTENT_TYPE_NOSNIFF, SECURE_REFERRER_POLICY, "
"SECURE_CROSS_ORIGIN_OPENER_POLICY, and SECURE_SSL_REDIRECT settings will "
"have no effect.",
id="security.W001",
)
W002 = Warning(
"You do not have "
"'django.middleware.clickjacking.XFrameOptionsMiddleware' in your "
"MIDDLEWARE, so your pages will not be served with an "
"'x-frame-options' header. Unless there is a good reason for your "
"site to be served in a frame, you should consider enabling this "
"header to help prevent clickjacking attacks.",
id="security.W002",
)
W004 = Warning(
"You have not set a value for the SECURE_HSTS_SECONDS setting. "
"If your entire site is served only over SSL, you may want to consider "
"setting a value and enabling HTTP Strict Transport Security. "
"Be sure to read the documentation first; enabling HSTS carelessly "
"can cause serious, irreversible problems.",
id="security.W004",
)
W005 = Warning(
"You have not set the SECURE_HSTS_INCLUDE_SUBDOMAINS setting to True. "
"Without this, your site is potentially vulnerable to attack "
"via an insecure connection to a subdomain. Only set this to True if "
"you are certain that all subdomains of your domain should be served "
"exclusively via SSL.",
id="security.W005",
)
W006 = Warning(
"Your SECURE_CONTENT_TYPE_NOSNIFF setting is not set to True, "
"so your pages will not be served with an "
"'X-Content-Type-Options: nosniff' header. "
"You should consider enabling this header to prevent the "
"browser from identifying content types incorrectly.",
id="security.W006",
)
W008 = Warning(
"Your SECURE_SSL_REDIRECT setting is not set to True. "
"Unless your site should be available over both SSL and non-SSL "
"connections, you may want to either set this setting True "
"or configure a load balancer or reverse-proxy server "
"to redirect all connections to HTTPS.",
id="security.W008",
)
W009 = Warning(
SECRET_KEY_WARNING_MSG % "SECRET_KEY",
id="security.W009",
)
W018 = Warning(
"You should not have DEBUG set to True in deployment.",
id="security.W018",
)
W019 = Warning(
"You have "
"'django.middleware.clickjacking.XFrameOptionsMiddleware' in your "
"MIDDLEWARE, but X_FRAME_OPTIONS is not set to 'DENY'. "
"Unless there is a good reason for your site to serve other parts of "
"itself in a frame, you should change it to 'DENY'.",
id="security.W019",
)
W020 = Warning(
"ALLOWED_HOSTS must not be empty in deployment.",
id="security.W020",
)
W021 = Warning(
"You have not set the SECURE_HSTS_PRELOAD setting to True. Without this, "
"your site cannot be submitted to the browser preload list.",
id="security.W021",
)
W022 = Warning(
"You have not set the SECURE_REFERRER_POLICY setting. Without this, your "
"site will not send a Referrer-Policy header. You should consider "
"enabling this header to protect user privacy.",
id="security.W022",
)
E023 = Error(
"You have set the SECURE_REFERRER_POLICY setting to an invalid value.",
hint="Valid values are: {}.".format(", ".join(sorted(REFERRER_POLICY_VALUES))),
id="security.E023",
)
E024 = Error(
"You have set the SECURE_CROSS_ORIGIN_OPENER_POLICY setting to an invalid "
"value.",
hint="Valid values are: {}.".format(
", ".join(sorted(CROSS_ORIGIN_OPENER_POLICY_VALUES)),
),
id="security.E024",
)
W025 = Warning(SECRET_KEY_WARNING_MSG, id="security.W025")
def _security_middleware():
return "django.middleware.security.SecurityMiddleware" in settings.MIDDLEWARE
def _xframe_middleware():
return (
"django.middleware.clickjacking.XFrameOptionsMiddleware" in settings.MIDDLEWARE
)
@register(Tags.security, deploy=True)
def check_security_middleware(app_configs, **kwargs):
passed_check = _security_middleware()
return [] if passed_check else [W001]
@register(Tags.security, deploy=True)
def check_xframe_options_middleware(app_configs, **kwargs):
passed_check = _xframe_middleware()
return [] if passed_check else [W002]
@register(Tags.security, deploy=True)
def check_sts(app_configs, **kwargs):
passed_check = not _security_middleware() or settings.SECURE_HSTS_SECONDS
return [] if passed_check else [W004]
@register(Tags.security, deploy=True)
def check_sts_include_subdomains(app_configs, **kwargs):
passed_check = (
not _security_middleware()
or not settings.SECURE_HSTS_SECONDS
or settings.SECURE_HSTS_INCLUDE_SUBDOMAINS is True
)
return [] if passed_check else [W005]
@register(Tags.security, deploy=True)
def check_sts_preload(app_configs, **kwargs):
passed_check = (
not _security_middleware()
or not settings.SECURE_HSTS_SECONDS
or settings.SECURE_HSTS_PRELOAD is True
)
return [] if passed_check else [W021]
@register(Tags.security, deploy=True)
def check_content_type_nosniff(app_configs, **kwargs):
passed_check = (
not _security_middleware() or settings.SECURE_CONTENT_TYPE_NOSNIFF is True
)
return [] if passed_check else [W006]
@register(Tags.security, deploy=True)
def check_ssl_redirect(app_configs, **kwargs):
passed_check = not _security_middleware() or settings.SECURE_SSL_REDIRECT is True
return [] if passed_check else [W008]
def _check_secret_key(secret_key):
return (
len(set(secret_key)) >= SECRET_KEY_MIN_UNIQUE_CHARACTERS
and len(secret_key) >= SECRET_KEY_MIN_LENGTH
and not secret_key.startswith(SECRET_KEY_INSECURE_PREFIX)
)
@register(Tags.security, deploy=True)
def check_secret_key(app_configs, **kwargs):
try:
secret_key = settings.SECRET_KEY
except (ImproperlyConfigured, AttributeError):
passed_check = False
else:
passed_check = _check_secret_key(secret_key)
return [] if passed_check else [W009]
@register(Tags.security, deploy=True)
def check_secret_key_fallbacks(app_configs, **kwargs):
warnings = []
try:
fallbacks = settings.SECRET_KEY_FALLBACKS
except (ImproperlyConfigured, AttributeError):
warnings.append(Warning(W025.msg % "SECRET_KEY_FALLBACKS", id=W025.id))
else:
for index, key in enumerate(fallbacks):
if not _check_secret_key(key):
warnings.append(
Warning(W025.msg % f"SECRET_KEY_FALLBACKS[{index}]", id=W025.id)
)
return warnings
@register(Tags.security, deploy=True)
def check_debug(app_configs, **kwargs):
passed_check = not settings.DEBUG
return [] if passed_check else [W018]
@register(Tags.security, deploy=True)
def check_xframe_deny(app_configs, **kwargs):
passed_check = not _xframe_middleware() or settings.X_FRAME_OPTIONS == "DENY"
return [] if passed_check else [W019]
@register(Tags.security, deploy=True)
def check_allowed_hosts(app_configs, **kwargs):
return [] if settings.ALLOWED_HOSTS else [W020]
@register(Tags.security, deploy=True)
def check_referrer_policy(app_configs, **kwargs):
if _security_middleware():
if settings.SECURE_REFERRER_POLICY is None:
return [W022]
# Support a comma-separated string or iterable of values to allow fallback.
if isinstance(settings.SECURE_REFERRER_POLICY, str):
values = {v.strip() for v in settings.SECURE_REFERRER_POLICY.split(",")}
else:
values = set(settings.SECURE_REFERRER_POLICY)
if not values <= REFERRER_POLICY_VALUES:
return [E023]
return []
@register(Tags.security, deploy=True)
def check_cross_origin_opener_policy(app_configs, **kwargs):
if (
_security_middleware()
and settings.SECURE_CROSS_ORIGIN_OPENER_POLICY is not None
and settings.SECURE_CROSS_ORIGIN_OPENER_POLICY
not in CROSS_ORIGIN_OPENER_POLICY_VALUES
):
return [E024]
return []
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/security/base.py
|
Python
|
mit
| 9,140 |
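For reference, an illustrative settings fragment that satisfies the deploy checks above. Every value is an assumption to be reviewed for your site, not a recommendation:

DEBUG = False                                       # security.W018
ALLOWED_HOSTS = ["example.com"]                     # security.W020 (placeholder host)
SECURE_HSTS_SECONDS = 31536000                      # security.W004
SECURE_HSTS_INCLUDE_SUBDOMAINS = True               # security.W005
SECURE_HSTS_PRELOAD = True                          # security.W021
SECURE_CONTENT_TYPE_NOSNIFF = True                  # security.W006
SECURE_SSL_REDIRECT = True                          # security.W008
SECURE_REFERRER_POLICY = "same-origin"              # security.W022 / E023
SECURE_CROSS_ORIGIN_OPENER_POLICY = "same-origin"   # security.E024
X_FRAME_OPTIONS = "DENY"                            # security.W019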
import inspect
from django.conf import settings
from .. import Error, Tags, Warning, register
W003 = Warning(
"You don't appear to be using Django's built-in "
"cross-site request forgery protection via the middleware "
"('django.middleware.csrf.CsrfViewMiddleware' is not in your "
"MIDDLEWARE). Enabling the middleware is the safest approach "
"to ensure you don't leave any holes.",
id="security.W003",
)
W016 = Warning(
"You have 'django.middleware.csrf.CsrfViewMiddleware' in your "
"MIDDLEWARE, but you have not set CSRF_COOKIE_SECURE to True. "
"Using a secure-only CSRF cookie makes it more difficult for network "
"traffic sniffers to steal the CSRF token.",
id="security.W016",
)
def _csrf_middleware():
return "django.middleware.csrf.CsrfViewMiddleware" in settings.MIDDLEWARE
@register(Tags.security, deploy=True)
def check_csrf_middleware(app_configs, **kwargs):
passed_check = _csrf_middleware()
return [] if passed_check else [W003]
@register(Tags.security, deploy=True)
def check_csrf_cookie_secure(app_configs, **kwargs):
passed_check = (
settings.CSRF_USE_SESSIONS
or not _csrf_middleware()
or settings.CSRF_COOKIE_SECURE is True
)
return [] if passed_check else [W016]
@register(Tags.security)
def check_csrf_failure_view(app_configs, **kwargs):
from django.middleware.csrf import _get_failure_view
errors = []
try:
view = _get_failure_view()
except ImportError:
msg = (
"The CSRF failure view '%s' could not be imported."
% settings.CSRF_FAILURE_VIEW
)
errors.append(Error(msg, id="security.E102"))
else:
try:
inspect.signature(view).bind(None, reason=None)
except TypeError:
msg = (
"The CSRF failure view '%s' does not take the correct number of "
"arguments." % settings.CSRF_FAILURE_VIEW
)
errors.append(Error(msg, id="security.E101"))
return errors
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/security/csrf.py
|
Python
|
mit
| 2,055 |
from django.conf import settings
from .. import Tags, Warning, register
def add_session_cookie_message(message):
return message + (
" Using a secure-only session cookie makes it more difficult for "
"network traffic sniffers to hijack user sessions."
)
W010 = Warning(
add_session_cookie_message(
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
"but you have not set SESSION_COOKIE_SECURE to True."
),
id="security.W010",
)
W011 = Warning(
add_session_cookie_message(
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
"in your MIDDLEWARE, but you have not set "
"SESSION_COOKIE_SECURE to True."
),
id="security.W011",
)
W012 = Warning(
add_session_cookie_message("SESSION_COOKIE_SECURE is not set to True."),
id="security.W012",
)
def add_httponly_message(message):
return message + (
" Using an HttpOnly session cookie makes it more difficult for "
"cross-site scripting attacks to hijack user sessions."
)
W013 = Warning(
add_httponly_message(
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
"but you have not set SESSION_COOKIE_HTTPONLY to True.",
),
id="security.W013",
)
W014 = Warning(
add_httponly_message(
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
"in your MIDDLEWARE, but you have not set "
"SESSION_COOKIE_HTTPONLY to True."
),
id="security.W014",
)
W015 = Warning(
add_httponly_message("SESSION_COOKIE_HTTPONLY is not set to True."),
id="security.W015",
)
@register(Tags.security, deploy=True)
def check_session_cookie_secure(app_configs, **kwargs):
if settings.SESSION_COOKIE_SECURE is True:
return []
errors = []
if _session_app():
errors.append(W010)
if _session_middleware():
errors.append(W011)
if len(errors) > 1:
errors = [W012]
return errors
@register(Tags.security, deploy=True)
def check_session_cookie_httponly(app_configs, **kwargs):
if settings.SESSION_COOKIE_HTTPONLY is True:
return []
errors = []
if _session_app():
errors.append(W013)
if _session_middleware():
errors.append(W014)
if len(errors) > 1:
errors = [W015]
return errors
def _session_middleware():
return "django.contrib.sessions.middleware.SessionMiddleware" in settings.MIDDLEWARE
def _session_app():
return "django.contrib.sessions" in settings.INSTALLED_APPS
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/security/sessions.py
|
Python
|
mit
| 2,554 |
import copy
from collections import defaultdict
from django.conf import settings
from django.template.backends.django import get_template_tag_modules
from . import Error, Tags, Warning, register
E001 = Error(
"You have 'APP_DIRS': True in your TEMPLATES but also specify 'loaders' "
"in OPTIONS. Either remove APP_DIRS or remove the 'loaders' option.",
id="templates.E001",
)
E002 = Error(
"'string_if_invalid' in TEMPLATES OPTIONS must be a string but got: {} ({}).",
id="templates.E002",
)
W003 = Warning(
"{} is used for multiple template tag modules: {}",
id="templates.E003",
)
@register(Tags.templates)
def check_setting_app_dirs_loaders(app_configs, **kwargs):
return (
[E001]
if any(
conf.get("APP_DIRS") and "loaders" in conf.get("OPTIONS", {})
for conf in settings.TEMPLATES
)
else []
)
@register(Tags.templates)
def check_string_if_invalid_is_string(app_configs, **kwargs):
errors = []
for conf in settings.TEMPLATES:
string_if_invalid = conf.get("OPTIONS", {}).get("string_if_invalid", "")
if not isinstance(string_if_invalid, str):
error = copy.copy(E002)
error.msg = error.msg.format(
string_if_invalid, type(string_if_invalid).__name__
)
errors.append(error)
return errors
@register(Tags.templates)
def check_for_template_tags_with_the_same_name(app_configs, **kwargs):
errors = []
libraries = defaultdict(set)
for conf in settings.TEMPLATES:
custom_libraries = conf.get("OPTIONS", {}).get("libraries", {})
for module_name, module_path in custom_libraries.items():
libraries[module_name].add(module_path)
for module_name, module_path in get_template_tag_modules():
libraries[module_name].add(module_path)
for library_name, items in libraries.items():
if len(items) > 1:
errors.append(
Warning(
W003.msg.format(
repr(library_name),
", ".join(repr(item) for item in sorted(items)),
),
id=W003.id,
)
)
return errors
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/templates.py
|
Python
|
mit
| 2,257 |
from django.conf import settings
from django.utils.translation import get_supported_language_variant
from django.utils.translation.trans_real import language_code_re
from . import Error, Tags, register
E001 = Error(
"You have provided an invalid value for the LANGUAGE_CODE setting: {!r}.",
id="translation.E001",
)
E002 = Error(
"You have provided an invalid language code in the LANGUAGES setting: {!r}.",
id="translation.E002",
)
E003 = Error(
"You have provided an invalid language code in the LANGUAGES_BIDI setting: {!r}.",
id="translation.E003",
)
E004 = Error(
"You have provided a value for the LANGUAGE_CODE setting that is not in "
"the LANGUAGES setting.",
id="translation.E004",
)
@register(Tags.translation)
def check_setting_language_code(app_configs, **kwargs):
"""Error if LANGUAGE_CODE setting is invalid."""
tag = settings.LANGUAGE_CODE
if not isinstance(tag, str) or not language_code_re.match(tag):
return [Error(E001.msg.format(tag), id=E001.id)]
return []
@register(Tags.translation)
def check_setting_languages(app_configs, **kwargs):
"""Error if LANGUAGES setting is invalid."""
return [
Error(E002.msg.format(tag), id=E002.id)
for tag, _ in settings.LANGUAGES
if not isinstance(tag, str) or not language_code_re.match(tag)
]
@register(Tags.translation)
def check_setting_languages_bidi(app_configs, **kwargs):
"""Error if LANGUAGES_BIDI setting is invalid."""
return [
Error(E003.msg.format(tag), id=E003.id)
for tag in settings.LANGUAGES_BIDI
if not isinstance(tag, str) or not language_code_re.match(tag)
]
@register(Tags.translation)
def check_language_settings_consistent(app_configs, **kwargs):
"""Error if language settings are not consistent with each other."""
try:
get_supported_language_variant(settings.LANGUAGE_CODE)
except LookupError:
return [E004]
else:
return []
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/translation.py
|
Python
|
mit
| 1,990 |
from collections import Counter
from django.conf import settings
from . import Error, Tags, Warning, register
@register(Tags.urls)
def check_url_config(app_configs, **kwargs):
if getattr(settings, "ROOT_URLCONF", None):
from django.urls import get_resolver
resolver = get_resolver()
return check_resolver(resolver)
return []
def check_resolver(resolver):
"""
Recursively check the resolver.
"""
check_method = getattr(resolver, "check", None)
if check_method is not None:
return check_method()
elif not hasattr(resolver, "resolve"):
return get_warning_for_invalid_pattern(resolver)
else:
return []
@register(Tags.urls)
def check_url_namespaces_unique(app_configs, **kwargs):
"""
Warn if URL namespaces used in applications aren't unique.
"""
if not getattr(settings, "ROOT_URLCONF", None):
return []
from django.urls import get_resolver
resolver = get_resolver()
all_namespaces = _load_all_namespaces(resolver)
counter = Counter(all_namespaces)
non_unique_namespaces = [n for n, count in counter.items() if count > 1]
errors = []
for namespace in non_unique_namespaces:
errors.append(
Warning(
"URL namespace '{}' isn't unique. You may not be able to reverse "
"all URLs in this namespace".format(namespace),
id="urls.W005",
)
)
return errors
def _load_all_namespaces(resolver, parents=()):
"""
Recursively load all namespaces from URL patterns.
"""
url_patterns = getattr(resolver, "url_patterns", [])
namespaces = [
":".join(parents + (url.namespace,))
for url in url_patterns
if getattr(url, "namespace", None) is not None
]
for pattern in url_patterns:
namespace = getattr(pattern, "namespace", None)
current = parents
if namespace is not None:
current += (namespace,)
namespaces.extend(_load_all_namespaces(pattern, current))
return namespaces
def get_warning_for_invalid_pattern(pattern):
"""
    Return a list containing an error stating that the pattern is invalid.
describe_pattern() cannot be used here, because we cannot rely on the
urlpattern having regex or name attributes.
"""
if isinstance(pattern, str):
hint = (
"Try removing the string '{}'. The list of urlpatterns should not "
"have a prefix string as the first element.".format(pattern)
)
elif isinstance(pattern, tuple):
hint = "Try using path() instead of a tuple."
else:
hint = None
return [
Error(
"Your URL pattern {!r} is invalid. Ensure that urlpatterns is a list "
"of path() and/or re_path() instances.".format(pattern),
hint=hint,
id="urls.E004",
)
]
@register(Tags.urls)
def check_url_settings(app_configs, **kwargs):
errors = []
for name in ("STATIC_URL", "MEDIA_URL"):
value = getattr(settings, name)
if value and not value.endswith("/"):
errors.append(E006(name))
return errors
def E006(name):
return Error(
"The {} setting must end with a slash.".format(name),
id="urls.E006",
)
|
castiel248/Convert
|
Lib/site-packages/django/core/checks/urls.py
|
Python
|
mit
| 3,328 |
"""
Global Django exception and warning classes.
"""
import operator
from django.utils.hashable import make_hashable
class FieldDoesNotExist(Exception):
"""The requested model field does not exist"""
pass
class AppRegistryNotReady(Exception):
"""The django.apps registry is not populated yet"""
pass
class ObjectDoesNotExist(Exception):
"""The requested object does not exist"""
silent_variable_failure = True
class MultipleObjectsReturned(Exception):
"""The query returned multiple objects when only one was expected."""
pass
class SuspiciousOperation(Exception):
"""The user did something suspicious"""
class SuspiciousMultipartForm(SuspiciousOperation):
"""Suspect MIME request in multipart form data"""
pass
class SuspiciousFileOperation(SuspiciousOperation):
"""A Suspicious filesystem operation was attempted"""
pass
class DisallowedHost(SuspiciousOperation):
"""HTTP_HOST header contains invalid value"""
pass
class DisallowedRedirect(SuspiciousOperation):
"""Redirect to scheme not in allowed list"""
pass
class TooManyFieldsSent(SuspiciousOperation):
"""
The number of fields in a GET or POST request exceeded
settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.
"""
pass
class TooManyFilesSent(SuspiciousOperation):
"""
The number of fields in a GET or POST request exceeded
settings.DATA_UPLOAD_MAX_NUMBER_FILES.
"""
pass
class RequestDataTooBig(SuspiciousOperation):
"""
The size of the request (excluding any file uploads) exceeded
settings.DATA_UPLOAD_MAX_MEMORY_SIZE.
"""
pass
class RequestAborted(Exception):
"""The request was closed before it was completed, or timed out."""
pass
class BadRequest(Exception):
"""The request is malformed and cannot be processed."""
pass
class PermissionDenied(Exception):
"""The user did not have permission to do that"""
pass
class ViewDoesNotExist(Exception):
"""The requested view does not exist"""
pass
class MiddlewareNotUsed(Exception):
"""This middleware is not used in this server configuration"""
pass
class ImproperlyConfigured(Exception):
"""Django is somehow improperly configured"""
pass
class FieldError(Exception):
"""Some kind of problem with a model field."""
pass
NON_FIELD_ERRORS = "__all__"
class ValidationError(Exception):
"""An error while validating data."""
def __init__(self, message, code=None, params=None):
"""
The `message` argument can be a single error, a list of errors, or a
dictionary that maps field names to lists of errors. What we define as
an "error" can be either a simple string or an instance of
ValidationError with its message attribute set, and what we define as
list or dictionary can be an actual `list` or `dict` or an instance
of ValidationError with its `error_list` or `error_dict` attribute set.
"""
super().__init__(message, code, params)
if isinstance(message, ValidationError):
if hasattr(message, "error_dict"):
message = message.error_dict
elif not hasattr(message, "message"):
message = message.error_list
else:
message, code, params = message.message, message.code, message.params
if isinstance(message, dict):
self.error_dict = {}
for field, messages in message.items():
if not isinstance(messages, ValidationError):
messages = ValidationError(messages)
self.error_dict[field] = messages.error_list
elif isinstance(message, list):
self.error_list = []
for message in message:
# Normalize plain strings to instances of ValidationError.
if not isinstance(message, ValidationError):
message = ValidationError(message)
if hasattr(message, "error_dict"):
self.error_list.extend(sum(message.error_dict.values(), []))
else:
self.error_list.extend(message.error_list)
else:
self.message = message
self.code = code
self.params = params
self.error_list = [self]
@property
def message_dict(self):
# Trigger an AttributeError if this ValidationError
# doesn't have an error_dict.
getattr(self, "error_dict")
return dict(self)
@property
def messages(self):
if hasattr(self, "error_dict"):
return sum(dict(self).values(), [])
return list(self)
def update_error_dict(self, error_dict):
if hasattr(self, "error_dict"):
for field, error_list in self.error_dict.items():
error_dict.setdefault(field, []).extend(error_list)
else:
error_dict.setdefault(NON_FIELD_ERRORS, []).extend(self.error_list)
return error_dict
def __iter__(self):
if hasattr(self, "error_dict"):
for field, errors in self.error_dict.items():
yield field, list(ValidationError(errors))
else:
for error in self.error_list:
message = error.message
if error.params:
message %= error.params
yield str(message)
def __str__(self):
if hasattr(self, "error_dict"):
return repr(dict(self))
return repr(list(self))
def __repr__(self):
return "ValidationError(%s)" % self
def __eq__(self, other):
if not isinstance(other, ValidationError):
return NotImplemented
return hash(self) == hash(other)
def __hash__(self):
if hasattr(self, "message"):
return hash(
(
self.message,
self.code,
make_hashable(self.params),
)
)
if hasattr(self, "error_dict"):
return hash(make_hashable(self.error_dict))
return hash(tuple(sorted(self.error_list, key=operator.attrgetter("message"))))
class EmptyResultSet(Exception):
"""A database query predicate is impossible."""
pass
class FullResultSet(Exception):
"""A database query predicate is matches everything."""
pass
class SynchronousOnlyOperation(Exception):
"""The user tried to call a sync-only function from an async context."""
pass
|
castiel248/Convert
|
Lib/site-packages/django/core/exceptions.py
|
Python
|
mit
| 6,576 |
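A short sketch of ValidationError's normalization, using the accessors defined above; the messages are illustrative:

from django.core.exceptions import ValidationError

e = ValidationError({"name": ["This field is required."]})
assert e.message_dict == {"name": ["This field is required."]}
assert e.messages == ["This field is required."]

e = ValidationError("Invalid value: %(value)s", code="invalid", params={"value": 42})
assert list(e) == ["Invalid value: 42"]  # params interpolated on iteration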
from django.core.files.base import File
__all__ = ["File"]
|
castiel248/Convert
|
Lib/site-packages/django/core/files/__init__.py
|
Python
|
mit
| 60 |
import os
from io import BytesIO, StringIO, UnsupportedOperation
from django.core.files.utils import FileProxyMixin
from django.utils.functional import cached_property
class File(FileProxyMixin):
DEFAULT_CHUNK_SIZE = 64 * 2**10
def __init__(self, file, name=None):
self.file = file
if name is None:
name = getattr(file, "name", None)
self.name = name
if hasattr(file, "mode"):
self.mode = file.mode
def __str__(self):
return self.name or ""
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self or "None")
def __bool__(self):
return bool(self.name)
def __len__(self):
return self.size
@cached_property
def size(self):
if hasattr(self.file, "size"):
return self.file.size
if hasattr(self.file, "name"):
try:
return os.path.getsize(self.file.name)
except (OSError, TypeError):
pass
if hasattr(self.file, "tell") and hasattr(self.file, "seek"):
pos = self.file.tell()
self.file.seek(0, os.SEEK_END)
size = self.file.tell()
self.file.seek(pos)
return size
raise AttributeError("Unable to determine the file's size.")
def chunks(self, chunk_size=None):
"""
Read the file and yield chunks of ``chunk_size`` bytes (defaults to
``File.DEFAULT_CHUNK_SIZE``).
"""
chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE
try:
self.seek(0)
except (AttributeError, UnsupportedOperation):
pass
while True:
data = self.read(chunk_size)
if not data:
break
yield data
def multiple_chunks(self, chunk_size=None):
"""
Return ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
always return ``False`` -- there's no good reason to read from memory in
chunks.
"""
return self.size > (chunk_size or self.DEFAULT_CHUNK_SIZE)
def __iter__(self):
# Iterate over this file-like object by newlines
buffer_ = None
for chunk in self.chunks():
for line in chunk.splitlines(True):
if buffer_:
if endswith_cr(buffer_) and not equals_lf(line):
# Line split after a \r newline; yield buffer_.
yield buffer_
# Continue with line.
else:
# Line either split without a newline (line
# continues after buffer_) or with \r\n
# newline (line == b'\n').
line = buffer_ + line
# buffer_ handled, clear it.
buffer_ = None
# If this is the end of a \n or \r\n line, yield.
if endswith_lf(line):
yield line
else:
buffer_ = line
if buffer_ is not None:
yield buffer_
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
self.close()
def open(self, mode=None):
if not self.closed:
self.seek(0)
elif self.name and os.path.exists(self.name):
self.file = open(self.name, mode or self.mode)
else:
raise ValueError("The file cannot be reopened.")
return self
def close(self):
self.file.close()
class ContentFile(File):
"""
A File-like object that takes just raw content, rather than an actual file.
"""
def __init__(self, content, name=None):
stream_class = StringIO if isinstance(content, str) else BytesIO
super().__init__(stream_class(content), name=name)
self.size = len(content)
def __str__(self):
return "Raw content"
def __bool__(self):
return True
def open(self, mode=None):
self.seek(0)
return self
def close(self):
pass
def write(self, data):
self.__dict__.pop("size", None) # Clear the computed size.
return self.file.write(data)
def endswith_cr(line):
"""Return True if line (a text or bytestring) ends with '\r'."""
return line.endswith("\r" if isinstance(line, str) else b"\r")
def endswith_lf(line):
"""Return True if line (a text or bytestring) ends with '\n'."""
return line.endswith("\n" if isinstance(line, str) else b"\n")
def equals_lf(line):
"""Return True if line (a text or bytestring) equals '\n'."""
return line == ("\n" if isinstance(line, str) else b"\n")
|
castiel248/Convert
|
Lib/site-packages/django/core/files/base.py
|
Python
|
mit
| 4,811 |
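A quick sketch of ContentFile and chunked iteration; the content is illustrative:

from django.core.files.base import ContentFile

f = ContentFile(b"line one\nline two\n", name="notes.txt")
assert f.size == 18
assert list(f.chunks(chunk_size=8)) == [b"line one", b"\nline tw", b"o\n"]
f.open()  # rewind; ContentFile.open() just seeks to the start
assert list(f) == [b"line one\n", b"line two\n"]  # iteration splits on newlines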
"""
Utility functions for handling images.
Requires Pillow as you might imagine.
"""
import struct
import zlib
from django.core.files import File
class ImageFile(File):
"""
A mixin for use alongside django.core.files.base.File, which provides
additional features for dealing with images.
"""
@property
def width(self):
return self._get_image_dimensions()[0]
@property
def height(self):
return self._get_image_dimensions()[1]
def _get_image_dimensions(self):
if not hasattr(self, "_dimensions_cache"):
close = self.closed
self.open()
self._dimensions_cache = get_image_dimensions(self, close=close)
return self._dimensions_cache
def get_image_dimensions(file_or_path, close=False):
"""
Return the (width, height) of an image, given an open file or a path. Set
'close' to True to close the file at the end if it is initially in an open
state.
"""
from PIL import ImageFile as PillowImageFile
p = PillowImageFile.Parser()
if hasattr(file_or_path, "read"):
file = file_or_path
file_pos = file.tell()
file.seek(0)
else:
try:
file = open(file_or_path, "rb")
except OSError:
return (None, None)
close = True
try:
# Most of the time Pillow only needs a small chunk to parse the image
# and get the dimensions, but with some TIFF files Pillow needs to
# parse the whole file.
chunk_size = 1024
while 1:
data = file.read(chunk_size)
if not data:
break
try:
p.feed(data)
except zlib.error as e:
# ignore zlib complaining on truncated stream, just feed more
# data to parser (ticket #19457).
if e.args[0].startswith("Error -5"):
pass
else:
raise
except struct.error:
# Ignore PIL failing on a too short buffer when reads return
# less bytes than expected. Skip and feed more data to the
# parser (ticket #24544).
pass
except RuntimeError:
# e.g. "RuntimeError: could not create decoder object" for
# WebP files. A different chunk_size may work.
pass
if p.image:
return p.image.size
chunk_size *= 2
return (None, None)
finally:
if close:
file.close()
else:
file.seek(file_pos)
|
castiel248/Convert
|
Lib/site-packages/django/core/files/images.py
|
Python
|
mit
| 2,643 |
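A brief usage sketch; the path is a placeholder:

from django.core.files.images import get_image_dimensions

# Given a path, the function opens and closes the file itself and returns
# (None, None) if the file cannot be opened.
width, height = get_image_dimensions("logo.png")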
"""
Portable file locking utilities.
Based partially on an example by Jonathan Feinberg in the Python
Cookbook [1] (licensed under the Python Software License) and a ctypes port by
Anatoly Techtonik for Roundup [2] (license [3]).
[1] https://code.activestate.com/recipes/65203/
[2] https://sourceforge.net/p/roundup/code/ci/default/tree/roundup/backends/portalocker.py # NOQA
[3] https://sourceforge.net/p/roundup/code/ci/default/tree/COPYING.txt
Example Usage::
>>> from django.core.files import locks
>>> with open('./file', 'wb') as f:
... locks.lock(f, locks.LOCK_EX)
... f.write('Django')
"""
import os
__all__ = ("LOCK_EX", "LOCK_SH", "LOCK_NB", "lock", "unlock")
def _fd(f):
"""Get a filedescriptor from something which could be a file or an fd."""
return f.fileno() if hasattr(f, "fileno") else f
if os.name == "nt":
import msvcrt
from ctypes import (
POINTER,
Structure,
Union,
WinDLL,
byref,
c_int64,
c_ulong,
c_void_p,
sizeof,
)
from ctypes.wintypes import BOOL, DWORD, HANDLE
LOCK_SH = 0 # the default
LOCK_NB = 0x1 # LOCKFILE_FAIL_IMMEDIATELY
LOCK_EX = 0x2 # LOCKFILE_EXCLUSIVE_LOCK
# --- Adapted from the pyserial project ---
# detect size of ULONG_PTR
if sizeof(c_ulong) != sizeof(c_void_p):
ULONG_PTR = c_int64
else:
ULONG_PTR = c_ulong
PVOID = c_void_p
# --- Union inside Structure by stackoverflow:3480240 ---
class _OFFSET(Structure):
_fields_ = [("Offset", DWORD), ("OffsetHigh", DWORD)]
class _OFFSET_UNION(Union):
_anonymous_ = ["_offset"]
_fields_ = [("_offset", _OFFSET), ("Pointer", PVOID)]
class OVERLAPPED(Structure):
_anonymous_ = ["_offset_union"]
_fields_ = [
("Internal", ULONG_PTR),
("InternalHigh", ULONG_PTR),
("_offset_union", _OFFSET_UNION),
("hEvent", HANDLE),
]
LPOVERLAPPED = POINTER(OVERLAPPED)
# --- Define function prototypes for extra safety ---
kernel32 = WinDLL("kernel32")
LockFileEx = kernel32.LockFileEx
LockFileEx.restype = BOOL
LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, LPOVERLAPPED]
UnlockFileEx = kernel32.UnlockFileEx
UnlockFileEx.restype = BOOL
UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, LPOVERLAPPED]
def lock(f, flags):
hfile = msvcrt.get_osfhandle(_fd(f))
overlapped = OVERLAPPED()
ret = LockFileEx(hfile, flags, 0, 0, 0xFFFF0000, byref(overlapped))
return bool(ret)
def unlock(f):
hfile = msvcrt.get_osfhandle(_fd(f))
overlapped = OVERLAPPED()
ret = UnlockFileEx(hfile, 0, 0, 0xFFFF0000, byref(overlapped))
return bool(ret)
else:
try:
import fcntl
LOCK_SH = fcntl.LOCK_SH # shared lock
LOCK_NB = fcntl.LOCK_NB # non-blocking
LOCK_EX = fcntl.LOCK_EX
except (ImportError, AttributeError):
# File locking is not supported.
LOCK_EX = LOCK_SH = LOCK_NB = 0
# Dummy functions that don't do anything.
def lock(f, flags):
# File is not locked
return False
def unlock(f):
# File is unlocked
return True
else:
def lock(f, flags):
try:
fcntl.flock(_fd(f), flags)
return True
except BlockingIOError:
return False
def unlock(f):
fcntl.flock(_fd(f), fcntl.LOCK_UN)
return True
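# A usage sketch; the path is hypothetical. With LOCK_NB, lock() reports
# contention by returning False instead of blocking or raising, on both the
# Windows and the fcntl code paths above.
#
#     >>> from django.core.files import locks
#     >>> with open("/tmp/demo.lock", "wb") as f:
#     ...     if locks.lock(f, locks.LOCK_EX | locks.LOCK_NB):
#     ...         f.write(b"owned")
#     ...         locks.unlock(f)
#     ...     else:
#     ...         print("lock is held by another process")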
|
castiel248/Convert
|
Lib/site-packages/django/core/files/locks.py
|
Python
|
mit
| 3,613 |
"""
Move a file in the safest way possible::
>>> from django.core.files.move import file_move_safe
>>> file_move_safe("/tmp/old_file", "/tmp/new_file")
"""
import os
from shutil import copymode, copystat
from django.core.files import locks
__all__ = ["file_move_safe"]
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path, "samefile"):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return os.path.normcase(os.path.abspath(src)) == os.path.normcase(
os.path.abspath(dst)
)
def file_move_safe(
old_file_name, new_file_name, chunk_size=1024 * 64, allow_overwrite=False
):
"""
Move a file from one location to another in the safest way possible.
First, try ``os.rename``, which is simple but will break across filesystems.
If that fails, stream manually from one file to another in pure Python.
If the destination file exists and ``allow_overwrite`` is ``False``, raise
``FileExistsError``.
"""
# There's no reason to move if we don't have to.
if _samefile(old_file_name, new_file_name):
return
try:
if not allow_overwrite and os.access(new_file_name, os.F_OK):
raise FileExistsError(
"Destination file %s exists and allow_overwrite is False."
% new_file_name
)
os.rename(old_file_name, new_file_name)
return
except OSError:
# OSError happens with os.rename() if moving to another filesystem or
# when moving opened files on certain operating systems.
pass
# first open the old file, so that it won't go away
with open(old_file_name, "rb") as old_file:
# now open the new file, not forgetting allow_overwrite
fd = os.open(
new_file_name,
(
os.O_WRONLY
| os.O_CREAT
| getattr(os, "O_BINARY", 0)
| (os.O_EXCL if not allow_overwrite else 0)
),
)
try:
locks.lock(fd, locks.LOCK_EX)
current_chunk = None
while current_chunk != b"":
current_chunk = old_file.read(chunk_size)
os.write(fd, current_chunk)
finally:
locks.unlock(fd)
os.close(fd)
try:
copystat(old_file_name, new_file_name)
except PermissionError:
# Certain filesystems (e.g. CIFS) fail to copy the file's metadata if
# the type of the destination filesystem isn't the same as the source
# filesystem. This also happens with some SELinux-enabled systems.
# Ignore that, but try to set basic permissions.
try:
copymode(old_file_name, new_file_name)
except PermissionError:
pass
try:
os.remove(old_file_name)
except PermissionError as e:
# Certain operating systems (Cygwin and Windows)
# fail when deleting opened files, ignore it. (For the
# systems where this happens, temporary files will be auto-deleted
# on close anyway.)
if getattr(e, "winerror", 0) != 32:
raise
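# A usage sketch with throwaway temp paths. file_move_safe() tries os.rename()
# first; only when that raises OSError (e.g. a cross-filesystem move) does it
# fall back to the locked, chunked copy-and-delete above.
#
#     >>> import os, tempfile
#     >>> fd, src = tempfile.mkstemp()
#     >>> os.close(fd)
#     >>> file_move_safe(src, src + ".moved")
#     >>> os.path.exists(src), os.path.exists(src + ".moved")
#     (False, True)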
|
castiel248/Convert
|
Lib/site-packages/django/core/files/move.py
|
Python
|
mit
| 3,250 |
import warnings
from django.conf import DEFAULT_STORAGE_ALIAS, settings
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.functional import LazyObject
from django.utils.module_loading import import_string
from .base import Storage
from .filesystem import FileSystemStorage
from .handler import InvalidStorageError, StorageHandler
from .memory import InMemoryStorage
__all__ = (
"FileSystemStorage",
"InMemoryStorage",
"Storage",
"DefaultStorage",
"default_storage",
"get_storage_class",
"InvalidStorageError",
"StorageHandler",
"storages",
)
GET_STORAGE_CLASS_DEPRECATED_MSG = (
"django.core.files.storage.get_storage_class is deprecated in favor of "
"using django.core.files.storage.storages."
)
def get_storage_class(import_path=None):
warnings.warn(GET_STORAGE_CLASS_DEPRECATED_MSG, RemovedInDjango51Warning)
return import_string(import_path or settings.DEFAULT_FILE_STORAGE)
class DefaultStorage(LazyObject):
def _setup(self):
self._wrapped = storages[DEFAULT_STORAGE_ALIAS]
storages = StorageHandler()
default_storage = DefaultStorage()
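# A usage sketch (requires a configured settings module). Looking an alias up
# through `storages` is the supported replacement for the deprecated
# get_storage_class() above; unknown aliases raise InvalidStorageError.
#
#     >>> from django.core.files.storage import storages
#     >>> storage = storages["default"]  # FileSystemStorage unless overridden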
|
castiel248/Convert
|
Lib/site-packages/django/core/files/storage/__init__.py
|
Python
|
mit
| 1,147 |
import os
import pathlib
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File
from django.core.files.utils import validate_file_name
from django.utils.crypto import get_random_string
from django.utils.text import get_valid_filename
class Storage:
"""
A base storage class, providing some default behaviors that all other
storage systems can inherit or override, as necessary.
"""
# The following methods represent a public interface to private methods.
# These shouldn't be overridden by subclasses unless absolutely necessary.
def open(self, name, mode="rb"):
"""Retrieve the specified file from storage."""
return self._open(name, mode)
def save(self, name, content, max_length=None):
"""
Save new content to the file specified by name. The content should be
a proper File object or any Python file-like object, ready to be read
from the beginning.
"""
# Get the proper name for the file, as it will actually be saved.
if name is None:
name = content.name
if not hasattr(content, "chunks"):
content = File(content, name)
name = self.get_available_name(name, max_length=max_length)
name = self._save(name, content)
# Ensure that the name returned from the storage system is still valid.
validate_file_name(name, allow_relative_path=True)
return name
# These methods are part of the public API, with default implementations.
def get_valid_name(self, name):
"""
Return a filename, based on the provided filename, that's suitable for
use in the target storage system.
"""
return get_valid_filename(name)
def get_alternative_name(self, file_root, file_ext):
"""
        Return an alternative filename by adding an underscore and a random
        7-character alphanumeric string (before the file extension, if one
exists) to the filename.
"""
return "%s_%s%s" % (file_root, get_random_string(7), file_ext)
def get_available_name(self, name, max_length=None):
"""
Return a filename that's free on the target storage system and
available for new content to be written to.
"""
name = str(name).replace("\\", "/")
dir_name, file_name = os.path.split(name)
if ".." in pathlib.PurePath(dir_name).parts:
raise SuspiciousFileOperation(
"Detected path traversal attempt in '%s'" % dir_name
)
validate_file_name(file_name)
file_root, file_ext = os.path.splitext(file_name)
# If the filename already exists, generate an alternative filename
# until it doesn't exist.
# Truncate original name if required, so the new filename does not
# exceed the max_length.
while self.exists(name) or (max_length and len(name) > max_length):
# file_ext includes the dot.
name = os.path.join(
dir_name, self.get_alternative_name(file_root, file_ext)
)
if max_length is None:
continue
# Truncate file_root if max_length exceeded.
truncation = len(name) - max_length
if truncation > 0:
file_root = file_root[:-truncation]
# Entire file_root was truncated in attempt to find an
# available filename.
if not file_root:
raise SuspiciousFileOperation(
                    'Storage cannot find an available filename for "%s". '
"Please make sure that the corresponding file field "
'allows sufficient "max_length".' % name
)
name = os.path.join(
dir_name, self.get_alternative_name(file_root, file_ext)
)
return name
def generate_filename(self, filename):
"""
Validate the filename by calling get_valid_name() and return a filename
to be passed to the save() method.
"""
filename = str(filename).replace("\\", "/")
# `filename` may include a path as returned by FileField.upload_to.
dirname, filename = os.path.split(filename)
if ".." in pathlib.PurePath(dirname).parts:
raise SuspiciousFileOperation(
"Detected path traversal attempt in '%s'" % dirname
)
return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename)))
def path(self, name):
"""
Return a local filesystem path where the file can be retrieved using
Python's built-in open() function. Storage systems that can't be
accessed using open() should *not* implement this method.
"""
raise NotImplementedError("This backend doesn't support absolute paths.")
# The following methods form the public API for storage systems, but with
# no default implementations. Subclasses must implement *all* of these.
def delete(self, name):
"""
Delete the specified file from the storage system.
"""
raise NotImplementedError(
"subclasses of Storage must provide a delete() method"
)
def exists(self, name):
"""
Return True if a file referenced by the given name already exists in the
storage system, or False if the name is available for a new file.
"""
raise NotImplementedError(
"subclasses of Storage must provide an exists() method"
)
def listdir(self, path):
"""
List the contents of the specified path. Return a 2-tuple of lists:
the first item being directories, the second item being files.
"""
raise NotImplementedError(
"subclasses of Storage must provide a listdir() method"
)
def size(self, name):
"""
Return the total size, in bytes, of the file specified by name.
"""
raise NotImplementedError("subclasses of Storage must provide a size() method")
def url(self, name):
"""
Return an absolute URL where the file's contents can be accessed
directly by a web browser.
"""
raise NotImplementedError("subclasses of Storage must provide a url() method")
def get_accessed_time(self, name):
"""
Return the last accessed time (as a datetime) of the file specified by
name. The datetime will be timezone-aware if USE_TZ=True.
"""
raise NotImplementedError(
"subclasses of Storage must provide a get_accessed_time() method"
)
def get_created_time(self, name):
"""
Return the creation time (as a datetime) of the file specified by name.
The datetime will be timezone-aware if USE_TZ=True.
"""
raise NotImplementedError(
"subclasses of Storage must provide a get_created_time() method"
)
def get_modified_time(self, name):
"""
Return the last modified time (as a datetime) of the file specified by
name. The datetime will be timezone-aware if USE_TZ=True.
"""
raise NotImplementedError(
"subclasses of Storage must provide a get_modified_time() method"
)
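# A minimal sketch of a third-party backend (illustrative, not part of
# Django): only _open(), _save(), exists(), and delete() are filled in, which
# is enough for the inherited open()/save() plumbing, including the
# collision-avoiding get_available_name() above, to work. The dict-backed
# store here is purely a toy.
class DictStorage(Storage):
    """Toy backend keeping file contents in a plain dict."""
    def __init__(self):
        self._files = {}
    def _open(self, name, mode="rb"):
        import io
        return File(io.BytesIO(self._files[name]), name=name)
    def _save(self, name, content):
        self._files[name] = content.read()
        return name
    def exists(self, name):
        return name in self._files
    def delete(self, name):
        self._files.pop(name, None)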
|
castiel248/Convert
|
Lib/site-packages/django/core/files/storage/base.py
|
Python
|
mit
| 7,424 |
import os
from datetime import datetime, timezone
from urllib.parse import urljoin
from django.conf import settings
from django.core.files import File, locks
from django.core.files.move import file_move_safe
from django.core.signals import setting_changed
from django.utils._os import safe_join
from django.utils.deconstruct import deconstructible
from django.utils.encoding import filepath_to_uri
from django.utils.functional import cached_property
from .base import Storage
from .mixins import StorageSettingsMixin
@deconstructible(path="django.core.files.storage.FileSystemStorage")
class FileSystemStorage(Storage, StorageSettingsMixin):
"""
Standard filesystem storage
"""
# The combination of O_CREAT and O_EXCL makes os.open() raise OSError if
# the file already exists before it's opened.
OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0)
def __init__(
self,
location=None,
base_url=None,
file_permissions_mode=None,
directory_permissions_mode=None,
):
self._location = location
self._base_url = base_url
self._file_permissions_mode = file_permissions_mode
self._directory_permissions_mode = directory_permissions_mode
setting_changed.connect(self._clear_cached_properties)
@cached_property
def base_location(self):
return self._value_or_setting(self._location, settings.MEDIA_ROOT)
@cached_property
def location(self):
return os.path.abspath(self.base_location)
@cached_property
def base_url(self):
if self._base_url is not None and not self._base_url.endswith("/"):
self._base_url += "/"
return self._value_or_setting(self._base_url, settings.MEDIA_URL)
@cached_property
def file_permissions_mode(self):
return self._value_or_setting(
self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS
)
@cached_property
def directory_permissions_mode(self):
return self._value_or_setting(
self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
)
def _open(self, name, mode="rb"):
return File(open(self.path(name), mode))
def _save(self, name, content):
full_path = self.path(name)
# Create any intermediate directories that do not exist.
directory = os.path.dirname(full_path)
try:
if self.directory_permissions_mode is not None:
# Set the umask because os.makedirs() doesn't apply the "mode"
# argument to intermediate-level directories.
old_umask = os.umask(0o777 & ~self.directory_permissions_mode)
try:
os.makedirs(
directory, self.directory_permissions_mode, exist_ok=True
)
finally:
os.umask(old_umask)
else:
os.makedirs(directory, exist_ok=True)
except FileExistsError:
raise FileExistsError("%s exists and is not a directory." % directory)
# There's a potential race condition between get_available_name and
# saving the file; it's possible that two threads might return the
# same name, at which point all sorts of fun happens. So we need to
# try to create the file, but if it already exists we have to go back
# to get_available_name() and try again.
while True:
try:
# This file has a file path that we can move.
if hasattr(content, "temporary_file_path"):
file_move_safe(content.temporary_file_path(), full_path)
# This is a normal uploadedfile that we can stream.
else:
# The current umask value is masked out by os.open!
fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666)
_file = None
try:
locks.lock(fd, locks.LOCK_EX)
for chunk in content.chunks():
if _file is None:
mode = "wb" if isinstance(chunk, bytes) else "wt"
_file = os.fdopen(fd, mode)
_file.write(chunk)
finally:
locks.unlock(fd)
if _file is not None:
_file.close()
else:
os.close(fd)
except FileExistsError:
# A new name is needed if the file exists.
name = self.get_available_name(name)
full_path = self.path(name)
else:
# OK, the file save worked. Break out of the loop.
break
if self.file_permissions_mode is not None:
os.chmod(full_path, self.file_permissions_mode)
# Ensure the saved path is always relative to the storage root.
name = os.path.relpath(full_path, self.location)
# Ensure the moved file has the same gid as the storage root.
self._ensure_location_group_id(full_path)
# Store filenames with forward slashes, even on Windows.
return str(name).replace("\\", "/")
def _ensure_location_group_id(self, full_path):
if os.name == "posix":
file_gid = os.stat(full_path).st_gid
location_gid = os.stat(self.location).st_gid
if file_gid != location_gid:
try:
os.chown(full_path, uid=-1, gid=location_gid)
except PermissionError:
pass
def delete(self, name):
if not name:
raise ValueError("The name must be given to delete().")
name = self.path(name)
# If the file or directory exists, delete it from the filesystem.
try:
if os.path.isdir(name):
os.rmdir(name)
else:
os.remove(name)
except FileNotFoundError:
# FileNotFoundError is raised if the file or directory was removed
# concurrently.
pass
def exists(self, name):
return os.path.lexists(self.path(name))
def listdir(self, path):
path = self.path(path)
directories, files = [], []
with os.scandir(path) as entries:
for entry in entries:
if entry.is_dir():
directories.append(entry.name)
else:
files.append(entry.name)
return directories, files
def path(self, name):
return safe_join(self.location, name)
def size(self, name):
return os.path.getsize(self.path(name))
def url(self, name):
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
url = filepath_to_uri(name)
if url is not None:
url = url.lstrip("/")
return urljoin(self.base_url, url)
def _datetime_from_timestamp(self, ts):
"""
If timezone support is enabled, make an aware datetime object in UTC;
otherwise make a naive one in the local timezone.
"""
tz = timezone.utc if settings.USE_TZ else None
return datetime.fromtimestamp(ts, tz=tz)
def get_accessed_time(self, name):
return self._datetime_from_timestamp(os.path.getatime(self.path(name)))
def get_created_time(self, name):
return self._datetime_from_timestamp(os.path.getctime(self.path(name)))
def get_modified_time(self, name):
return self._datetime_from_timestamp(os.path.getmtime(self.path(name)))
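# A usage sketch; the location, base_url, and file name are hypothetical, and
# a configured settings module is assumed for the permission settings. save()
# returns the name actually used, which may differ from the requested one if
# it collided with an existing file.
#
#     >>> from django.core.files.base import ContentFile
#     >>> fs = FileSystemStorage(location="/tmp/demo-media", base_url="/media/")
#     >>> name = fs.save("hello.txt", ContentFile(b"hi"))
#     >>> fs.url(name)
#     '/media/hello.txt'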
|
castiel248/Convert
|
Lib/site-packages/django/core/files/storage/filesystem.py
|
Python
|
mit
| 7,792 |
from django.conf import DEFAULT_STORAGE_ALIAS, STATICFILES_STORAGE_ALIAS, settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class InvalidStorageError(ImproperlyConfigured):
pass
class StorageHandler:
def __init__(self, backends=None):
# backends is an optional dict of storage backend definitions
# (structured like settings.STORAGES).
self._backends = backends
self._storages = {}
@cached_property
def backends(self):
if self._backends is None:
self._backends = settings.STORAGES.copy()
# RemovedInDjango51Warning.
if settings.is_overridden("DEFAULT_FILE_STORAGE"):
self._backends[DEFAULT_STORAGE_ALIAS] = {
"BACKEND": settings.DEFAULT_FILE_STORAGE
}
if settings.is_overridden("STATICFILES_STORAGE"):
self._backends[STATICFILES_STORAGE_ALIAS] = {
"BACKEND": settings.STATICFILES_STORAGE
}
return self._backends
def __getitem__(self, alias):
try:
return self._storages[alias]
except KeyError:
try:
params = self.backends[alias]
except KeyError:
raise InvalidStorageError(
f"Could not find config for '{alias}' in settings.STORAGES."
)
storage = self.create_storage(params)
self._storages[alias] = storage
return storage
def create_storage(self, params):
params = params.copy()
backend = params.pop("BACKEND")
options = params.pop("OPTIONS", {})
try:
storage_cls = import_string(backend)
except ImportError as e:
raise InvalidStorageError(f"Could not find backend {backend!r}: {e}") from e
return storage_cls(**options)
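# A usage sketch: besides backing the module-level `storages` object,
# StorageHandler accepts an explicit STORAGES-shaped dict, which is convenient
# in tests. The "scratch" alias is arbitrary; a configured settings module is
# assumed by the InMemoryStorage constructor.
#
#     >>> handler = StorageHandler(
#     ...     {"scratch": {"BACKEND": "django.core.files.storage.InMemoryStorage"}}
#     ... )
#     >>> handler["scratch"]  # instantiated lazily and cached per alias
#     <django.core.files.storage.memory.InMemoryStorage object at 0x...>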
|
castiel248/Convert
|
Lib/site-packages/django/core/files/storage/handler.py
|
Python
|
mit
| 1,999 |
"""
Based on dj-inmemorystorage (BSD) by Cody Soyland, Seán Hayes, Tore Birkeland,
and Nick Presta.
"""
import errno
import io
import os
import pathlib
from urllib.parse import urljoin
from django.conf import settings
from django.core.files.base import ContentFile
from django.core.signals import setting_changed
from django.utils._os import safe_join
from django.utils.deconstruct import deconstructible
from django.utils.encoding import filepath_to_uri
from django.utils.functional import cached_property
from django.utils.timezone import now
from .base import Storage
from .mixins import StorageSettingsMixin
__all__ = ("InMemoryStorage",)
class TimingMixin:
def _initialize_times(self):
self.created_time = now()
self.accessed_time = self.created_time
self.modified_time = self.created_time
def _update_accessed_time(self):
self.accessed_time = now()
def _update_modified_time(self):
self.modified_time = now()
class InMemoryFileNode(ContentFile, TimingMixin):
"""
Helper class representing an in-memory file node.
Handle unicode/bytes conversion during I/O operations and record creation,
modification, and access times.
"""
def __init__(self, content="", name=""):
self.file = None
self._content_type = type(content)
self._initialize_stream()
self._initialize_times()
def open(self, mode):
self._convert_stream_content(mode)
self._update_accessed_time()
return super().open(mode)
def write(self, data):
super().write(data)
self._update_modified_time()
def _initialize_stream(self):
"""Initialize underlying stream according to the content type."""
self.file = io.BytesIO() if self._content_type == bytes else io.StringIO()
def _convert_stream_content(self, mode):
"""Convert actual file content according to the opening mode."""
new_content_type = bytes if "b" in mode else str
# No conversion needed.
if self._content_type == new_content_type:
return
content = self.file.getvalue()
content = content.encode() if isinstance(content, str) else content.decode()
self._content_type = new_content_type
self._initialize_stream()
self.file.write(content)
class InMemoryDirNode(TimingMixin):
"""
Helper class representing an in-memory directory node.
Handle path navigation of directory trees, creating missing nodes if
needed.
"""
def __init__(self):
self._children = {}
self._initialize_times()
def resolve(self, path, create_if_missing=False, leaf_cls=None, check_exists=True):
"""
Navigate current directory tree, returning node matching path or
creating a new one, if missing.
- path: path of the node to search
- create_if_missing: create nodes if not exist. Defaults to False.
- leaf_cls: expected type of leaf node. Defaults to None.
- check_exists: if True and the leaf node does not exist, raise a
FileNotFoundError. Defaults to True.
"""
path_segments = list(pathlib.Path(path).parts)
current_node = self
while path_segments:
path_segment = path_segments.pop(0)
# If current node is a file node and there are unprocessed
# segments, raise an error.
if isinstance(current_node, InMemoryFileNode):
path_segments = os.path.split(path)
current_path = "/".join(
path_segments[: path_segments.index(path_segment)]
)
raise NotADirectoryError(
errno.ENOTDIR, os.strerror(errno.ENOTDIR), current_path
)
current_node = current_node._resolve_child(
path_segment,
create_if_missing,
leaf_cls if len(path_segments) == 0 else InMemoryDirNode,
)
if current_node is None:
break
if current_node is None and check_exists:
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path)
# If a leaf_cls is not None, check if leaf node is of right type.
if leaf_cls and not isinstance(current_node, leaf_cls):
error_cls, error_code = (
(NotADirectoryError, errno.ENOTDIR)
if leaf_cls is InMemoryDirNode
else (IsADirectoryError, errno.EISDIR)
)
raise error_cls(error_code, os.strerror(error_code), path)
return current_node
def _resolve_child(self, path_segment, create_if_missing, child_cls):
if create_if_missing:
self._update_accessed_time()
self._update_modified_time()
return self._children.setdefault(path_segment, child_cls())
return self._children.get(path_segment)
def listdir(self):
directories, files = [], []
for name, entry in self._children.items():
if isinstance(entry, InMemoryDirNode):
directories.append(name)
else:
files.append(name)
return directories, files
def remove_child(self, name):
if name in self._children:
self._update_accessed_time()
self._update_modified_time()
del self._children[name]
@deconstructible(path="django.core.files.storage.InMemoryStorage")
class InMemoryStorage(Storage, StorageSettingsMixin):
"""A storage saving files in memory."""
def __init__(
self,
location=None,
base_url=None,
file_permissions_mode=None,
directory_permissions_mode=None,
):
self._location = location
self._base_url = base_url
self._file_permissions_mode = file_permissions_mode
self._directory_permissions_mode = directory_permissions_mode
self._root = InMemoryDirNode()
self._resolve(
self.base_location, create_if_missing=True, leaf_cls=InMemoryDirNode
)
setting_changed.connect(self._clear_cached_properties)
@cached_property
def base_location(self):
return self._value_or_setting(self._location, settings.MEDIA_ROOT)
@cached_property
def location(self):
return os.path.abspath(self.base_location)
@cached_property
def base_url(self):
if self._base_url is not None and not self._base_url.endswith("/"):
self._base_url += "/"
return self._value_or_setting(self._base_url, settings.MEDIA_URL)
@cached_property
def file_permissions_mode(self):
return self._value_or_setting(
self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS
)
@cached_property
def directory_permissions_mode(self):
return self._value_or_setting(
self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
)
def _relative_path(self, name):
full_path = self.path(name)
return os.path.relpath(full_path, self.location)
def _resolve(self, name, create_if_missing=False, leaf_cls=None, check_exists=True):
try:
relative_path = self._relative_path(name)
return self._root.resolve(
relative_path,
create_if_missing=create_if_missing,
leaf_cls=leaf_cls,
check_exists=check_exists,
)
except NotADirectoryError as exc:
absolute_path = self.path(exc.filename)
raise FileExistsError(f"{absolute_path} exists and is not a directory.")
def _open(self, name, mode="rb"):
create_if_missing = "w" in mode
file_node = self._resolve(
name, create_if_missing=create_if_missing, leaf_cls=InMemoryFileNode
)
return file_node.open(mode)
def _save(self, name, content):
file_node = self._resolve(
name, create_if_missing=True, leaf_cls=InMemoryFileNode
)
fd = None
for chunk in content.chunks():
if fd is None:
mode = "wb" if isinstance(chunk, bytes) else "wt"
fd = file_node.open(mode)
fd.write(chunk)
if hasattr(content, "temporary_file_path"):
os.remove(content.temporary_file_path())
file_node.modified_time = now()
return self._relative_path(name).replace("\\", "/")
def path(self, name):
return safe_join(self.location, name)
def delete(self, name):
path, filename = os.path.split(name)
dir_node = self._resolve(path, check_exists=False)
if dir_node is None:
return None
dir_node.remove_child(filename)
def exists(self, name):
return self._resolve(name, check_exists=False) is not None
def listdir(self, path):
node = self._resolve(path, leaf_cls=InMemoryDirNode)
return node.listdir()
def size(self, name):
return len(self._open(name, "rb").file.getvalue())
def url(self, name):
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
url = filepath_to_uri(name)
if url is not None:
url = url.lstrip("/")
return urljoin(self.base_url, url)
def get_accessed_time(self, name):
file_node = self._resolve(name)
return file_node.accessed_time
def get_created_time(self, name):
file_node = self._resolve(name)
return file_node.created_time
def get_modified_time(self, name):
file_node = self._resolve(name)
return file_node.modified_time
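# A usage sketch (assumes configured settings, since the default location is
# MEDIA_ROOT). InMemoryStorage mirrors the FileSystemStorage API while keeping
# the whole tree in InMemoryDirNode/InMemoryFileNode objects, which makes it
# well suited to tests.
#
#     >>> from django.core.files.base import ContentFile
#     >>> storage = InMemoryStorage()
#     >>> name = storage.save("notes/today.txt", ContentFile(b"remember"))
#     >>> storage.open(name).read()
#     b'remember'
#     >>> storage.listdir("notes")
#     ([], ['today.txt'])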
|
castiel248/Convert
|
Lib/site-packages/django/core/files/storage/memory.py
|
Python
|
mit
| 9,745 |
class StorageSettingsMixin:
def _clear_cached_properties(self, setting, **kwargs):
"""Reset setting based property values."""
if setting == "MEDIA_ROOT":
self.__dict__.pop("base_location", None)
self.__dict__.pop("location", None)
elif setting == "MEDIA_URL":
self.__dict__.pop("base_url", None)
elif setting == "FILE_UPLOAD_PERMISSIONS":
self.__dict__.pop("file_permissions_mode", None)
elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS":
self.__dict__.pop("directory_permissions_mode", None)
def _value_or_setting(self, value, setting):
return setting if value is None else value
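# A behavior sketch (illustrative): the host class exposes these values as
# cached properties, so popping them from __dict__ forces a re-read of the
# possibly overridden setting on next access. The storage backends above wire
# _clear_cached_properties to the setting_changed signal for exactly this.
#
#     >>> from django.core.files.storage import FileSystemStorage
#     >>> from django.test import override_settings
#     >>> fs = FileSystemStorage()
#     >>> with override_settings(MEDIA_URL="/cdn/"):
#     ...     fs.base_url  # cache was cleared via setting_changed
#     '/cdn/'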
|
castiel248/Convert
|
Lib/site-packages/django/core/files/storage/mixins.py
|
Python
|
mit
| 700 |
"""
The temp module provides a NamedTemporaryFile that can be reopened in the same
process on any platform. Most platforms use the standard Python
tempfile.NamedTemporaryFile class, but Windows users are given a custom class.
This is needed because the Python implementation of NamedTemporaryFile uses the
O_TEMPORARY flag under Windows, which prevents the file from being reopened
if the same flag is not provided [1][2]. Note that this does not address the
more general issue of opening a file for writing and reading in multiple
processes in a manner that works across platforms.
The custom version of NamedTemporaryFile doesn't support the same keyword
arguments available in tempfile.NamedTemporaryFile.
1: https://mail.python.org/pipermail/python-list/2005-December/336957.html
2: https://bugs.python.org/issue14243
"""
import os
import tempfile
from django.core.files.utils import FileProxyMixin
__all__ = (
"NamedTemporaryFile",
"gettempdir",
)
if os.name == "nt":
class TemporaryFile(FileProxyMixin):
"""
Temporary file object constructor that supports reopening of the
temporary file in Windows.
Unlike tempfile.NamedTemporaryFile from the standard library,
__init__() doesn't support the 'delete', 'buffering', 'encoding', or
'newline' keyword arguments.
"""
def __init__(self, mode="w+b", bufsize=-1, suffix="", prefix="", dir=None):
fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
self.name = name
self.file = os.fdopen(fd, mode, bufsize)
self.close_called = False
# Because close can be called during shutdown
# we need to cache os.unlink and access it
# as self.unlink only
unlink = os.unlink
def close(self):
if not self.close_called:
self.close_called = True
try:
self.file.close()
except OSError:
pass
try:
self.unlink(self.name)
except OSError:
pass
def __del__(self):
self.close()
def __enter__(self):
self.file.__enter__()
return self
def __exit__(self, exc, value, tb):
self.file.__exit__(exc, value, tb)
NamedTemporaryFile = TemporaryFile
else:
NamedTemporaryFile = tempfile.NamedTemporaryFile
gettempdir = tempfile.gettempdir
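# A usage sketch: the point of the Windows class above is that the temporary
# file can be reopened *by name* while still held open, matching what the
# standard tempfile.NamedTemporaryFile already allows on POSIX.
#
#     >>> tmp = NamedTemporaryFile(suffix=".txt")
#     >>> _ = tmp.write(b"scratch")
#     >>> tmp.flush()
#     >>> open(tmp.name, "rb").read()
#     b'scratch'
#     >>> tmp.close()  # also removes the file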
|
castiel248/Convert
|
Lib/site-packages/django/core/files/temp.py
|
Python
|
mit
| 2,503 |
"""
Classes representing uploaded files.
"""
import os
from io import BytesIO
from django.conf import settings
from django.core.files import temp as tempfile
from django.core.files.base import File
from django.core.files.utils import validate_file_name
__all__ = (
"UploadedFile",
"TemporaryUploadedFile",
"InMemoryUploadedFile",
"SimpleUploadedFile",
)
class UploadedFile(File):
"""
An abstract uploaded file (``TemporaryUploadedFile`` and
``InMemoryUploadedFile`` are the built-in concrete subclasses).
An ``UploadedFile`` object behaves somewhat like a file object and
represents some file data that the user submitted with a form.
"""
def __init__(
self,
file=None,
name=None,
content_type=None,
size=None,
charset=None,
content_type_extra=None,
):
super().__init__(file, name)
self.size = size
self.content_type = content_type
self.charset = charset
self.content_type_extra = content_type_extra
def __repr__(self):
return "<%s: %s (%s)>" % (self.__class__.__name__, self.name, self.content_type)
def _get_name(self):
return self._name
def _set_name(self, name):
# Sanitize the file name so that it can't be dangerous.
if name is not None:
# Just use the basename of the file -- anything else is dangerous.
name = os.path.basename(name)
# File names longer than 255 characters can cause problems on older OSes.
if len(name) > 255:
name, ext = os.path.splitext(name)
ext = ext[:255]
name = name[: 255 - len(ext)] + ext
name = validate_file_name(name)
self._name = name
name = property(_get_name, _set_name)
class TemporaryUploadedFile(UploadedFile):
"""
A file uploaded to a temporary location (i.e. stream-to-disk).
"""
def __init__(self, name, content_type, size, charset, content_type_extra=None):
_, ext = os.path.splitext(name)
file = tempfile.NamedTemporaryFile(
suffix=".upload" + ext, dir=settings.FILE_UPLOAD_TEMP_DIR
)
super().__init__(file, name, content_type, size, charset, content_type_extra)
def temporary_file_path(self):
"""Return the full path of this file."""
return self.file.name
def close(self):
try:
return self.file.close()
except FileNotFoundError:
# The file was moved or deleted before the tempfile could unlink
# it. Still sets self.file.close_called and calls
# self.file.file.close() before the exception.
pass
class InMemoryUploadedFile(UploadedFile):
"""
A file uploaded into memory (i.e. stream-to-memory).
"""
def __init__(
self,
file,
field_name,
name,
content_type,
size,
charset,
content_type_extra=None,
):
super().__init__(file, name, content_type, size, charset, content_type_extra)
self.field_name = field_name
def open(self, mode=None):
self.file.seek(0)
return self
def chunks(self, chunk_size=None):
self.file.seek(0)
yield self.read()
def multiple_chunks(self, chunk_size=None):
# Since it's in memory, we'll never have multiple chunks.
return False
class SimpleUploadedFile(InMemoryUploadedFile):
"""
A simple representation of a file, which just has content, size, and a name.
"""
def __init__(self, name, content, content_type="text/plain"):
content = content or b""
super().__init__(
BytesIO(content), None, name, content_type, len(content), None, None
)
@classmethod
def from_dict(cls, file_dict):
"""
Create a SimpleUploadedFile object from a dictionary with keys:
- filename
- content-type
- content
"""
return cls(
file_dict["filename"],
file_dict["content"],
file_dict.get("content-type", "text/plain"),
)
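# A usage sketch: SimpleUploadedFile is the conventional way to fake an upload
# in tests; it is simply an InMemoryUploadedFile wrapped around a BytesIO.
#
#     >>> f = SimpleUploadedFile("report.csv", b"a,b\n1,2\n", content_type="text/csv")
#     >>> (f.name, f.size, f.content_type)
#     ('report.csv', 8, 'text/csv')
#     >>> f.read()
#     b'a,b\n1,2\n'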
|
castiel248/Convert
|
Lib/site-packages/django/core/files/uploadedfile.py
|
Python
|
mit
| 4,189 |
"""
Base file upload handler classes, and the built-in concrete subclasses
"""
import os
from io import BytesIO
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
from django.utils.module_loading import import_string
__all__ = [
"UploadFileException",
"StopUpload",
"SkipFile",
"FileUploadHandler",
"TemporaryFileUploadHandler",
"MemoryFileUploadHandler",
"load_handler",
"StopFutureHandlers",
]
class UploadFileException(Exception):
"""
Any error having to do with uploading files.
"""
pass
class StopUpload(UploadFileException):
"""
This exception is raised when an upload must abort.
"""
def __init__(self, connection_reset=False):
"""
        If ``connection_reset`` is ``True``, Django will halt the upload
        without consuming the rest of it. This will cause the browser to
        show a "connection reset" error.
"""
self.connection_reset = connection_reset
def __str__(self):
if self.connection_reset:
return "StopUpload: Halt current upload."
else:
return "StopUpload: Consume request data, then halt."
class SkipFile(UploadFileException):
"""
This exception is raised by an upload handler that wants to skip a given file.
"""
pass
class StopFutureHandlers(UploadFileException):
"""
Upload handlers that have handled a file and do not want future handlers to
run should raise this exception instead of returning None.
"""
pass
class FileUploadHandler:
"""
Base class for streaming upload handlers.
"""
    chunk_size = 64 * 2**10  #: The default chunk size is 64 KB.
def __init__(self, request=None):
self.file_name = None
self.content_type = None
self.content_length = None
self.charset = None
self.content_type_extra = None
self.request = request
def handle_raw_input(
self, input_data, META, content_length, boundary, encoding=None
):
"""
Handle the raw input from the client.
Parameters:
:input_data:
An object that supports reading via .read().
:META:
``request.META``.
:content_length:
The (integer) value of the Content-Length header from the
client.
:boundary: The boundary from the Content-Type header. Be sure to
prepend two '--'.
"""
pass
def new_file(
self,
field_name,
file_name,
content_type,
content_length,
charset=None,
content_type_extra=None,
):
"""
Signal that a new file has been started.
Warning: As with any data from the client, you should not trust
content_length (and sometimes won't even get it).
"""
self.field_name = field_name
self.file_name = file_name
self.content_type = content_type
self.content_length = content_length
self.charset = charset
self.content_type_extra = content_type_extra
def receive_data_chunk(self, raw_data, start):
"""
Receive data from the streamed upload parser. ``start`` is the position
in the file of the chunk.
"""
raise NotImplementedError(
"subclasses of FileUploadHandler must provide a receive_data_chunk() method"
)
def file_complete(self, file_size):
"""
Signal that a file has completed. File size corresponds to the actual
size accumulated by all the chunks.
Subclasses should return a valid ``UploadedFile`` object.
"""
raise NotImplementedError(
"subclasses of FileUploadHandler must provide a file_complete() method"
)
def upload_complete(self):
"""
Signal that the upload is complete. Subclasses should perform cleanup
that is necessary for this handler.
"""
pass
def upload_interrupted(self):
"""
Signal that the upload was interrupted. Subclasses should perform
cleanup that is necessary for this handler.
"""
pass
class TemporaryFileUploadHandler(FileUploadHandler):
"""
Upload handler that streams data into a temporary file.
"""
def new_file(self, *args, **kwargs):
"""
Create the file object to append to as data is coming in.
"""
super().new_file(*args, **kwargs)
self.file = TemporaryUploadedFile(
self.file_name, self.content_type, 0, self.charset, self.content_type_extra
)
def receive_data_chunk(self, raw_data, start):
self.file.write(raw_data)
def file_complete(self, file_size):
self.file.seek(0)
self.file.size = file_size
return self.file
def upload_interrupted(self):
if hasattr(self, "file"):
temp_location = self.file.temporary_file_path()
try:
self.file.close()
os.remove(temp_location)
except FileNotFoundError:
pass
class MemoryFileUploadHandler(FileUploadHandler):
"""
File upload handler to stream uploads into memory (used for small files).
"""
def handle_raw_input(
self, input_data, META, content_length, boundary, encoding=None
):
"""
Use the content_length to signal whether or not this handler should be
used.
"""
        # Check the content-length header to see if this handler should be
        # used. If the post is too large, the memory handler cannot be used.
self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE
def new_file(self, *args, **kwargs):
super().new_file(*args, **kwargs)
if self.activated:
self.file = BytesIO()
raise StopFutureHandlers()
def receive_data_chunk(self, raw_data, start):
"""Add the data to the BytesIO file."""
if self.activated:
self.file.write(raw_data)
else:
return raw_data
def file_complete(self, file_size):
"""Return a file object if this handler is activated."""
if not self.activated:
return
self.file.seek(0)
return InMemoryUploadedFile(
file=self.file,
field_name=self.field_name,
name=self.file_name,
content_type=self.content_type,
size=file_size,
charset=self.charset,
content_type_extra=self.content_type_extra,
)
def load_handler(path, *args, **kwargs):
"""
Given a path to a handler, return an instance of that handler.
E.g.::
>>> from django.http import HttpRequest
>>> request = HttpRequest()
>>> load_handler(
... 'django.core.files.uploadhandler.TemporaryFileUploadHandler',
... request,
... )
<TemporaryFileUploadHandler object at 0x...>
"""
return import_string(path)(*args, **kwargs)
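# A minimal sketch of a custom handler (illustrative, not shipped with
# Django): it enforces a per-file size cap by raising SkipFile from
# receive_data_chunk(); the 5 MB limit and the class name are arbitrary.
class MaxSizeUploadHandler(TemporaryFileUploadHandler):
    max_bytes = 5 * 2**20
    def new_file(self, *args, **kwargs):
        self.bytes_seen = 0
        super().new_file(*args, **kwargs)
    def receive_data_chunk(self, raw_data, start):
        self.bytes_seen += len(raw_data)
        if self.bytes_seen > self.max_bytes:
            raise SkipFile()
        return super().receive_data_chunk(raw_data, start)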
|
castiel248/Convert
|
Lib/site-packages/django/core/files/uploadhandler.py
|
Python
|
mit
| 7,179 |
import os
import pathlib
from django.core.exceptions import SuspiciousFileOperation
def validate_file_name(name, allow_relative_path=False):
# Remove potentially dangerous names
if os.path.basename(name) in {"", ".", ".."}:
raise SuspiciousFileOperation("Could not derive file name from '%s'" % name)
if allow_relative_path:
# Use PurePosixPath() because this branch is checked only in
# FileField.generate_filename() where all file paths are expected to be
# Unix style (with forward slashes).
path = pathlib.PurePosixPath(name)
if path.is_absolute() or ".." in path.parts:
raise SuspiciousFileOperation(
"Detected path traversal attempt in '%s'" % name
)
elif name != os.path.basename(name):
raise SuspiciousFileOperation("File name '%s' includes path elements" % name)
return name
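# A behavior sketch for validate_file_name(); the file names are hypothetical.
#
#     >>> validate_file_name("notes.txt")
#     'notes.txt'
#     >>> validate_file_name("docs/../secret.txt", allow_relative_path=True)
#     Traceback (most recent call last):
#       ...
#     django.core.exceptions.SuspiciousFileOperation: Detected path traversal attempt in 'docs/../secret.txt'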
class FileProxyMixin:
"""
    A mixin class used to forward file methods to an underlying file
object. The internal file object has to be called "file"::
class FileProxy(FileProxyMixin):
def __init__(self, file):
self.file = file
"""
encoding = property(lambda self: self.file.encoding)
fileno = property(lambda self: self.file.fileno)
flush = property(lambda self: self.file.flush)
isatty = property(lambda self: self.file.isatty)
newlines = property(lambda self: self.file.newlines)
read = property(lambda self: self.file.read)
readinto = property(lambda self: self.file.readinto)
readline = property(lambda self: self.file.readline)
readlines = property(lambda self: self.file.readlines)
seek = property(lambda self: self.file.seek)
tell = property(lambda self: self.file.tell)
truncate = property(lambda self: self.file.truncate)
write = property(lambda self: self.file.write)
writelines = property(lambda self: self.file.writelines)
@property
def closed(self):
return not self.file or self.file.closed
def readable(self):
if self.closed:
return False
if hasattr(self.file, "readable"):
return self.file.readable()
return True
def writable(self):
if self.closed:
return False
if hasattr(self.file, "writable"):
return self.file.writable()
return "w" in getattr(self.file, "mode", "")
def seekable(self):
if self.closed:
return False
if hasattr(self.file, "seekable"):
return self.file.seekable()
return True
def __iter__(self):
return iter(self.file)
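# A usage sketch following the pattern from the docstring above; the proxy
# transparently exposes read/seek/closed and friends from the wrapped object.
#
#     >>> import io
#     >>> class FileProxy(FileProxyMixin):
#     ...     def __init__(self, file):
#     ...         self.file = file
#     >>> p = FileProxy(io.BytesIO(b"abc"))
#     >>> (p.read(2), p.seekable(), p.closed)
#     (b'ab', True, False)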
|
castiel248/Convert
|
Lib/site-packages/django/core/files/utils.py
|
Python
|
mit
| 2,659 |
castiel248/Convert
|
Lib/site-packages/django/core/handlers/__init__.py
|
Python
|
mit
| 0 |
|
import logging
import sys
import tempfile
import traceback
from asgiref.sync import ThreadSensitiveContext, sync_to_async
from django.conf import settings
from django.core import signals
from django.core.exceptions import RequestAborted, RequestDataTooBig
from django.core.handlers import base
from django.http import (
FileResponse,
HttpRequest,
HttpResponse,
HttpResponseBadRequest,
HttpResponseServerError,
QueryDict,
parse_cookie,
)
from django.urls import set_script_prefix
from django.utils.asyncio import aclosing
from django.utils.functional import cached_property
logger = logging.getLogger("django.request")
class ASGIRequest(HttpRequest):
"""
Custom request subclass that decodes from an ASGI-standard request dict
and wraps request body handling.
"""
# Number of seconds until a Request gives up on trying to read a request
# body and aborts.
body_receive_timeout = 60
def __init__(self, scope, body_file):
self.scope = scope
self._post_parse_error = False
self._read_started = False
self.resolver_match = None
self.script_name = self.scope.get("root_path", "")
if self.script_name and scope["path"].startswith(self.script_name):
# TODO: Better is-prefix checking, slash handling?
self.path_info = scope["path"][len(self.script_name) :]
else:
self.path_info = scope["path"]
        # The Django path is different from the ASGI scope's path; it should
        # combine with the script name.
if self.script_name:
self.path = "%s/%s" % (
self.script_name.rstrip("/"),
self.path_info.replace("/", "", 1),
)
else:
self.path = scope["path"]
# HTTP basics.
self.method = self.scope["method"].upper()
# Ensure query string is encoded correctly.
query_string = self.scope.get("query_string", "")
if isinstance(query_string, bytes):
query_string = query_string.decode()
self.META = {
"REQUEST_METHOD": self.method,
"QUERY_STRING": query_string,
"SCRIPT_NAME": self.script_name,
"PATH_INFO": self.path_info,
# WSGI-expecting code will need these for a while
"wsgi.multithread": True,
"wsgi.multiprocess": True,
}
if self.scope.get("client"):
self.META["REMOTE_ADDR"] = self.scope["client"][0]
self.META["REMOTE_HOST"] = self.META["REMOTE_ADDR"]
self.META["REMOTE_PORT"] = self.scope["client"][1]
if self.scope.get("server"):
self.META["SERVER_NAME"] = self.scope["server"][0]
self.META["SERVER_PORT"] = str(self.scope["server"][1])
else:
self.META["SERVER_NAME"] = "unknown"
self.META["SERVER_PORT"] = "0"
# Headers go into META.
for name, value in self.scope.get("headers", []):
name = name.decode("latin1")
if name == "content-length":
corrected_name = "CONTENT_LENGTH"
elif name == "content-type":
corrected_name = "CONTENT_TYPE"
else:
corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
            # HTTP/2 says only ASCII characters are allowed in headers, but
            # decode latin1 just in case.
value = value.decode("latin1")
if corrected_name in self.META:
value = self.META[corrected_name] + "," + value
self.META[corrected_name] = value
# Pull out request encoding, if provided.
self._set_content_type_params(self.META)
# Directly assign the body file to be our stream.
self._stream = body_file
# Other bits.
self.resolver_match = None
@cached_property
def GET(self):
return QueryDict(self.META["QUERY_STRING"])
def _get_scheme(self):
return self.scope.get("scheme") or super()._get_scheme()
def _get_post(self):
if not hasattr(self, "_post"):
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
def _get_files(self):
if not hasattr(self, "_files"):
self._load_post_and_files()
return self._files
POST = property(_get_post, _set_post)
FILES = property(_get_files)
@cached_property
def COOKIES(self):
return parse_cookie(self.META.get("HTTP_COOKIE", ""))
def close(self):
super().close()
self._stream.close()
class ASGIHandler(base.BaseHandler):
"""Handler for ASGI requests."""
request_class = ASGIRequest
# Size to chunk response bodies into for multiple response messages.
chunk_size = 2**16
def __init__(self):
super().__init__()
self.load_middleware(is_async=True)
async def __call__(self, scope, receive, send):
"""
Async entrypoint - parses the request and hands off to get_response.
"""
# Serve only HTTP connections.
        # FIXME: Allow overriding this.
if scope["type"] != "http":
raise ValueError(
"Django can only handle ASGI/HTTP connections, not %s." % scope["type"]
)
async with ThreadSensitiveContext():
await self.handle(scope, receive, send)
async def handle(self, scope, receive, send):
"""
Handles the ASGI request. Called via the __call__ method.
"""
# Receive the HTTP request body as a stream object.
try:
body_file = await self.read_body(receive)
except RequestAborted:
return
# Request is complete and can be served.
set_script_prefix(self.get_script_prefix(scope))
await sync_to_async(signals.request_started.send, thread_sensitive=True)(
sender=self.__class__, scope=scope
)
# Get the request and check for basic issues.
request, error_response = self.create_request(scope, body_file)
if request is None:
body_file.close()
await self.send_response(error_response, send)
return
# Get the response, using the async mode of BaseHandler.
response = await self.get_response_async(request)
response._handler_class = self.__class__
        # Increase chunk size on file responses (ASGI servers handle low-level
# chunking).
if isinstance(response, FileResponse):
response.block_size = self.chunk_size
# Send the response.
await self.send_response(response, send)
async def read_body(self, receive):
"""Reads an HTTP body from an ASGI connection."""
        # Use a tempfile that automatically rolls over to an on-disk file as
        # it fills up.
body_file = tempfile.SpooledTemporaryFile(
max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode="w+b"
)
while True:
message = await receive()
if message["type"] == "http.disconnect":
body_file.close()
# Early client disconnect.
raise RequestAborted()
# Add a body chunk from the message, if provided.
if "body" in message:
body_file.write(message["body"])
# Quit out if that's the end.
if not message.get("more_body", False):
break
body_file.seek(0)
return body_file
def create_request(self, scope, body_file):
"""
        Create the Request object and return either (request, None) or
(None, response) if there is an error response.
"""
try:
return self.request_class(scope, body_file), None
except UnicodeDecodeError:
logger.warning(
"Bad Request (UnicodeDecodeError)",
exc_info=sys.exc_info(),
extra={"status_code": 400},
)
return None, HttpResponseBadRequest()
except RequestDataTooBig:
return None, HttpResponse("413 Payload too large", status=413)
def handle_uncaught_exception(self, request, resolver, exc_info):
"""Last-chance handler for exceptions."""
# There's no WSGI server to catch the exception further up
# if this fails, so translate it into a plain text response.
try:
return super().handle_uncaught_exception(request, resolver, exc_info)
except Exception:
return HttpResponseServerError(
traceback.format_exc() if settings.DEBUG else "Internal Server Error",
content_type="text/plain",
)
async def send_response(self, response, send):
"""Encode and send a response out over ASGI."""
# Collect cookies into headers. Have to preserve header case as there
# are some non-RFC compliant clients that require e.g. Content-Type.
response_headers = []
for header, value in response.items():
if isinstance(header, str):
header = header.encode("ascii")
if isinstance(value, str):
value = value.encode("latin1")
response_headers.append((bytes(header), bytes(value)))
for c in response.cookies.values():
response_headers.append(
(b"Set-Cookie", c.output(header="").encode("ascii").strip())
)
# Initial response message.
await send(
{
"type": "http.response.start",
"status": response.status_code,
"headers": response_headers,
}
)
# Streaming responses need to be pinned to their iterator.
if response.streaming:
# - Consume via `__aiter__` and not `streaming_content` directly, to
# allow mapping of a sync iterator.
# - Use aclosing() when consuming aiter.
# See https://github.com/python/cpython/commit/6e8dcda
async with aclosing(response.__aiter__()) as content:
async for part in content:
for chunk, _ in self.chunk_bytes(part):
await send(
{
"type": "http.response.body",
"body": chunk,
# Ignore "more" as there may be more parts; instead,
# use an empty final closing message with False.
"more_body": True,
}
)
# Final closing message.
await send({"type": "http.response.body"})
# Other responses just need chunking.
else:
# Yield chunks of response.
for chunk, last in self.chunk_bytes(response.content):
await send(
{
"type": "http.response.body",
"body": chunk,
"more_body": not last,
}
)
await sync_to_async(response.close, thread_sensitive=True)()
@classmethod
def chunk_bytes(cls, data):
"""
        Chunk some data up so it can be sent in reasonably sized messages.
Yields (chunk, last_chunk) tuples.
"""
position = 0
if not data:
yield data, True
return
while position < len(data):
yield (
data[position : position + cls.chunk_size],
(position + cls.chunk_size) >= len(data),
)
position += cls.chunk_size
def get_script_prefix(self, scope):
"""
Return the script prefix to use from either the scope or a setting.
"""
if settings.FORCE_SCRIPT_NAME:
return settings.FORCE_SCRIPT_NAME
return scope.get("root_path", "") or ""
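# A behavior sketch for ASGIHandler.chunk_bytes(): it always yields at least
# one tuple, and the boolean marks the final chunk so send_response() can set
# "more_body" correctly.
#
#     >>> list(ASGIHandler.chunk_bytes(b""))
#     [(b'', True)]
#     >>> [last for _, last in ASGIHandler.chunk_bytes(b"x" * (2**16 + 1))]
#     [False, True]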
|
castiel248/Convert
|
Lib/site-packages/django/core/handlers/asgi.py
|
Python
|
mit
| 12,067 |
import asyncio
import logging
import types
from asgiref.sync import async_to_sync, iscoroutinefunction, sync_to_async
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed
from django.core.signals import request_finished
from django.db import connections, transaction
from django.urls import get_resolver, set_urlconf
from django.utils.log import log_response
from django.utils.module_loading import import_string
from .exception import convert_exception_to_response
logger = logging.getLogger("django.request")
class BaseHandler:
_view_middleware = None
_template_response_middleware = None
_exception_middleware = None
_middleware_chain = None
def load_middleware(self, is_async=False):
"""
Populate middleware lists from settings.MIDDLEWARE.
Must be called after the environment is fixed (see __call__ in subclasses).
"""
self._view_middleware = []
self._template_response_middleware = []
self._exception_middleware = []
get_response = self._get_response_async if is_async else self._get_response
handler = convert_exception_to_response(get_response)
handler_is_async = is_async
for middleware_path in reversed(settings.MIDDLEWARE):
middleware = import_string(middleware_path)
middleware_can_sync = getattr(middleware, "sync_capable", True)
middleware_can_async = getattr(middleware, "async_capable", False)
if not middleware_can_sync and not middleware_can_async:
raise RuntimeError(
"Middleware %s must have at least one of "
"sync_capable/async_capable set to True." % middleware_path
)
elif not handler_is_async and middleware_can_sync:
middleware_is_async = False
else:
middleware_is_async = middleware_can_async
try:
# Adapt handler, if needed.
adapted_handler = self.adapt_method_mode(
middleware_is_async,
handler,
handler_is_async,
debug=settings.DEBUG,
name="middleware %s" % middleware_path,
)
mw_instance = middleware(adapted_handler)
except MiddlewareNotUsed as exc:
if settings.DEBUG:
if str(exc):
logger.debug("MiddlewareNotUsed(%r): %s", middleware_path, exc)
else:
logger.debug("MiddlewareNotUsed: %r", middleware_path)
continue
else:
handler = adapted_handler
if mw_instance is None:
raise ImproperlyConfigured(
"Middleware factory %s returned None." % middleware_path
)
if hasattr(mw_instance, "process_view"):
self._view_middleware.insert(
0,
self.adapt_method_mode(is_async, mw_instance.process_view),
)
if hasattr(mw_instance, "process_template_response"):
self._template_response_middleware.append(
self.adapt_method_mode(
is_async, mw_instance.process_template_response
),
)
if hasattr(mw_instance, "process_exception"):
# The exception-handling stack is still always synchronous for
# now, so adapt that way.
self._exception_middleware.append(
self.adapt_method_mode(False, mw_instance.process_exception),
)
handler = convert_exception_to_response(mw_instance)
handler_is_async = middleware_is_async
# Adapt the top of the stack, if needed.
handler = self.adapt_method_mode(is_async, handler, handler_is_async)
# We only assign to this when initialization is complete as it is used
# as a flag for initialization being complete.
self._middleware_chain = handler
def adapt_method_mode(
self,
is_async,
method,
method_is_async=None,
debug=False,
name=None,
):
"""
Adapt a method to be in the correct "mode":
- If is_async is False:
- Synchronous methods are left alone
- Asynchronous methods are wrapped with async_to_sync
- If is_async is True:
- Synchronous methods are wrapped with sync_to_async()
- Asynchronous methods are left alone
"""
if method_is_async is None:
method_is_async = iscoroutinefunction(method)
        if debug and not name:
            name = "method %s()" % method.__qualname__
if is_async:
if not method_is_async:
if debug:
logger.debug("Synchronous handler adapted for %s.", name)
return sync_to_async(method, thread_sensitive=True)
elif method_is_async:
if debug:
logger.debug("Asynchronous handler adapted for %s.", name)
return async_to_sync(method)
return method
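    # A behavior sketch for adapt_method_mode() (illustrative; the names
    # handler, sync_view, and async_view are hypothetical): the method wraps
    # whichever callable does not match the requested mode and returns the
    # other untouched.
    #
    #     handler.adapt_method_mode(True, sync_view)    # sync_to_async wrapper
    #     handler.adapt_method_mode(True, async_view)   # async_view, unchanged
    #     handler.adapt_method_mode(False, async_view)  # async_to_sync wrapper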
def get_response(self, request):
"""Return an HttpResponse object for the given HttpRequest."""
# Setup default url resolver for this thread
set_urlconf(settings.ROOT_URLCONF)
response = self._middleware_chain(request)
response._resource_closers.append(request.close)
if response.status_code >= 400:
log_response(
"%s: %s",
response.reason_phrase,
request.path,
response=response,
request=request,
)
return response
async def get_response_async(self, request):
"""
Asynchronous version of get_response.
Funneling everything, including WSGI, into a single async
get_response() is too slow. Avoid the context switch by using
a separate async response path.
"""
# Setup default url resolver for this thread.
set_urlconf(settings.ROOT_URLCONF)
response = await self._middleware_chain(request)
response._resource_closers.append(request.close)
if response.status_code >= 400:
await sync_to_async(log_response, thread_sensitive=False)(
"%s: %s",
response.reason_phrase,
request.path,
response=response,
request=request,
)
return response
def _get_response(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(
request, callback, callback_args, callback_kwargs
)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
            # If it is an asynchronous view, adapt it with async_to_sync().
if iscoroutinefunction(wrapped_callback):
wrapped_callback = async_to_sync(wrapped_callback)
try:
response = wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = self.process_exception_by_middleware(e, request)
if response is None:
raise
# Complain if the view returned None (a common error).
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, "render") and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
# Complain if the template response middleware returned None
# (a common error).
self.check_response(
response,
middleware_method,
name="%s.process_template_response"
% (middleware_method.__self__.__class__.__name__,),
)
try:
response = response.render()
except Exception as e:
response = self.process_exception_by_middleware(e, request)
if response is None:
raise
return response
async def _get_response_async(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware.
for middleware_method in self._view_middleware:
response = await middleware_method(
request, callback, callback_args, callback_kwargs
)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is a synchronous view, run it in a subthread
if not iscoroutinefunction(wrapped_callback):
wrapped_callback = sync_to_async(
wrapped_callback, thread_sensitive=True
)
try:
response = await wrapped_callback(
request, *callback_args, **callback_kwargs
)
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
if response is None:
raise
# Complain if the view returned None or an uncalled coroutine.
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, "render") and callable(response.render):
for middleware_method in self._template_response_middleware:
response = await middleware_method(request, response)
# Complain if the template response middleware returned None or
# an uncalled coroutine.
self.check_response(
response,
middleware_method,
name="%s.process_template_response"
% (middleware_method.__self__.__class__.__name__,),
)
try:
if iscoroutinefunction(response.render):
response = await response.render()
else:
response = await sync_to_async(
response.render, thread_sensitive=True
)()
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
if response is None:
raise
# Make sure the response is not a coroutine
if asyncio.iscoroutine(response):
raise RuntimeError("Response is still a coroutine.")
return response
def resolve_request(self, request):
"""
Retrieve/set the urlconf for the request. Return the view resolved,
with its args and kwargs.
"""
# Work out the resolver.
if hasattr(request, "urlconf"):
urlconf = request.urlconf
set_urlconf(urlconf)
resolver = get_resolver(urlconf)
else:
resolver = get_resolver()
# Resolve the view, and assign the match object back to the request.
resolver_match = resolver.resolve(request.path_info)
request.resolver_match = resolver_match
return resolver_match
def check_response(self, response, callback, name=None):
"""
Raise an error if the view returned None or an uncalled coroutine.
"""
if not (response is None or asyncio.iscoroutine(response)):
return
if not name:
if isinstance(callback, types.FunctionType): # FBV
name = "The view %s.%s" % (callback.__module__, callback.__name__)
else: # CBV
name = "The view %s.%s.__call__" % (
callback.__module__,
callback.__class__.__name__,
)
if response is None:
raise ValueError(
"%s didn't return an HttpResponse object. It returned None "
"instead." % name
)
elif asyncio.iscoroutine(response):
raise ValueError(
"%s didn't return an HttpResponse object. It returned an "
"unawaited coroutine instead. You may need to add an 'await' "
"into your view." % name
)
# Other utility methods.
def make_view_atomic(self, view):
non_atomic_requests = getattr(view, "_non_atomic_requests", set())
for alias, settings_dict in connections.settings.items():
if settings_dict["ATOMIC_REQUESTS"] and alias not in non_atomic_requests:
if iscoroutinefunction(view):
raise RuntimeError(
"You cannot use ATOMIC_REQUESTS with async views."
)
view = transaction.atomic(using=alias)(view)
return view
def process_exception_by_middleware(self, exception, request):
"""
        Pass the exception to the exception middleware. If no middleware
        returns a response for this exception, return None.
"""
for middleware_method in self._exception_middleware:
response = middleware_method(request, exception)
if response:
return response
return None
def reset_urlconf(sender, **kwargs):
"""Reset the URLconf after each request is finished."""
set_urlconf(None)
request_finished.connect(reset_urlconf)
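# Editor's sketch (not part of Django): the sync/async adaptation performed by
# BaseHandler.adapt_method_mode() above, shown with asgiref directly. The
# callables below are hypothetical stand-ins for views or middleware hooks.
import asyncio
from asgiref.sync import async_to_sync, sync_to_async
def _demo_sync_hook(request):
    return "sync result for %r" % (request,)
async def _demo_async_hook(request):
    return "async result for %r" % (request,)
def _demo_adaptation():
    # Sync callable on an async stack: wrap with sync_to_async();
    # thread_sensitive=True mirrors the view-middleware adaptation above.
    adapted_up = sync_to_async(_demo_sync_hook, thread_sensitive=True)
    print(asyncio.run(adapted_up("GET /")))
    # Async callable on a sync stack: wrap with async_to_sync().
    adapted_down = async_to_sync(_demo_async_hook)
    print(adapted_down("GET /"))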
|
castiel248/Convert
|
Lib/site-packages/django/core/handlers/base.py
|
Python
|
mit
| 14,813 |
import logging
import sys
from functools import wraps
from asgiref.sync import iscoroutinefunction, sync_to_async
from django.conf import settings
from django.core import signals
from django.core.exceptions import (
BadRequest,
PermissionDenied,
RequestDataTooBig,
SuspiciousOperation,
TooManyFieldsSent,
TooManyFilesSent,
)
from django.http import Http404
from django.http.multipartparser import MultiPartParserError
from django.urls import get_resolver, get_urlconf
from django.utils.log import log_response
from django.views import debug
def convert_exception_to_response(get_response):
"""
Wrap the given get_response callable in exception-to-response conversion.
All exceptions will be converted. All known 4xx exceptions (Http404,
PermissionDenied, MultiPartParserError, SuspiciousOperation) will be
converted to the appropriate response, and all other exceptions will be
converted to 500 responses.
This decorator is automatically applied to all middleware to ensure that
no middleware leaks an exception and that the next middleware in the stack
can rely on getting a response instead of an exception.
"""
if iscoroutinefunction(get_response):
@wraps(get_response)
async def inner(request):
try:
response = await get_response(request)
except Exception as exc:
response = await sync_to_async(
response_for_exception, thread_sensitive=False
)(request, exc)
return response
return inner
else:
@wraps(get_response)
def inner(request):
try:
response = get_response(request)
except Exception as exc:
response = response_for_exception(request, exc)
return response
return inner
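# Editor's sketch (not part of Django): the wrap-and-convert pattern used by
# convert_exception_to_response() above, reduced to plain Python so the control
# flow is visible; "on_error" is a hypothetical stand-in for
# response_for_exception().
from functools import wraps as _demo_wraps
def _demo_convert(get_response, on_error):
    @_demo_wraps(get_response)
    def inner(request):
        try:
            return get_response(request)
        except Exception as exc:
            # Never let the exception escape; hand back a response instead.
            return on_error(request, exc)
    return inner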
def response_for_exception(request, exc):
if isinstance(exc, Http404):
if settings.DEBUG:
response = debug.technical_404_response(request, exc)
else:
response = get_exception_response(
request, get_resolver(get_urlconf()), 404, exc
)
elif isinstance(exc, PermissionDenied):
response = get_exception_response(
request, get_resolver(get_urlconf()), 403, exc
)
log_response(
"Forbidden (Permission denied): %s",
request.path,
response=response,
request=request,
exception=exc,
)
elif isinstance(exc, MultiPartParserError):
response = get_exception_response(
request, get_resolver(get_urlconf()), 400, exc
)
log_response(
"Bad request (Unable to parse request body): %s",
request.path,
response=response,
request=request,
exception=exc,
)
elif isinstance(exc, BadRequest):
if settings.DEBUG:
response = debug.technical_500_response(
request, *sys.exc_info(), status_code=400
)
else:
response = get_exception_response(
request, get_resolver(get_urlconf()), 400, exc
)
log_response(
"%s: %s",
str(exc),
request.path,
response=response,
request=request,
exception=exc,
)
elif isinstance(exc, SuspiciousOperation):
if isinstance(exc, (RequestDataTooBig, TooManyFieldsSent, TooManyFilesSent)):
# POST data can't be accessed again, otherwise the original
# exception would be raised.
request._mark_post_parse_error()
        # The request logger receives events for any problematic request.
        # The security logger receives events for all SuspiciousOperations.
security_logger = logging.getLogger(
"django.security.%s" % exc.__class__.__name__
)
security_logger.error(
str(exc),
exc_info=exc,
extra={"status_code": 400, "request": request},
)
if settings.DEBUG:
response = debug.technical_500_response(
request, *sys.exc_info(), status_code=400
)
else:
response = get_exception_response(
request, get_resolver(get_urlconf()), 400, exc
)
else:
signals.got_request_exception.send(sender=None, request=request)
response = handle_uncaught_exception(
request, get_resolver(get_urlconf()), sys.exc_info()
)
log_response(
"%s: %s",
response.reason_phrase,
request.path,
response=response,
request=request,
exception=exc,
)
# Force a TemplateResponse to be rendered.
if not getattr(response, "is_rendered", True) and callable(
getattr(response, "render", None)
):
response = response.render()
return response
def get_exception_response(request, resolver, status_code, exception):
try:
callback = resolver.resolve_error_handler(status_code)
response = callback(request, exception=exception)
except Exception:
signals.got_request_exception.send(sender=None, request=request)
response = handle_uncaught_exception(request, resolver, sys.exc_info())
return response
def handle_uncaught_exception(request, resolver, exc_info):
"""
Processing for any otherwise uncaught exceptions (those that will
generate HTTP 500 responses).
"""
if settings.DEBUG_PROPAGATE_EXCEPTIONS:
raise
if settings.DEBUG:
return debug.technical_500_response(request, *exc_info)
# Return an HttpResponse that displays a friendly error message.
callback = resolver.resolve_error_handler(500)
return callback(request)
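# Editor's sketch (an assumption derived from the calls above, not Django API
# documentation): error handlers resolved by resolve_error_handler() are plain
# views; note that the 500 handler is called without an exception argument.
from django.http import HttpResponse
def _demo_page_not_found(request, exception):
    # Matches get_exception_response(): callback(request, exception=exception).
    return HttpResponse("Not found.", status=404)
def _demo_server_error(request):
    # Matches handle_uncaught_exception(): callback(request).
    return HttpResponse("Server error.", status=500)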
|
castiel248/Convert
|
Lib/site-packages/django/core/handlers/exception.py
|
Python
|
mit
| 5,922 |
from io import IOBase
from django.conf import settings
from django.core import signals
from django.core.handlers import base
from django.http import HttpRequest, QueryDict, parse_cookie
from django.urls import set_script_prefix
from django.utils.encoding import repercent_broken_unicode
from django.utils.functional import cached_property
from django.utils.regex_helper import _lazy_re_compile
_slashes_re = _lazy_re_compile(rb"/+")
class LimitedStream(IOBase):
"""
Wrap another stream to disallow reading it past a number of bytes.
Based on the implementation from werkzeug.wsgi.LimitedStream
See https://github.com/pallets/werkzeug/blob/dbf78f67/src/werkzeug/wsgi.py#L828
"""
def __init__(self, stream, limit):
self._read = stream.read
self._readline = stream.readline
self._pos = 0
self.limit = limit
def read(self, size=-1, /):
_pos = self._pos
limit = self.limit
if _pos >= limit:
return b""
if size == -1 or size is None:
size = limit - _pos
else:
size = min(size, limit - _pos)
data = self._read(size)
self._pos += len(data)
return data
def readline(self, size=-1, /):
_pos = self._pos
limit = self.limit
if _pos >= limit:
return b""
if size == -1 or size is None:
size = limit - _pos
else:
size = min(size, limit - _pos)
line = self._readline(size)
self._pos += len(line)
return line
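# Editor's sketch (not part of Django): LimitedStream in isolation. Reads stop
# at the declared limit even though the wrapped stream holds more data.
from io import BytesIO
def _demo_limited_stream():
    stream = LimitedStream(BytesIO(b"abcdefghij"), limit=4)
    assert stream.read(3) == b"abc"
    assert stream.read() == b"d"  # one byte left within the limit
    assert stream.read() == b""  # limit reached; remaining bytes are ignored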
class WSGIRequest(HttpRequest):
def __init__(self, environ):
script_name = get_script_name(environ)
# If PATH_INFO is empty (e.g. accessing the SCRIPT_NAME URL without a
# trailing slash), operate as if '/' was requested.
path_info = get_path_info(environ) or "/"
self.environ = environ
self.path_info = path_info
        # Be careful to replace only the first slash in the path, because
        # http://test/something and http://test//something are different, as
        # stated in RFC 3986.
self.path = "%s/%s" % (script_name.rstrip("/"), path_info.replace("/", "", 1))
self.META = environ
self.META["PATH_INFO"] = path_info
self.META["SCRIPT_NAME"] = script_name
self.method = environ["REQUEST_METHOD"].upper()
# Set content_type, content_params, and encoding.
self._set_content_type_params(environ)
try:
content_length = int(environ.get("CONTENT_LENGTH"))
except (ValueError, TypeError):
content_length = 0
self._stream = LimitedStream(self.environ["wsgi.input"], content_length)
self._read_started = False
self.resolver_match = None
def _get_scheme(self):
return self.environ.get("wsgi.url_scheme")
@cached_property
def GET(self):
# The WSGI spec says 'QUERY_STRING' may be absent.
raw_query_string = get_bytes_from_wsgi(self.environ, "QUERY_STRING", "")
return QueryDict(raw_query_string, encoding=self._encoding)
def _get_post(self):
if not hasattr(self, "_post"):
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
@cached_property
def COOKIES(self):
raw_cookie = get_str_from_wsgi(self.environ, "HTTP_COOKIE", "")
return parse_cookie(raw_cookie)
@property
def FILES(self):
if not hasattr(self, "_files"):
self._load_post_and_files()
return self._files
POST = property(_get_post, _set_post)
class WSGIHandler(base.BaseHandler):
request_class = WSGIRequest
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.load_middleware()
def __call__(self, environ, start_response):
set_script_prefix(get_script_name(environ))
signals.request_started.send(sender=self.__class__, environ=environ)
request = self.request_class(environ)
response = self.get_response(request)
response._handler_class = self.__class__
status = "%d %s" % (response.status_code, response.reason_phrase)
response_headers = [
*response.items(),
*(("Set-Cookie", c.output(header="")) for c in response.cookies.values()),
]
start_response(status, response_headers)
if getattr(response, "file_to_stream", None) is not None and environ.get(
"wsgi.file_wrapper"
):
# If `wsgi.file_wrapper` is used the WSGI server does not call
# .close on the response, but on the file wrapper. Patch it to use
# response.close instead which takes care of closing all files.
response.file_to_stream.close = response.close
response = environ["wsgi.file_wrapper"](
response.file_to_stream, response.block_size
)
return response
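# Editor's note (not part of this module): WSGIHandler is normally created via
# django.core.wsgi.get_wsgi_application(), the conventional WSGI entry point;
# it assumes DJANGO_SETTINGS_MODULE is configured before it is called.
def _demo_wsgi_entry_point():
    from django.core.wsgi import get_wsgi_application
    return get_wsgi_application()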
def get_path_info(environ):
"""Return the HTTP request's PATH_INFO as a string."""
path_info = get_bytes_from_wsgi(environ, "PATH_INFO", "/")
return repercent_broken_unicode(path_info).decode()
def get_script_name(environ):
"""
Return the equivalent of the HTTP request's SCRIPT_NAME environment
variable. If Apache mod_rewrite is used, return what would have been
the script name prior to any rewriting (so it's the script name as seen
from the client's perspective), unless the FORCE_SCRIPT_NAME setting is
set (to anything).
"""
if settings.FORCE_SCRIPT_NAME is not None:
return settings.FORCE_SCRIPT_NAME
# If Apache's mod_rewrite had a whack at the URL, Apache set either
# SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
# rewrites. Unfortunately not every web server (lighttpd!) passes this
# information through all the time, so FORCE_SCRIPT_NAME, above, is still
# needed.
script_url = get_bytes_from_wsgi(environ, "SCRIPT_URL", "") or get_bytes_from_wsgi(
environ, "REDIRECT_URL", ""
)
if script_url:
if b"//" in script_url:
# mod_wsgi squashes multiple successive slashes in PATH_INFO,
# do the same with script_url before manipulating paths (#17133).
script_url = _slashes_re.sub(b"/", script_url)
path_info = get_bytes_from_wsgi(environ, "PATH_INFO", "")
script_name = script_url[: -len(path_info)] if path_info else script_url
else:
script_name = get_bytes_from_wsgi(environ, "SCRIPT_NAME", "")
return script_name.decode()
def get_bytes_from_wsgi(environ, key, default):
"""
Get a value from the WSGI environ dictionary as bytes.
key and default should be strings.
"""
value = environ.get(key, default)
# Non-ASCII values in the WSGI environ are arbitrarily decoded with
# ISO-8859-1. This is wrong for Django websites where UTF-8 is the default.
# Re-encode to recover the original bytestring.
return value.encode("iso-8859-1")
def get_str_from_wsgi(environ, key, default):
"""
Get a value from the WSGI environ dictionary as str.
key and default should be str objects.
"""
value = get_bytes_from_wsgi(environ, key, default)
return value.decode(errors="replace")
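# Editor's sketch (not part of Django): the ISO-8859-1 round trip above in
# action. UTF-8 bytes arrive in the environ decoded as latin-1 by the WSGI
# server; re-encoding recovers the original bytestring.
def _demo_wsgi_bytes():
    environ = {"PATH_INFO": "/caf\xc3\xa9/"}  # UTF-8 bytes seen through latin-1
    raw = get_bytes_from_wsgi(environ, "PATH_INFO", "")
    assert raw == b"/caf\xc3\xa9/"
    assert raw.decode("utf-8") == "/café/"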
|
castiel248/Convert
|
Lib/site-packages/django/core/handlers/wsgi.py
|
Python
|
mit
| 7,339 |
"""
Tools for sending email.
"""
from django.conf import settings
# Imported for backwards compatibility and for the sake
# of a cleaner namespace. These symbols used to be in
# django/core/mail.py before the introduction of email
# backends and the subsequent reorganization (See #10355)
from django.core.mail.message import (
DEFAULT_ATTACHMENT_MIME_TYPE,
BadHeaderError,
EmailMessage,
EmailMultiAlternatives,
SafeMIMEMultipart,
SafeMIMEText,
forbid_multi_line_headers,
make_msgid,
)
from django.core.mail.utils import DNS_NAME, CachedDnsName
from django.utils.module_loading import import_string
__all__ = [
"CachedDnsName",
"DNS_NAME",
"EmailMessage",
"EmailMultiAlternatives",
"SafeMIMEText",
"SafeMIMEMultipart",
"DEFAULT_ATTACHMENT_MIME_TYPE",
"make_msgid",
"BadHeaderError",
"forbid_multi_line_headers",
"get_connection",
"send_mail",
"send_mass_mail",
"mail_admins",
"mail_managers",
]
def get_connection(backend=None, fail_silently=False, **kwds):
"""Load an email backend and return an instance of it.
If backend is None (default), use settings.EMAIL_BACKEND.
Both fail_silently and other keyword arguments are used in the
constructor of the backend.
"""
klass = import_string(backend or settings.EMAIL_BACKEND)
return klass(fail_silently=fail_silently, **kwds)
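# Editor's sketch (usage illustration, not part of Django): get_connection()
# resolves a dotted backend path and forwards keyword arguments to the backend
# constructor; passing the path explicitly bypasses settings.EMAIL_BACKEND.
def _demo_get_connection():
    return get_connection(
        "django.core.mail.backends.console.EmailBackend",
        fail_silently=True,
    )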
def send_mail(
subject,
message,
from_email,
recipient_list,
fail_silently=False,
auth_user=None,
auth_password=None,
connection=None,
html_message=None,
):
"""
Easy wrapper for sending a single message to a recipient list. All members
of the recipient list will see the other recipients in the 'To' field.
If from_email is None, use the DEFAULT_FROM_EMAIL setting.
If auth_user is None, use the EMAIL_HOST_USER setting.
If auth_password is None, use the EMAIL_HOST_PASSWORD setting.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = connection or get_connection(
username=auth_user,
password=auth_password,
fail_silently=fail_silently,
)
mail = EmailMultiAlternatives(
subject, message, from_email, recipient_list, connection=connection
)
if html_message:
mail.attach_alternative(html_message, "text/html")
return mail.send()
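# Editor's sketch (usage illustration, not part of Django): typical send_mail()
# usage; the addresses are placeholders. html_message attaches a text/html
# alternative alongside the plain-text body, as the code above shows.
def _demo_send_mail():
    return send_mail(
        "Subject line",
        "Plain-text body.",
        "from@example.com",
        ["to@example.com"],
        html_message="<p>HTML body.</p>",
    )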
def send_mass_mail(
datatuple, fail_silently=False, auth_user=None, auth_password=None, connection=None
):
"""
Given a datatuple of (subject, message, from_email, recipient_list), send
each message to each recipient list. Return the number of emails sent.
If from_email is None, use the DEFAULT_FROM_EMAIL setting.
If auth_user and auth_password are set, use them to log in.
If auth_user is None, use the EMAIL_HOST_USER setting.
If auth_password is None, use the EMAIL_HOST_PASSWORD setting.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = connection or get_connection(
username=auth_user,
password=auth_password,
fail_silently=fail_silently,
)
messages = [
EmailMessage(subject, message, sender, recipient, connection=connection)
for subject, message, sender, recipient in datatuple
]
return connection.send_messages(messages)
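# Editor's sketch (usage illustration, not part of Django): send_mass_mail()
# reuses one connection for every message; each tuple is
# (subject, message, from_email, recipient_list). The addresses are placeholders.
def _demo_send_mass_mail():
    datatuple = (
        ("Subject A", "Body A", "from@example.com", ["a@example.com"]),
        ("Subject B", "Body B", "from@example.com", ["b@example.com"]),
    )
    return send_mass_mail(datatuple)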
def mail_admins(
subject, message, fail_silently=False, connection=None, html_message=None
):
"""Send a message to the admins, as defined by the ADMINS setting."""
if not settings.ADMINS:
return
if not all(isinstance(a, (list, tuple)) and len(a) == 2 for a in settings.ADMINS):
raise ValueError("The ADMINS setting must be a list of 2-tuples.")
mail = EmailMultiAlternatives(
"%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subject),
message,
settings.SERVER_EMAIL,
[a[1] for a in settings.ADMINS],
connection=connection,
)
if html_message:
mail.attach_alternative(html_message, "text/html")
mail.send(fail_silently=fail_silently)
def mail_managers(
subject, message, fail_silently=False, connection=None, html_message=None
):
"""Send a message to the managers, as defined by the MANAGERS setting."""
if not settings.MANAGERS:
return
if not all(isinstance(a, (list, tuple)) and len(a) == 2 for a in settings.MANAGERS):
raise ValueError("The MANAGERS setting must be a list of 2-tuples.")
mail = EmailMultiAlternatives(
"%s%s" % (settings.EMAIL_SUBJECT_PREFIX, subject),
message,
settings.SERVER_EMAIL,
[a[1] for a in settings.MANAGERS],
connection=connection,
)
if html_message:
mail.attach_alternative(html_message, "text/html")
mail.send(fail_silently=fail_silently)
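# Editor's note (assumption about project settings, implied by the validation
# above): both settings must be lists of 2-tuples, for example:
#
#     ADMINS = [("Ada", "ada@example.com"), ("Grace", "grace@example.com")]
#     MANAGERS = ADMINS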
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/__init__.py
|
Python
|
mit
| 4,958 |
# Mail backends shipped with Django.
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/__init__.py
|
Python
|
mit
| 37 |
"""Base email backend class."""
class BaseEmailBackend:
"""
Base class for email backend implementations.
    Subclasses must at least override send_messages().
open() and close() can be called indirectly by using a backend object as a
context manager:
with backend as connection:
# do something with connection
pass
"""
def __init__(self, fail_silently=False, **kwargs):
self.fail_silently = fail_silently
def open(self):
"""
Open a network connection.
        This method can be overridden by backend implementations to
        open a network connection.
It's up to the backend implementation to track the status of
a network connection if it's needed by the backend.
This method can be called by applications to force a single
network connection to be used when sending mails. See the
send_messages() method of the SMTP backend for a reference
implementation.
The default implementation does nothing.
"""
pass
def close(self):
"""Close a network connection."""
pass
def __enter__(self):
try:
self.open()
except Exception:
self.close()
raise
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def send_messages(self, email_messages):
"""
Send one or more EmailMessage objects and return the number of email
messages sent.
"""
raise NotImplementedError(
"subclasses of BaseEmailBackend must override send_messages() method"
)
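# Editor's sketch (not part of Django): the context-manager protocol defined
# above lets callers hold one connection open across several sends; "backend"
# and "email_messages" are hypothetical placeholders.
def _demo_backend_as_context_manager(backend, email_messages):
    # __enter__ calls open() (and closes again if open() raises);
    # __exit__ always calls close().
    with backend as connection:
        return connection.send_messages(email_messages)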
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/base.py
|
Python
|
mit
| 1,683 |
"""
Email backend that writes messages to console instead of sending them.
"""
import sys
import threading
from django.core.mail.backends.base import BaseEmailBackend
class EmailBackend(BaseEmailBackend):
def __init__(self, *args, **kwargs):
self.stream = kwargs.pop("stream", sys.stdout)
self._lock = threading.RLock()
super().__init__(*args, **kwargs)
def write_message(self, message):
msg = message.message()
msg_data = msg.as_bytes()
charset = (
msg.get_charset().get_output_charset() if msg.get_charset() else "utf-8"
)
msg_data = msg_data.decode(charset)
self.stream.write("%s\n" % msg_data)
self.stream.write("-" * 79)
self.stream.write("\n")
def send_messages(self, email_messages):
"""Write all messages to the stream in a thread-safe way."""
if not email_messages:
return
msg_count = 0
with self._lock:
try:
stream_created = self.open()
for message in email_messages:
self.write_message(message)
self.stream.flush() # flush after each message
msg_count += 1
if stream_created:
self.close()
except Exception:
if not self.fail_silently:
raise
return msg_count
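# Editor's sketch (not part of Django): the console backend accepts any
# writable text stream via the "stream" kwarg popped in __init__ above, which
# makes it easy to capture output instead of writing to stdout.
import io
def _demo_console_backend():
    buffer = io.StringIO()
    backend = EmailBackend(stream=buffer)
    # backend.send_messages([...]) would write each message into `buffer`,
    # followed by a 79-character dashed rule.
    return backend, buffer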
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/console.py
|
Python
|
mit
| 1,426 |
"""
Dummy email backend that does nothing.
"""
from django.core.mail.backends.base import BaseEmailBackend
class EmailBackend(BaseEmailBackend):
def send_messages(self, email_messages):
return len(list(email_messages))
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/dummy.py
|
Python
|
mit
| 234 |
"""Email backend that writes messages to a file."""
import datetime
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
class EmailBackend(ConsoleEmailBackend):
def __init__(self, *args, file_path=None, **kwargs):
self._fname = None
if file_path is not None:
self.file_path = file_path
else:
self.file_path = getattr(settings, "EMAIL_FILE_PATH", None)
self.file_path = os.path.abspath(self.file_path)
try:
os.makedirs(self.file_path, exist_ok=True)
except FileExistsError:
raise ImproperlyConfigured(
"Path for saving email messages exists, but is not a directory: %s"
% self.file_path
)
except OSError as err:
raise ImproperlyConfigured(
"Could not create directory for saving email messages: %s (%s)"
% (self.file_path, err)
)
# Make sure that self.file_path is writable.
if not os.access(self.file_path, os.W_OK):
raise ImproperlyConfigured(
"Could not write to directory: %s" % self.file_path
)
# Finally, call super().
# Since we're using the console-based backend as a base,
# force the stream to be None, so we don't default to stdout
kwargs["stream"] = None
super().__init__(*args, **kwargs)
def write_message(self, message):
self.stream.write(message.message().as_bytes() + b"\n")
self.stream.write(b"-" * 79)
self.stream.write(b"\n")
def _get_filename(self):
"""Return a unique file name."""
if self._fname is None:
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
fname = "%s-%s.log" % (timestamp, abs(id(self)))
self._fname = os.path.join(self.file_path, fname)
return self._fname
def open(self):
if self.stream is None:
self.stream = open(self._get_filename(), "ab")
return True
return False
def close(self):
try:
if self.stream is not None:
self.stream.close()
finally:
self.stream = None
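# Editor's sketch (not part of Django): constructing the file backend with an
# explicit directory instead of the EMAIL_FILE_PATH setting; the temporary
# directory is an arbitrary illustration.
import tempfile
def _demo_filebased_backend():
    target_dir = tempfile.mkdtemp(prefix="django-mail-")
    backend = EmailBackend(file_path=target_dir)
    # Messages sent through this instance are appended to one timestamped
    # "<YYYYmmdd-HHMMSS>-<id>.log" file inside target_dir (see _get_filename()).
    return backend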
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/filebased.py
|
Python
|
mit
| 2,353 |
"""
Backend for test environment.
"""
from django.core import mail
from django.core.mail.backends.base import BaseEmailBackend
class EmailBackend(BaseEmailBackend):
"""
An email backend for use during test sessions.
The test connection stores email messages in a dummy outbox,
rather than sending them out on the wire.
The dummy outbox is accessible through the outbox instance attribute.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not hasattr(mail, "outbox"):
mail.outbox = []
def send_messages(self, messages):
"""Redirect messages to the dummy outbox"""
msg_count = 0
for message in messages: # .message() triggers header validation
message.message()
mail.outbox.append(message)
msg_count += 1
return msg_count
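# Editor's sketch (not part of Django): test code usually lets Django's test
# runner swap in this backend automatically, then asserts against the shared
# outbox. The reset below mirrors what the test framework does between tests.
def _demo_outbox_assertion():
    mail.outbox = []
    # ... code under test would send mail here ...
    assert len(mail.outbox) == 0
    return mail.outbox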
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/locmem.py
|
Python
|
mit
| 885 |
"""SMTP email backend class."""
import smtplib
import ssl
import threading
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.message import sanitize_address
from django.core.mail.utils import DNS_NAME
from django.utils.functional import cached_property
class EmailBackend(BaseEmailBackend):
"""
A wrapper that manages the SMTP network connection.
"""
def __init__(
self,
host=None,
port=None,
username=None,
password=None,
use_tls=None,
fail_silently=False,
use_ssl=None,
timeout=None,
ssl_keyfile=None,
ssl_certfile=None,
**kwargs,
):
super().__init__(fail_silently=fail_silently)
self.host = host or settings.EMAIL_HOST
self.port = port or settings.EMAIL_PORT
self.username = settings.EMAIL_HOST_USER if username is None else username
self.password = settings.EMAIL_HOST_PASSWORD if password is None else password
self.use_tls = settings.EMAIL_USE_TLS if use_tls is None else use_tls
self.use_ssl = settings.EMAIL_USE_SSL if use_ssl is None else use_ssl
self.timeout = settings.EMAIL_TIMEOUT if timeout is None else timeout
self.ssl_keyfile = (
settings.EMAIL_SSL_KEYFILE if ssl_keyfile is None else ssl_keyfile
)
self.ssl_certfile = (
settings.EMAIL_SSL_CERTFILE if ssl_certfile is None else ssl_certfile
)
if self.use_ssl and self.use_tls:
raise ValueError(
"EMAIL_USE_TLS/EMAIL_USE_SSL are mutually exclusive, so only set "
"one of those settings to True."
)
self.connection = None
self._lock = threading.RLock()
@property
def connection_class(self):
return smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP
@cached_property
def ssl_context(self):
if self.ssl_certfile or self.ssl_keyfile:
ssl_context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT)
ssl_context.load_cert_chain(self.ssl_certfile, self.ssl_keyfile)
return ssl_context
else:
return ssl.create_default_context()
def open(self):
"""
Ensure an open connection to the email server. Return whether or not a
new connection was required (True or False) or None if an exception
passed silently.
"""
if self.connection:
# Nothing to do if the connection is already open.
return False
# If local_hostname is not specified, socket.getfqdn() gets used.
# For performance, we use the cached FQDN for local_hostname.
connection_params = {"local_hostname": DNS_NAME.get_fqdn()}
if self.timeout is not None:
connection_params["timeout"] = self.timeout
if self.use_ssl:
connection_params["context"] = self.ssl_context
try:
self.connection = self.connection_class(
self.host, self.port, **connection_params
)
# TLS/SSL are mutually exclusive, so only attempt TLS over
# non-secure connections.
if not self.use_ssl and self.use_tls:
self.connection.starttls(context=self.ssl_context)
if self.username and self.password:
self.connection.login(self.username, self.password)
return True
except OSError:
if not self.fail_silently:
raise
def close(self):
"""Close the connection to the email server."""
if self.connection is None:
return
try:
try:
self.connection.quit()
except (ssl.SSLError, smtplib.SMTPServerDisconnected):
# This happens when calling quit() on a TLS connection
# sometimes, or when the connection was already disconnected
# by the server.
self.connection.close()
except smtplib.SMTPException:
if self.fail_silently:
return
raise
finally:
self.connection = None
def send_messages(self, email_messages):
"""
Send one or more EmailMessage objects and return the number of email
messages sent.
"""
if not email_messages:
return 0
with self._lock:
new_conn_created = self.open()
if not self.connection or new_conn_created is None:
# We failed silently on open().
# Trying to send would be pointless.
return 0
num_sent = 0
for message in email_messages:
sent = self._send(message)
if sent:
num_sent += 1
if new_conn_created:
self.close()
return num_sent
def _send(self, email_message):
"""A helper method that does the actual sending."""
if not email_message.recipients():
return False
encoding = email_message.encoding or settings.DEFAULT_CHARSET
from_email = sanitize_address(email_message.from_email, encoding)
recipients = [
sanitize_address(addr, encoding) for addr in email_message.recipients()
]
message = email_message.message()
try:
self.connection.sendmail(
from_email, recipients, message.as_bytes(linesep="\r\n")
)
except smtplib.SMTPException:
if not self.fail_silently:
raise
return False
return True
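# Editor's sketch (not part of Django): constructing the SMTP backend with
# explicit parameters; the host and credentials are placeholders. use_tls=True
# triggers STARTTLS in open() above. Unset parameters still fall back to
# settings, so a configured settings module is assumed.
def _demo_smtp_backend():
    return EmailBackend(
        host="smtp.example.com",
        port=587,
        username="mailer",
        password="app-password",  # placeholder credential
        use_tls=True,
        timeout=10,
    )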
|
castiel248/Convert
|
Lib/site-packages/django/core/mail/backends/smtp.py
|
Python
|
mit
| 5,740 |