Commit 54e6ae4

attempt to fix asset compilation
ref: https://code.djangoproject.com/ticket/21080#comment:14

1 parent 85c5231, commit 54e6ae4
File tree

1 file changed: +156 -7 lines changed


custom_storages.py

Lines changed: 156 additions & 7 deletions
@@ -1,5 +1,13 @@
+import os
+import posixpath
+import re
+
+from urllib.parse import unquote, urldefrag
+
 from django.conf import settings
 from django.contrib.staticfiles.storage import ManifestFilesMixin, StaticFilesStorage
+from django.contrib.staticfiles.utils import matches_patterns
+from django.core.files.base import ContentFile

 from pipeline.storage import PipelineMixin
 from storages.backends.s3boto3 import S3Boto3Storage
@@ -11,11 +19,152 @@ class MediaStorage(S3Boto3Storage):

 class PipelineManifestStorage(PipelineMixin, ManifestFilesMixin, StaticFilesStorage):
     """
-    Override the replacement patterns to match URL-encoded quotations.
+    Applies patches from https://github.com/django/django/pull/11241 to ignore
+    imports in comments. Ref: https://code.djangoproject.com/ticket/21080
     """
-    patterns = (
-        ("*.css", (
-            r"""(url\((?:['"]|%22|%27){0,1}\s*(.*?)(?:['"]|%22|%27){0,1}\))""",
-            (r"""(@import\s*["']\s*(.*?)["'])""", """@import url("%s")"""),
-        )),
-    )
+
+    def get_comment_blocks(self, content):
+        """
+        Return a list of (start, end) tuples for each comment block.
+        """
+        return [
+            (match.start(), match.end())
+            for match in re.finditer(r"\/\*.*?\*\/", content, flags=re.DOTALL)
+        ]
+
+    def url_converter(self, name, hashed_files, template=None, comment_blocks=[]):
+        """
+        Return the custom URL converter for the given file name.
+        """
+        if template is None:
+            template = self.default_template
+
+        def converter(matchobj):
+            """
+            Convert the matched URL to a normalized and hashed URL.
+            This requires figuring out which files the matched URL resolves
+            to and calling the url() method of the storage.
+            """
+            matched, url = matchobj.groups()
+
+            # Ignore URLs in comments.
+            if self.is_in_comment(matchobj.start(), comment_blocks):
+                return matched
+
+            # Ignore absolute/protocol-relative and data-uri URLs.
+            if re.match(r'^[a-z]+:', url):
+                return matched
+
+            # Ignore absolute URLs that don't point to a static file (dynamic
+            # CSS / JS?). Note that STATIC_URL cannot be empty.
+            if url.startswith('/') and not url.startswith(settings.STATIC_URL):
+                return matched
+
+            # Strip off the fragment so a path-like fragment won't interfere.
+            url_path, fragment = urldefrag(url)
+
+            if url_path.startswith('/'):
+                # Otherwise the condition above would have returned prematurely.
+                assert url_path.startswith(settings.STATIC_URL)
+                target_name = url_path[len(settings.STATIC_URL):]
+            else:
+                # We're using the posixpath module to mix paths and URLs conveniently.
+                source_name = name if os.sep == '/' else name.replace(os.sep, '/')
+                target_name = posixpath.join(posixpath.dirname(source_name), url_path)
+
+            # Determine the hashed name of the target file with the storage backend.
+            hashed_url = self._url(
+                self._stored_name, unquote(target_name),
+                force=True, hashed_files=hashed_files,
+            )
+
+            transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:])
+
+            # Restore the fragment that was stripped off earlier.
+            if fragment:
+                transformed_url += ('?#' if '?#' in url else '#') + fragment
+
+            # Return the hashed version to the file
+            return template % unquote(transformed_url)
+
+        return converter
+
+    def is_in_comment(self, pos, comments):
+        for start, end in comments:
+            if start < pos and pos < end:
+                return True
+            if pos < start:
+                return False
+        return False
+
+    def _post_process(self, paths, adjustable_paths, hashed_files):
+        # Sort the files by directory level
+        def path_level(name):
+            return len(name.split(os.sep))
+
+        for name in sorted(paths, key=path_level, reverse=True):
+            substitutions = True
+            # use the original, local file, not the copied-but-unprocessed
+            # file, which might be somewhere far away, like S3
+            storage, path = paths[name]
+            with storage.open(path) as original_file:
+                cleaned_name = self.clean_name(name)
+                hash_key = self.hash_key(cleaned_name)
+
+                # generate the hash with the original content, even for
+                # adjustable files.
+                if hash_key not in hashed_files:
+                    hashed_name = self.hashed_name(name, original_file)
+                else:
+                    hashed_name = hashed_files[hash_key]
+
+                # then get the original's file content..
+                if hasattr(original_file, 'seek'):
+                    original_file.seek(0)
+
+                hashed_file_exists = self.exists(hashed_name)
+                processed = False
+
+                # ..to apply each replacement pattern to the content
+                if name in adjustable_paths:
+                    old_hashed_name = hashed_name
+                    content = original_file.read().decode(settings.FILE_CHARSET)
+                    for extension, patterns in self._patterns.items():
+                        if matches_patterns(path, (extension,)):
+                            comment_blocks = self.get_comment_blocks(content)
+                            for pattern, template in patterns:
+                                converter = self.url_converter(name, hashed_files, template, comment_blocks)
+                                try:
+                                    content = pattern.sub(converter, content)
+                                except ValueError as exc:
+                                    yield name, None, exc, False
+                    if hashed_file_exists:
+                        self.delete(hashed_name)
+                    # then save the processed result
+                    content_file = ContentFile(content.encode())
+                    # Save intermediate file for reference
+                    saved_name = self._save(hashed_name, content_file)
+                    hashed_name = self.hashed_name(name, content_file)
+
+                    if self.exists(hashed_name):
+                        self.delete(hashed_name)
+
+                    saved_name = self._save(hashed_name, content_file)
+                    hashed_name = self.clean_name(saved_name)
+                    # If the file hash stayed the same, this file didn't change
+                    if old_hashed_name == hashed_name:
+                        substitutions = False
+                    processed = True

+                if not processed:
+                    # or handle the case in which neither processing nor
+                    # a change to the original file happened
+                    if not hashed_file_exists:
+                        processed = True
+                        saved_name = self._save(hashed_name, original_file)
+                        hashed_name = self.clean_name(saved_name)

+                # and then set the cache accordingly
+                hashed_files[hash_key] = hashed_name

+            yield name, hashed_name, processed, substitutions
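The fix hinges on detecting whether a matched @import or url() reference sits inside a /* ... */ block. The following standalone sketch mirrors the approach of get_comment_blocks() and is_in_comment() from the diff above so it can be tried without a configured Django project; the CSS snippet and module-level helper names are illustrative only.

import re

def get_comment_blocks(content):
    # (start, end) offsets of every /* ... */ block, including multi-line ones.
    return [
        (match.start(), match.end())
        for match in re.finditer(r"/\*.*?\*/", content, flags=re.DOTALL)
    ]

def is_in_comment(pos, comments):
    # Blocks come back in document order, so we can stop scanning early.
    for start, end in comments:
        if start < pos < end:
            return True
        if pos < start:
            return False
    return False

css = """
/* @import "retired.css";  commented out, must be left untouched */
@import "active.css";
"""

blocks = get_comment_blocks(css)
for match in re.finditer(r"""@import\s*["']\s*(.*?)["']""", css):
    action = "skipped (inside comment)" if is_in_comment(match.start(), blocks) else "rewritten"
    print(match.group(0), "->", action)

Running this prints that the commented-out import is skipped while the live one would be rewritten, which is the behaviour the patched url_converter() enforces during collectstatic post-processing.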
